Example #1
	def __html__(self):
		markedup = cache.get('markup_' + str(self.key()))
		if not markedup:
			markedup = markup(self.body, self.markup)
			cache.set('markup_' + str(self.key()), markedup)

		return markedup
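A cache-aside read like this usually pairs with invalidation on write; a minimal sketch, assuming a hypothetical put() override on the same model (Post is an assumed class name):

	def put(self):
		# Drop the cached markup so the next __html__ call regenerates it.
		cache.delete('markup_' + str(self.key()))
		return super(Post, self).put()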
Example #2
def input_redis():
    profile_id = "%s" % get_id()
    username = request.form.get("username")
    first_name = request.form.get("first_name")
    last_name = request.form.get("last_name")

    # generate fullname
    fullname = first_name + " " + last_name

    # save into redis
    dict_cache_profile = json.dumps({
        "id": profile_id,
        "username": username,
        "fullname": fullname
    })

    cache.set(key=KEY_CACHE + "_" + profile_id, value=dict_cache_profile)

    response = {
        "status": 200,
        "message": "Berhasil membuat profile",
        "result": {
            "id": profile_id,
            "username": username,
            "fullname": fullname,
        }
    }

    return jsonify(response)
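The cached profile can be read back and decoded later; a minimal sketch, assuming the same cache client and KEY_CACHE prefix (get_profile is hypothetical):

def get_profile(profile_id):
    # Look up the JSON blob written by input_redis and decode it.
    raw = cache.get(KEY_CACHE + "_" + profile_id)
    if raw is None:
        return None
    return json.loads(raw)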
Example #3
def add_event():
    try:
        _json = request.json
        _name = _json['name']
        _start = _json['start'] if 'start' in _json else takeTime()
        _end = _json['end']
        _lat = _json['lat']
        _long = _json['long']
        if _name and _start and _end and _lat and _long:
            sql = "INSERT INTO events(event_name, event_start, event_end, event_lat, event_long) VALUES(%s, %s, %s, %s, %s)"
            data = (_name, _start, _end, _lat, _long)
            conn = mysql.connect()
            cursor = conn.cursor()
            cursor.execute(sql, data)
            conn.commit()
            code = generateCode(cache.get_dict().keys())
            cache.set(code, cursor.lastrowid, timeout=getTimeout(_end))
            message = {"message": "Event added successfully", "code": code}
            resp = jsonify(message)
            resp.status_code = 200
            return resp
        else:
            return not_found
    except Exception as e:
        print(e)
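A quick way to exercise the endpoint; a hedged sketch assuming the route is mounted at /event on a local server (the actual path is not shown above):

import requests

payload = {
    "name": "Team sync",
    "start": "2021-05-01 10:00:00",
    "end": "2021-05-01 11:00:00",
    "lat": 25.03,
    "long": 121.56,
}
# On success the server answers with a generated lookup code.
print(requests.post("http://localhost:5000/event", json=payload).json())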
Example #4
def create_app():
    app = Flask(__name__)

    config = {
        "production": "ProductionConfig",
        "development": "DevelopmentConfig"
    }

    config_name = os.environ.get('APPLICATION_ENV', 'development')
    config_module = importlib.import_module('config.settings')
    config_class = getattr(config_module, config[config_name])
    app.config.from_object(config_class())
    dictConfig(config_class.LOGGING_CONFIG)

    cache.init_app(app)
    # Warm the image cache once at startup if it is empty.
    if not cache.get('images'):
        images = get_all_images()
        cache.set("images", images)

    images_blueprint = Blueprint('images', __name__)
    app.register_blueprint(images_blueprint)

    api = Api(images_blueprint)

    api.add_resource(ImageSearchController, '/images/<string:search_term>')
    api.add_resource(ImageController, '/images')

    return app
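The factory is normally invoked from a WSGI entry point; a minimal sketch, assuming this module is importable as app_factory (the module name is an assumption):

# wsgi.py
from app_factory import create_app

application = create_app()

if __name__ == "__main__":
    application.run(host="0.0.0.0", port=5000)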
Example #5
    def get_cached(self, **kwargs):
        """Gets the model instance from the cache, or, if the instance is not in
        the cache, gets it from the database and puts it in the cache.
        """
        cache_settings = get_cache_settings(self.model)
        lookups = cache_settings.get("lookups")

        keys = list(kwargs.keys())
        single_kwarg_match = len(keys) == 1 and keys[0] in lookups
        multi_kwarg_match = len(keys) != 1 and any(
            sorted(keys) == sorted(lookup) for lookup in lookups if isinstance(lookup, (list, tuple))
        )
        if not single_kwarg_match and not multi_kwarg_match:
            raise ValueError("Caching not allowed with kwargs %s" % ", ".join(keys))

        # Get object from cache or db.
        key = generate_base_key(self.model, **kwargs)
        obj = cache.get(key)
        if obj is not None:
            if isinstance(obj, ObjectDoesNotExist):
                raise self.model.DoesNotExist(repr(obj))
            elif isinstance(obj, MultipleObjectsReturned):
                raise self.model.MultipleObjectsReturned(repr(obj))
            else:
                return obj
        try:
            obj = self.get(**kwargs)
        except (ObjectDoesNotExist, MultipleObjectsReturned) as e:
            # The model-specific subclasses of these exceptions are not
            # pickleable, so we cache the base exception and reconstruct the
            # specific exception when fetching from the cache.
            obj = e.__class__.__base__(repr(e))
            cache.set(key, obj, cache_settings.get("timeout"))
            raise
        # Successful lookup: cache the object and return it (the original
        # snippet is truncated here; this completion follows the docstring).
        cache.set(key, obj, cache_settings.get("timeout"))
        return obj
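Callers use it like a regular manager get(); a minimal sketch, assuming a User model whose cache settings allow lookups by pk (both names are assumptions):

try:
    user = User.objects.get_cached(pk=42)
except User.DoesNotExist:
    user = None  # the negative result is itself cached until the timeout expires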
Example #6
def recommend_article():
    hour = datetime.now().hour
    minute = datetime.now().minute
    recommend_article_cache = cache.get('recommend_cache')
    try:
        if (not recommend_article_cache) or \
                (hour > 9 and 0 < minute <= 2):
            print('Not cache')
            content = requests.get(
                f"http://127.0.0.1:8010/recommend_api/content_clear")
            top_rank = requests.get(
                f"http://127.0.0.1:8010/recommend_api/top_rank")
            content = content.json()
            top_rank = top_rank.json()
            recommend_article = qr.recommend_article(content,
                                                     top_rank["top_rank"])
            json_recommend_article = jsonify(recommend_article)
            json_recommend_article.status_code = 200
            cache.set('recommend_cache', json_recommend_article, timeout=86400)
            return (json_recommend_article)
        else:
            print('content_cache')
            return recommend_article_cache
    finally:
        print('request get /recommend_article')
Example #7
def gsc_tag():
    hour = datetime.now().hour
    minute = datetime.now().minute
    gsc_table = cache.get('supertaste_tag_cache')
    if not gsc_table or \
            (hour > 0 and 0 < minute <= 1):
        # NOTE: each original per-hour clause reduces to hour > X and 0 < minute <= 1,
        # and their union is the test above; the intent was probably
        # hour in (0, 6, 8, 10, 12, 14, 16, 18, 20, 22) with 0 < minute <= 1.
        tag_gsc = read_gzip.tmp_read('dict', 'supertaste')
        tag_gsc['search_content'] = tag_gsc["search_content"].map(
            lambda tag: tag.replace(' ', ','))
        tag_gsc['search_content'].replace('', np.nan, inplace=True)
        tag_gsc = tag_gsc.dropna(how='all')
        tag_gsc = tag_gsc.reset_index().rename(
            columns={tag_gsc.index.name: 'nid'})
        gsc_list = dataframe_to_json(tag_gsc)
        gsc_table = jsonify(gsc_list)
        gsc_table.status_code = 200
        cache.set('supertaste_tag_cache', gsc_table, timeout=7200)
        return gsc_table
    else:
        print('supertaste_tag_cache')
        return gsc_table
Example #8
    def post(self):
        """
        Post method
        :return:
        """

        flask_request = FlaskRequest()
        launch_data_storage = FlaskCacheDataStorage(cache)
        message_launch = FlaskMessageLaunch(
            flask_request,
            current_app.config['LTI_TOOL_CONFIG'],
            launch_data_storage=launch_data_storage)
        message_launch_data = message_launch.get_launch_data()
        pprint.pprint(message_launch_data)
        token = message_launch.get_launch_id()
        cache.set(token, message_launch_data)

        redirection = redirect('http://localhost:3000/deepLinkContent/' +
                               token)

        return redirection
Example #9
def update_hist_graph_scatter(sentiment_term):
    try:
        if sentiment_term:
            df = pd.read_sql("SELECT sentiment.* FROM sentiment_fts fts LEFT JOIN sentiment ON fts.rowid = sentiment.id WHERE fts.sentiment_fts MATCH ? ORDER BY fts.rowid DESC LIMIT 10000", conn, params=(sentiment_term+'*',))
        else:
            df = pd.read_sql("SELECT * FROM sentiment ORDER BY id DESC, unix DESC LIMIT 10000", conn)
        df.sort_values('unix', inplace=True)
        df['date'] = pd.to_datetime(df['unix'], unit='ms')
        df.set_index('date', inplace=True)
        # save this to a file, then have another function that
        # updates because of this, using intervals to read the file.
        # https://community.plot.ly/t/multiple-outputs-from-single-input-with-one-callback/4970

        # store related sentiments in cache
        # NOTE: assuming a (key, value, timeout) cache API; the original passed the
        # term as an extra positional argument, so it is folded into the key here.
        cache.set('related_terms_{}'.format(sentiment_term), related_sentiments(df, sentiment_term), 120)

        #print(related_sentiments(df,sentiment_term), sentiment_term)
        init_length = len(df)
        df['sentiment_smoothed'] = df['sentiment'].rolling(int(len(df)/5)).mean()
        df.dropna(inplace=True)
        df = df_resample_sizes(df,maxlen=500)
        X = df.index
        Y = df.sentiment_smoothed.values
        Y2 = df.volume.values

        data = plotly.graph_objs.Scatter(
                x=X,
                y=Y,
                name='Sentiment',
                mode='lines',
                yaxis='y2',
                line=dict(color=app_colors['sentiment-plot'], width=4),
                )

        data2 = plotly.graph_objs.Bar(
                x=X,
                y=Y2,
                name='Volume',
                marker=dict(color=app_colors['volume-bar']),
                )

        df['sentiment_shares'] = list(map(pos_neg_neutral, df['sentiment']))

        # store sentiment share counts in cache (same assumed (key, value, timeout) API)
        cache.set('sentiment_shares_{}'.format(sentiment_term), dict(df['sentiment_shares'].value_counts()), 120)

        return {'data': [data,data2],'layout' : go.Layout(xaxis=dict(range=[min(X),max(X)]), # add type='category to remove gaps'
                                                          yaxis=dict(range=[min(Y2),max(Y2*4)], title='Volume', side='right'),
                                                          yaxis2=dict(range=[min(Y),max(Y)], side='left', overlaying='y',title='sentiment'),
                                                          title='Longer-term sentiment for: "{}"'.format(sentiment_term),
                                                          font={'color':app_colors['text']},
                                                          plot_bgcolor = app_colors['background'],
                                                          paper_bgcolor = app_colors['background'],
                                                          showlegend=False)}

    except Exception as e:
        with open('errors.txt','a') as f:
            f.write(str(e))
            f.write('\n')
Example #10
    def get_cached(self, **kwargs):
        """Gets the model instance from the cache, or, if the instance is not in
        the cache, gets it from the database and puts it in the cache.
        """
        cache_settings = get_cache_settings(self.model)
        lookups = cache_settings.get("lookups")

        keys = list(kwargs.keys())
        single_kwarg_match = len(keys) == 1 and keys[0] in lookups
        multi_kwarg_match = len(keys) != 1 and any(
            sorted(keys) == sorted(lookup)
            for lookup in lookups if isinstance(lookup, (list, tuple)))
        if not single_kwarg_match and not multi_kwarg_match:
            raise ValueError("Caching not allowed with kwargs %s" %
                             ", ".join(keys))

        # Get object from cache or db.
        key = generate_base_key(self.model, **kwargs)
        obj = cache.get(key)
        if obj is not None:
            if isinstance(obj, ObjectDoesNotExist):
                raise self.model.DoesNotExist(repr(obj))
            elif isinstance(obj, MultipleObjectsReturned):
                raise self.model.MultipleObjectsReturned(repr(obj))
            else:
                return obj
        try:
            obj = self.get(**kwargs)
        except (ObjectDoesNotExist, MultipleObjectsReturned) as e:
            # The model-specific subclasses of these exceptions are not
            # pickleable, so we cache the base exception and reconstruct the
            # specific exception when fetching from the cache.
            obj = e.__class__.__base__(repr(e))
            cache.set(key, obj, cache_settings.get("timeout"))
            raise
        # Successful lookup: cache the object and return it (the original
        # snippet is truncated here; this completion follows the docstring).
        cache.set(key, obj, cache_settings.get("timeout"))
        return obj
Example #11
def tvbs_news_tag_analysis():
    hour = datetime.now().hour
    minute = datetime.now().minute
    args = request.args
    day = args.get('day', 90)
    news_tag_summary = cache.get('news_tag_cache' + str(day))
    try:
        if (not news_tag_summary) or \
                (hour > 0 and 0 < minute <= 1):
            # NOTE: the original per-hour clauses all reduce to the test above;
            # the intent was probably a fixed list of refresh hours.
            print('Not cache')
            back_tag_of_dfItem = tra.cache_article_table(
                'news').get_aws_table_cache(day)
            tag_summary = tra.editorTag('news', back_tag_of_dfItem,
                                        'N').editor_tag_summary()
            summary_list = dataframe_to_json(tag_summary)
            news_tag_summary = jsonify(summary_list)
            news_tag_summary.status_code = 200
            cache.set('news_tag_cache' + str(day),
                      news_tag_summary,
                      timeout=7200)
            return news_tag_summary
        else:
            print('news_tag_cache')
            return news_tag_summary
    finally:
        print('request get /tvbs_news_tag_analysis')
Example #12
def tvbs_news_tag_analysis():
    hour = datetime.now().hour
    minute = datetime.now().minute
    search_console = request.args.get('gsc', 'Y', type=str)
    day = request.args.get('day', 90, type=int)
    news_tag_summary = cache.get('health_tag_cache' + str(day) + search_console)
    try:
        if (not news_tag_summary) or \
                (hour > 0 and 0 < minute <= 1):
            # NOTE: same reduction of the per-hour refresh clauses as above.
            print('Not cache')
            back_tag_of_dfItem = tra.cache_article_table('health').get_aws_table_cache(day)
            flag = 'Y' if search_console == 'Y' else 'N'
            tag_summary = tra.editorTag('health', back_tag_of_dfItem, flag).editor_tag_summary()
            summary_list = dataframe_to_json(tag_summary)
            news_tag_summary = jsonify(summary_list)
            news_tag_summary.status_code = 200
            cache.set('health_tag_cache' + str(day) + search_console, news_tag_summary, timeout=7200)
            return news_tag_summary
        else:
            print('health_tag_cache')
            return news_tag_summary
    finally:
        print('request get /tvbs_health_tag_analysis')
Example #13
def generate_sitemap():
	"""Deferred task to generate a Sitemap.xml for all published blog posts"""

	root = etree.Element('urlset', nsmap={None: 'http://www.sitemaps.org/schemas/sitemap/0.9'})

	def add_url(location, last_modified=None, change_freq='always', priority=0.5):
		e = etree.SubElement(root, 'url')
		etree.SubElement(e, 'loc').text = location
		if last_modified:
			etree.SubElement(e, 'lastmod').text = last_modified.strftime('%Y-%m-%dT%H:%M:%S+00:00')
		etree.SubElement(e, 'changefreq').text = change_freq
		etree.SubElement(e, 'priority').text = str(priority)

	for p in Post.objects_published().order('-created_at'):
		add_url(p.absolute_url(external=True), p.updated_at)

	for c in Category.all():
		add_url(c.absolute_url(external=True))

	add_url(url_for('blog.index', _external=True), priority=1.0)
	add_url(url_for('blog.latest_posts', _external=True))

	logging.info('Generated sitemap.xml with %d blog posts', len(root))
	xml = etree.tostring(root, encoding='utf-8', pretty_print=True, xml_declaration=True)
	cache.set('blog_sitemaps', xml)
	return xml
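The generated XML is typically served straight from the cache; a minimal sketch, assuming a Flask blueprint named blog (the route is hypothetical):

from flask import Response

@blog.route('/sitemap.xml')
def sitemap():
	xml = cache.get('blog_sitemaps')
	if xml is None:
		xml = generate_sitemap()  # regenerate on a cold cache
	return Response(xml, mimetype='application/xml')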
Example #14
def create_game():
    num_players = int(request.args.get('players', 1))
    game = Game(update_log)
    game.num_players = num_players
    game_state = game.serialize()
    update_log(game.game_id, 'CREATE GAME: {} players'.format(num_players))
    cache.set(game.game_id, game_state)
    return game.game_id, 201
Example #15
def generate_post_months():
	"""Deferred task to generate a list of months for all blog posts"""

	months = set()
	for p in db.Query(Post, projection=('created_at',)):
		months.add(datetime(p.created_at.year, p.created_at.month, 1))
	cache.set('list-post-months', months)
	return 'OK'
Example #16
    def get_many_cached(self, list_of_kwargs):
        """Gets the model instance from the cache, or, if the instance is not in
        the cache, gets it from the database and puts it in the cache.
        """
        cache_settings = get_cache_settings(self.model)
        lookups = cache_settings.get("lookups")
        prefetch = cache_settings.get("prefetch")

        related = self._get_select_related_from_attrs(self.model, prefetch)
        if related:
            base_qs = self.all().select_related(*related)
        else:
            base_qs = self.all()

        cache_keys = dict()

        for kwargs in list_of_kwargs:
            keys = list(kwargs.keys())
            single_kwarg_match = len(keys) == 1 and keys[0] in lookups
            multi_kwarg_match = len(keys) != 1 and any(
                sorted(keys) == sorted(lookup) for lookup in lookups if isinstance(lookup, (list, tuple))
            )
            if not single_kwarg_match and not multi_kwarg_match:
                raise ValueError("Caching not allowed with kwargs %s" % ", ".join(keys))

            # Get object from cache or db.
            key = generate_base_key(self.model, **kwargs)
            cache_keys[key] = kwargs

        objects = cache.get_many(cache_keys.keys())
        pending_cache_update = dict()
        cached_objects = list()

        for key, kwargs in cache_keys.items():
            obj = objects.get(key, None)
            if obj is not None:
                if isinstance(obj, ObjectDoesNotExist):
                    raise self.model.DoesNotExist(repr(obj))
                elif isinstance(obj, MultipleObjectsReturned):
                    raise self.model.MultipleObjectsReturned(repr(obj))
                else:
                    cached_objects.append(obj)
                    continue
            try:
                obj = base_qs.get(**kwargs)
            except (ObjectDoesNotExist, MultipleObjectsReturned) as e:
                # The model-specific subclasses of these exceptions are not
                # pickleable, so we cache the base exception and reconstruct the
                # specific exception when fetching from the cache.
                obj = e.__class__.__base__(repr(e))
                cache.set(key, obj, cache_settings.get("timeout"))
                raise

            self._prefetch_related(obj, prefetch)
            self._tag_object_as_from_cache(obj)
            pending_cache_update[key] = obj
            cached_objects.append(obj)

        # Write back the fresh objects and return everything (the original
        # snippet is truncated here; this completion mirrors get_cached).
        if pending_cache_update:
            cache.set_many(pending_cache_update, cache_settings.get("timeout"))
        return cached_objects
Example #17
def ec_content():
    minute = datetime.now().minute
    table = cache.get('ec_content')
    # Refresh on a cold cache or on the quarter hour.
    if not table or minute in (0, 15, 30, 45):
        content = get_data('ec_content')
        table = content.to_json(force_ascii=False)
        cache.set('ec_content', table, timeout=3600)
    return table
Example #18
def get_jsapi_ticket():
    key = key_of_jsapi_ticket()
    jsapi_ticket = cache.get(key)
    if not jsapi_ticket:
        data = wechat_client.get_jsapi_ticket()
        jsapi_ticket = data['jsapi_ticket']
        expires_at = data['jsapi_ticket_expires_at']
        timeout = expires_at - int(time.time())
        cache.set(key, jsapi_ticket, timeout)
    return jsapi_ticket
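The ticket's main use is signing WeChat JS-SDK configurations; a sketch of the standard signature scheme (the noncestr default here is illustrative):

import hashlib
import time

def sign_jsapi(url, noncestr='random-nonce'):
    # WeChat JS-SDK signature: sha1 over the ordered key=value query string.
    timestamp = int(time.time())
    raw = 'jsapi_ticket={}&noncestr={}&timestamp={}&url={}'.format(
        get_jsapi_ticket(), noncestr, timestamp, url)
    return {
        'timestamp': timestamp,
        'nonceStr': noncestr,
        'signature': hashlib.sha1(raw.encode('utf-8')).hexdigest(),
    }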
Example #19
def content_update():
    minute = datetime.now().minute
    table = cache.get('supertaste_update')
    # Refresh on a cold cache or on the quarter hour.
    if not table or minute in (0, 15, 30, 45):
        content = get_data('supertaste_update', 1080)
        table = content.to_json(force_ascii=False)
        cache.set('supertaste_update', table, timeout=3600)
    return table
Example #20
 def _invalidate_cache(self, instance):
     """
     Explicitly set a None value instead of just deleting so we don't have any race
     conditions where:
         Thread 1 -> Cache miss, get object from DB
         Thread 2 -> Object saved, deleted from cache
         Thread 1 -> Store (stale) object fetched from DB in cache
     Five seconds should be more than enough time to prevent this from happening for
     a web app.
     """
     cache.set(instance.cache_key, None, 5)
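The hook is typically wired to model saves; a minimal sketch, assuming Django's post_save signal and the same instance.cache_key attribute (the receiver is hypothetical):

from django.db.models.signals import post_save
from django.dispatch import receiver

@receiver(post_save)
def tombstone_on_save(sender, instance, **kwargs):
    # Write the None tombstone so a concurrent reader cannot re-cache stale data.
    if hasattr(instance, 'cache_key'):
        cache.set(instance.cache_key, None, 5)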
Example #21
def news_recom_batch_update():
    batch = request.args.get('batch_date', 1, type=int)
    hour = datetime.now().hour
    minute = datetime.now().minute
    if batch == 0:
        news_update_table_all = cache.get('news_cache_' + str(batch))
        if not news_update_table_all:
            print('Not cache')
            db_config.gcp_db()
            query = """SELECT nid,recom_nid FROM NMServer.News_Recommend;"""
            conn = mysql.connect()
            cur = conn.cursor(pymysql.cursors.DictCursor)
            cur.execute(query)
            rows = cur.fetchall()
            news_update_table_all = jsonify(rows)
            news_update_table_all.status_code = 200
            cache.set('news_cache_' + str(batch),
                      news_update_table_all,
                      timeout=7200)
            cur.close()
            conn.close()
            return news_update_table_all
        else:
            print('Cache news_batch')
            return news_update_table_all
    elif batch == 1:
        news_update_table = cache.get('news_cache_' + str(batch))
        if not news_update_table or \
                (hour > 1 and 0 < minute <= 1):
            # NOTE: the original clauses reduce to this test; the intent was
            # likely hour in (1, 18) with 0 < minute <= 1.
            print('Not cache')
            db_config.gcp_db()
            query = """SELECT nid,recom_nid FROM NMServer.News_Recommend WHERE date(last_modified_date) = CURDATE();"""
            conn = mysql.connect()
            cur = conn.cursor(pymysql.cursors.DictCursor)
            cur.execute(query)
            rows = cur.fetchall()
            news_update_table = jsonify(rows)
            news_update_table.status_code = 200
            cache.set('news_cache_' + str(batch),
                      news_update_table,
                      timeout=7200)
            cur.close()
            conn.close()
            return news_update_table
        else:
            print('news_batch_cache_True')
            return news_update_table
    else:
        return jsonify(
            {"error": "invalid batch_date: please pass 0 or 1"})
Example #22
 def _invalidate_cache(self, instance):
     """
     Explicitly set a None value instead of just deleting so we don't have any race
     conditions where:
         Thread 1 -> Cache miss, get object from DB
         Thread 2 -> Object saved, deleted from cache
         Thread 1 -> Store (stale) object fetched from DB in cache
     Five seconds should be more than enough time to prevent this from happening for
     a web app.
     """
     cache.set(instance.cache_key, None, 5)
Example #23
 def resolve_articles(self, info, urls):
     results = []
     for url in urls:
         try:
             result = get_article(url)
             results.append(ArticleSchema(**result))
             cache.set(url, json.dumps(result))
         except Exception:
             results.append(
                 GraphQLError('Failed to parse url {}'.format(url)))
     return results
Example #24
def gsc():
    table = cache.get('gsc')
    if not table:
        tmp = get_data('gsc', domain='Health')
        table = tmp.to_json(force_ascii=False)
        cache.set('gsc', table, timeout=86100)
    return table
Example #25
	def __html__(self):
		markedup = cache.get('markup_' + str(self.key()))
		if not markedup:
			# check markup library?
			func = current_app.jinja_env.filters.get('markup')
			if not func:
				return self.content

			markedup = func(self.content, 'Markdown')
			cache.set('markup_' + str(self.key()), markedup)

		return markedup
Example #26
def index():
    client_id = request.cookies.get('client_id', False)
    res = make_response(render_template('index.html'))
    if not client_id:
        client_id = set_client_id()
        res.set_cookie('client_id', client_id)
    cache_api = cache.get(client_id)
    if cache_api is None:
        # First visit: create and cache a fresh API client.
        client_api = API()
        cache.set(client_id, client_api)
    else:
        # Returning visit: refresh the entry with a 24-hour timeout.
        cache.set(client_id, cache_api, timeout=60 * 60 * 24)
    return res
Example #27
def dislike_undo():
    username = cache.get("username")
    article_id = request.form["id"]
    doc_type = request.form["doc_type"]
    delete_user_feedback(username, "dislike", article_id)
    entries = get_users_likes(username)
    script = {"script": {"inline": "ctx._source.dislike -= 1"}}
    es.update(index="testindex", doc_type=doc_type, body=script, id=article_id)
    logs = []
    for entry in entries:
        logs.append(entry["id"])
    cache.set(username + "_like_log", logs)
    return jsonify({})
Example #28
  def post(self):
    email = self.get_argument("email", None)
    first_name = self.get_argument("first_name", None)
    last_name = self.get_argument("last_name", None)
    contact_number = self.get_argument("contact_number", None)
    title = self.get_argument("title", None)
    content = self.get_argument("content", None)
    link = self.get_argument("link", None)
    
    cache.set(str(email), [first_name, last_name, contact_number, title, content, link])
    send_mail([email], settings.DEFAULT_EMAIL_SUBJECT, settings.DEFAULT_EMAIL_CONTENT % (last_name))
    send_mail([settings.DEDICATED_EMAIL_USER], settings.DEDICATED_EMAIL_SUBJECT % (email), settings.DEDICATED_EMAIL_CONTENT % (last_name, first_name, datetime.datetime.now()))

    self.finish({"status":200})
Example #29
 def create_token(self, user):
     payload = {
         'sub': user,
         'iat': datetime.utcnow(),
         'exp': datetime.utcnow() + timedelta(days=1)
     }
     token = jwt.encode(payload, self.TOKEN_SECRET)
     token = token.decode('unicode_escape')
     print('token created:')
     print(token)
     cache.set('token_' + token, user, 86400)
     print('token cached:')
     print(cache.get('token_' + token))
     return token
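Validation is the reverse lookup; a minimal sketch of checking a presented token against the cache (verify_token is hypothetical):

 def verify_token(self, token):
     # A cache hit proves the token was issued here and has not yet expired.
     user = cache.get('token_' + token)
     if user is None:
         raise ValueError('unknown or expired token')
     return user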
Example #30
def like():
    username = cache.get("username")
    title = request.form["title"]
    article_id = request.form["id"]
    doc_type = request.form["doc_type"]
    print(doc_type)
    date = datetime.now().strftime("%Y/%m/%d %H:%M:%S")
    insert_user_feedback(username, title, date, "like", article_id)
    entries = get_users_likes(username)
    script = {"script": {"inline": "ctx._source.like +=1"}}
    es.update(index="testindex", doc_type=doc_type, body=script, id=article_id)
    logs = []
    for entry in entries:
        logs.append(entry["id"])
    cache.set(username + "_like_log", logs)
    return jsonify({})
Example #31
def content():
    minute = datetime.now().minute
    table = cache.get('health_content')
    day = request.args.get('day', 1, type=int)
    if not table or minute in (0, 15, 30, 45):
        content = get_data('health_content', 1080)
        table = content.to_json(force_ascii=False)
        cache.set('health_content', table, timeout=3600)
    else:
        content = pd.read_json(table)
    # Keep only the last `day` days and drop the helper column.
    filt_tmp = content[content['date'] >= date.today() - timedelta(days=day)]
    filt_tmp = filt_tmp.drop(['date'], axis=1)
    return filt_tmp.to_json(force_ascii=False)
Example #32
def dislike():
    username = cache.get("username")
    title = request.form["title"]
    article_id = request.form["id"]
    doc_type = request.form["doc_type"]
    date = datetime.now().strftime("%Y/%m/%d %H:%M:%S")
    insert_user_feedback(username, title, date, "dislike", article_id)
    entries = get_users_dislikes(username)
    res = es.get(index="testindex", doc_type=doc_type, id=article_id)
    print(res["_source"]["like"])
    if res["_source"]["like"] > 0:
        script = {"script": {"inline": "ctx._source.like -= 1"}}
        es.update(index="testindex", doc_type=doc_type, id=article_id, body=script)
    logs = [entry["id"] for entry in entries]
    cache.set(username + "_dislike_log", logs)
    return jsonify({})
Example #33
def dict_table():
    minute = datetime.now().minute
    table = cache.get('dict')
    domain = request.args.get('domain', '', type=str)
    if not table or minute in (0, 15, 30, 45):
        content = get_data('dict')
        table = content.to_json(force_ascii=False)
        cache.set('dict', table, timeout=3600)
    else:
        content = pd.read_json(table)
    filt_tmp = content[content['domain'].isin(domain.split(','))]
    filt_tmp = filt_tmp.drop(['domain'], axis=1)
    return filt_tmp.to_json(force_ascii=False)
Example #34
def user_prefer():
    hour = datetime.now().hour
    minute = datetime.now().minute
    prefer_cache = cache.get('prefer_cache')
    try:
        if (not prefer_cache) or \
                (hour > 9 and 0 < minute <= 2):
            df_users_prefer_score = qr.user_prefer()
            js_user_prefer_score = dataframe_to_json(df_users_prefer_score)
            json_user_prefer = jsonify(js_user_prefer_score)
            json_user_prefer.status_code = 200
            cache.set('prefer_cache', json_user_prefer, timeout=7200)
            return (json_user_prefer)
        else:
            print('prefer_cache')
            return prefer_cache
    finally:
        print('request get /prefer_cache')
Example #35
def content_clear():
    hour = datetime.now().hour
    minute = datetime.now().minute
    content_cache = cache.get('content_cache')
    try:
        if (not content_cache) or \
                (hour > 9 and 0 < minute <= 2):
            print('Not cache')
            dict_sc = qr.cleaning_content()
            json_dict_sc = jsonify(dict_sc)
            json_dict_sc.status_code = 200
            cache.set('content_cache', json_dict_sc, timeout=86400)
            return (json_dict_sc)
        else:
            print('content_cache')
            return content_cache
    finally:
        print('request get /content_clear')
Example #36
def result():
    minute = datetime.now().minute
    table = cache.get('recommend')
    # Refresh on a cold cache or during the first five minutes of each quarter hour.
    if not table or (0 < minute <= 5) or (15 < minute <= 20) or (30 <= minute <= 35) or (45 <= minute <= 50):
        tmp = get_data('recommend_list', domain='Health')
        table = tmp.to_json(force_ascii=False)
        cache.set('recommend', table, timeout=86100)
    return table
Example #37
def generate_sitemap():
	"""Deferred task to generate a Sitemap.xml for all published flat pages"""

	root = etree.Element('urlset', nsmap={None: 'http://www.sitemaps.org/schemas/sitemap/0.9'})

	def add_url(location, last_modified=None, change_freq='always', priority=0.5):
		e = etree.SubElement(root, 'url')
		etree.SubElement(e, 'loc').text = location
		if last_modified:
			etree.SubElement(e, 'lastmod').text = last_modified.strftime('%Y-%m-%dT%H:%M:%S+00:00')
		etree.SubElement(e, 'changefreq').text = change_freq
		etree.SubElement(e, 'priority').text = str(priority)

	for p in Flatpage.all():
		add_url(p.absolute_url(external=True))

	logging.info('Generated sitemap.xml with %d flatpages.', len(root))
	xml = etree.tostring(root, encoding='utf-8', pretty_print=True, xml_declaration=True)
	cache.set('flatpages_sitemaps', xml)
	return xml
Example #38
def top_rank():
    hour = datetime.now().hour
    minute = datetime.now().minute
    top_rank_cache = cache.get('top_rank')
    try:
        if (not top_rank_cache) or \
                (hour > 9 and 0 < minute <= 2):
            print('Not cache')
            content = requests.get(
                f"http://127.0.0.1:8010/recommend_api/content_clear")
            content = content.json()
            top_rank = qr.Top_Ranking(content)
            json_top_rank = jsonify({"top_rank": top_rank})
            json_top_rank.status_code = 200
            cache.set('top_rank', json_top_rank, timeout=86400)
            return (json_top_rank)
        else:
            print('content_cache')
            return top_rank_cache
    finally:
        print('request get /top_rank')
Example #39
    def _get_details(self):
        details = cache.get("IPDetails:{}".format(self.address))
        if details:
            return details

        details = {"ASN": None, "Holder": None, "Prefix": None}

        if self.ip_object.iptype() in ['RESERVED', 'UNSPECIFIED', 'LOOPBACK',
                                       'UNASSIGNED', 'DOCUMENTATION', 'ULA',
                                       'LINKLOCAL', 'PRIVATE']:
            return details

        found = False
        for cache_entry in cache.keys():
            if cache_entry.startswith("IPDetailsPrefix:"):
                prefix_details = cache.get(cache_entry)
                prefix = IPy.IP(prefix_details["Prefix"])

                if self.ip_object in prefix:
                    details = prefix_details
                    found = True
                    break

        if not found:
            URL = IP.RIPESTAT_URL.format(ip=self.address)

            res = json.loads(urllib2.urlopen(URL).read())

            if res["status"] == "ok":
                if res["data"]["asns"]:
                    details["ASN"] = str(res["data"]["asns"][0]["asn"])
                    details["Holder"] = res["data"]["asns"][0]["holder"]
                    details["Prefix"] = res["data"]["resource"]

                    cache.set("IPDetailsPrefix:{}".format(details["Prefix"]),
                              details, 60*60*24*7)

        cache.set("IPDetails:{}".format(self.address), details, 60*60*24*7)

        return details
Example #40
def update_pos_neg_neutral_cache(sentiment_term, df):
    """
    This method is used for updating positive, negative and neutral counts when reddit streaming comes in
    df is only the incremented part of the sentiment table
    """
    THRESHOLD = 0.3
    pos = sum(1 for x in df["sentiment"] if float(x) >= THRESHOLD)
    neg = sum(1 for x in df["sentiment"] if float(x) <= -THRESHOLD)
    neutral = sum(1 for x in df["sentiment"] if -THRESHOLD < float(x) < THRESHOLD)
    old_pos = cache.get("positive_count_{}".format(sentiment_term))
    old_neg = cache.get("negative_count_{}".format(sentiment_term))
    old_neu = cache.get("neutral_count_{}".format(sentiment_term))
    if old_pos:
        cache.client.incr("positive_count_{}".format(sentiment_term), pos)
    else:
        cache.set("positive_count_{}".format(sentiment_term), pos)
    
    if old_neg:
        cache.client.incr("negative_count_{}".format(sentiment_term), neg)
    else:
        cache.set("negative_count_{}".format(sentiment_term), neg)
    
    if old_neu:
        cache.client.incr("neutral_count_{}".format(sentiment_term), neutral)
    else:
        cache.set("neutral_count_{}".format(sentiment_term), neutral)
    return (pos, neg, neutral)
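Reading the running totals back is symmetric; a minimal sketch, assuming the same key scheme (get_pos_neg_neutral is hypothetical):

def get_pos_neg_neutral(sentiment_term):
    # Missing keys fall back to zero so unseen terms read as (0, 0, 0).
    return tuple(int(cache.get("{}_count_{}".format(kind, sentiment_term)) or 0)
                 for kind in ("positive", "negative", "neutral"))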
Example #41
    def process_request(self, request):

        # Only run on POST requests
        if request.method != 'POST':
            return None

        url = request.get_full_path()

        # Only operate on if the view is marked as enabling sticky files
        if not ('file_upload_cache' in request.session and
                url in request.session['file_upload_cache']):
            return None

        # Retrieve any previous request.FILES
        restored_files_dict = cache.get(request.session.session_key)
        if restored_files_dict:
            restored_files_dict.update(request.FILES)

            # Merge current request.FILES with anything we had previously
            request._files = restored_files_dict

        # Save request.FILES for subsequent requests
        if request.FILES:
            cache.set(request.session.session_key, request.FILES)
Example #42
def user_recomment():
    hour = datetime.now().hour
    minute = datetime.now().minute
    user_article_recommend = cache.get('user_article_recommend')
    try:
        if (not user_article_recommend) or \
                (hour > 9 and 0 < minute <= 2):
            df_users_prefer_score = requests.get(
                f"http://127.0.0.1:8010/recommend_api/user_prefer")
            recommend_article = requests.get(
                f"http://127.0.0.1:8010/recommend_api/recommend_article")
            top_rank = requests.get(
                f"http://127.0.0.1:8010/recommend_api/top_rank")
            df_users_prefer_score = pd.DataFrame.from_records(
                df_users_prefer_score.json())
            recommend_article = recommend_article.json()
            top_rank = top_rank.json()
            df_user_recommend = qr.user_recomment(df_users_prefer_score,
                                                  recommend_article,
                                                  top_rank["top_rank"])
            df_user_recommend["recommend_article"] = [
                ','.join(map(str, l))
                for l in df_user_recommend['recommend_article']
            ]
            js_user_recommend = dataframe_to_json(df_user_recommend)
            json_user_recommend = jsonify(js_user_recommend)
            json_user_recommend.status_code = 200
            cache.set('user_article_recommend',
                      json_user_recommend,
                      timeout=86400)
            return (json_user_recommend)
        else:
            print('user_article_recommend')
            return user_article_recommend
    finally:
        print('request get /user_article_recommend')