Example #1
def update_pos_neg_neutral_cache(sentiment_term, df):
    """
    Update the positive, negative and neutral counts when new Reddit streaming data comes in.
    df is only the newly added slice of the sentiment table.
    """
    THRESHOLD = 0.3
    pos = len([x for x in df["sentiment"] if float(x) >= THRESHOLD])
    neg = len([x for x in df["sentiment"] if float(x) <= -THRESHOLD])
    neutral = len([x for x in df["sentiment"] if -THRESHOLD < float(x) < THRESHOLD])
    old_pos = cache.get("positive_count_{}".format(sentiment_term))
    old_neg = cache.get("negative_count_{}".format(sentiment_term))
    old_neu = cache.get("neutral_count_{}".format(sentiment_term))
    if old_pos:
        cache.client.incr("positive_count_{}".format(sentiment_term), pos)
    else:
        cache.set("positive_count_{}".format(sentiment_term), pos)
    
    if old_neg:
        cache.client.incr("negative_count_{}".format(sentiment_term), neg)
    else:
        cache.set("negative_count_{}".format(sentiment_term), neg)
    
    if old_neu:
        cache.client.incr("neutral_count_{}".format(sentiment_term), neutral)
    else:
        cache.set("neutral_count_{}".format(sentiment_term), neutral)
    return (pos, neg, neutral)
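
The counting logic above can be checked in isolation. A minimal sketch with hypothetical sentiment scores and the same 0.3 threshold:

# Hypothetical scores, only to illustrate the threshold split used above.
THRESHOLD = 0.3
scores = [0.8, -0.5, 0.1, -0.2, 0.45]
pos = sum(1 for s in scores if s >= THRESHOLD)
neg = sum(1 for s in scores if s <= -THRESHOLD)
neutral = len(scores) - pos - neg
print(pos, neg, neutral)  # -> 2 1 2
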
Example #2
def news_recom_batch_update():
    #            args = request.args
    #            batch = args.get('batch_date') if 'batch_date' in args else None
    batch = request.args.get('batch_date', 1, type=int)
    #            batch = request.args['batch_date']
    #            now = datetime.now()
    hour = datetime.now().hour
    minute = datetime.now().minute
    if batch == 0:
        news_update_table_all = cache.get('news_cache_' + str(batch))
        if not news_update_table_all:
            print('Not cache')
            db_config.gcp_db()
            insert = """SELECT nid,recom_nid FROM NMServer.News_Recommend;"""
            conn = mysql.connect()
            cur = conn.cursor(pymysql.cursors.DictCursor)
            cur.execute(insert)
            rows = cur.fetchall()
            news_update_table_all = jsonify(rows)
            news_update_table_all.status_code = 200
            cache.set('news_cache_' + str(batch),
                      news_update_table_all,
                      timeout=7200)
            cur.close()
            conn.close()
            return news_update_table_all
        else:
            print('Cache news_batch')
            return news_update_table_all
    elif batch == 1:
        news_update_table = cache.get('news_cache_' + str(batch))
        if not news_update_table or \
                (hour > 1 and minute == 1) or (hour > 18 and minute == 1):
            print('Not cache')
            db_config.gcp_db()
            insert = """SELECT nid,recom_nid FROM NMServer.News_Recommend WHERE date(last_modified_date) = CURDATE();"""
            conn = mysql.connect()
            cur = conn.cursor(pymysql.cursors.DictCursor)
            cur.execute(insert)
            rows = cur.fetchall()
            news_update_table = jsonify(rows)
            news_update_table.status_code = 200
            cache.set('news_cache_' + str(batch),
                      news_update_table,
                      timeout=7200)
            cur.close()
            conn.close()
            return news_update_table
        else:
            print('news_batch_cache_True')
            return news_update_table
    else:
        return jsonify(
            {"error": "error information. please input batch_date 0 or 1"})
Example #3
    def create_token(self, user):
        payload = {
            'sub': user,
            'iat': datetime.utcnow(),
            'exp': datetime.utcnow() + timedelta(days=1)
        }
        token = jwt.encode(payload, self.TOKEN_SECRET)
        token = token.decode('unicode_escape')
        print('token creado')
        print(token)
        cache.set('token_' + token, user, 86400)
        print(' ')
        print('token cacheado')
        print(cache.get('token_' + token))
        return token
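
A quick round-trip check of such a token with PyJWT (a sketch; TOKEN_SECRET here is a placeholder, and jwt.encode defaults to HS256):

import jwt

TOKEN_SECRET = 'change-me'  # placeholder, not the application's real secret
token = jwt.encode({'sub': 'alice'}, TOKEN_SECRET)
claims = jwt.decode(token, TOKEN_SECRET, algorithms=['HS256'])
print(claims['sub'])  # -> alice
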
Example #4
    def post(self):
        """
        Post method to obtain the Deeplink content payload
        :return:
        """

        # parser = reqparse.RequestParser()
        logs.api_logger.info("DeepLinking Content: " + str(request.data),
                             extra={
                                 "clientip": request.remote_addr,
                                 'path': request.path,
                                 "user": request.remote_user
                             })

        request_json = json.loads(request.data)

        token = request_json["token"]
        jwt = cache.get(token)
        lti_content = request_json["deep_link_content"]

        content = deep_link_content(lti_content)

        deep_link_jwt = self.get_message_jwt(jwt, content)
        signed_jwt = self.encode_jwt(deep_link_jwt, jwt['aud'], jwt['iss'])

        return {
            'deep_link_response':
            deep_link_jwt,
            'signed_jwt':
            signed_jwt,
            'return_url':
            jwt['https://purl.imsglobal.org/spec/lti-dl/claim/deep_linking_settings']
            ['deep_link_return_url']
        }
Example #5
    def get_cached(self, **kwargs):
        """Gets the model instance from the cache, or, if the instance is not in
        the cache, gets it from the database and puts it in the cache.
        """
        cache_settings = get_cache_settings(self.model)
        lookups = cache_settings.get("lookups")

        keys = list(kwargs.keys())
        single_kwarg_match = len(keys) == 1 and keys[0] in lookups
        multi_kwarg_match = len(keys) != 1 and any(
            sorted(keys) == sorted(lookup) for lookup in lookups if isinstance(lookup, (list, tuple))
        )
        if not single_kwarg_match and not multi_kwarg_match:
            raise ValueError("Caching not allowed with kwargs %s" % ", ".join(keys))

        # Get object from cache or db.
        key = generate_base_key(self.model, **kwargs)
        obj = cache.get(key)
        if obj is not None:
            if isinstance(obj, ObjectDoesNotExist):
                raise self.model.DoesNotExist(repr(obj))
            elif isinstance(obj, MultipleObjectsReturned):
                raise self.model.MultipleObjectsReturned(repr(obj))
            else:
                return obj
        try:
            obj = self.get(**kwargs)
        except (ObjectDoesNotExist, MultipleObjectsReturned) as e:
            # The model-specific subclasses of these exceptions are not
            # pickleable, so we cache the base exception and reconstruct the
            # specific exception when fetching from the cache.
            obj = e.__class__.__base__(repr(e))
            cache.set(key, obj, cache_settings.get("timeout"))
            raise
        # Cache the freshly fetched object and return it, as the docstring describes.
        cache.set(key, obj, cache_settings.get("timeout"))
        return obj
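
The notable detail is that misses are cached too, as a pickleable base exception. The same idea in isolation, with a plain dict standing in for the cache backend (all names here are illustrative):

class DoesNotExist(Exception):
    pass

fake_cache = {}  # stand-in for the real cache backend

def cached_lookup(key, fetch):
    obj = fake_cache.get(key)
    if isinstance(obj, Exception):
        raise obj  # a previously cached miss fails fast
    if obj is not None:
        return obj
    try:
        obj = fetch()
    except DoesNotExist as e:
        fake_cache[key] = e  # cache the failure itself
        raise
    fake_cache[key] = obj
    return obj
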
Example #6
def gsc_tag():
    hour = datetime.now().hour
    minute = datetime.now().minute
    gsc_table = cache.get('supertaste_tag_cache')
    if not gsc_table or \
            (hour > 0 and minute == 1) or (hour > 6 and minute == 1) or \
            (hour > 8 and minute == 1) or (hour > 10 and minute == 1) or \
            (hour > 12 and minute == 1) or (hour > 14 and minute == 1) or \
            (hour > 16 and minute == 1) or (hour > 18 and minute == 1) or \
            (hour > 20 and minute == 1) or (hour > 22 and minute == 1):
        tag_gsc = read_gzip.tmp_read('dict', 'supertaste')
        tag_gsc['search_content'] = tag_gsc["search_content"].map(
            lambda tag: tag.replace(' ', ','))
        tag_gsc['search_content'].replace('', np.nan, inplace=True)
        tag_gsc = tag_gsc.dropna(how='all')
        tag_gsc = tag_gsc.reset_index().rename(
            columns={tag_gsc.index.name: 'nid'})
        gsc_list = dataframe_to_json(tag_gsc)
        gsc_table = jsonify(gsc_list)
        gsc_table.status_code = 200
        cache.set('supertaste_tag_cache', gsc_table, timeout=7200)
        return gsc_table
    else:
        print('supertaste_tag_cache')
        return gsc_table
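
The long hour/minute chains in these views amount to "refresh on a cache miss, or at minute 1 of the listed hours" (as written they compare with hour > X, so in practice they also fire at minute 1 of any later hour). A sketch of the presumably intended predicate with the schedule written as data; the hour set is an assumption read off the comparisons above:

from datetime import datetime

REFRESH_HOURS = {0, 6, 8, 10, 12, 14, 16, 18, 20, 22}  # assumed schedule

def needs_refresh(cached_value):
    now = datetime.now()
    return cached_value is None or (now.hour in REFRESH_HOURS and now.minute == 1)
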
Example #7
def tvbs_news_tag_analysis():
    hour = datetime.now().hour
    minute = datetime.now().minute
    args = request.args
    day = args.get('day') if 'day' in args else 90
    news_tag_summary = cache.get('news_tag_cache' + str(day))
    try:
        if (not news_tag_summary) or \
                (hour > 0 and minute == 1) or (hour > 6 and minute == 1) or \
                (hour > 8 and minute == 1) or (hour > 10 and minute == 1) or \
                (hour > 12 and minute == 1) or (hour > 14 and minute == 1) or \
                (hour > 16 and minute == 1) or (hour > 18 and minute == 1) or \
                (hour > 20 and minute == 1) or (hour > 22 and minute == 1):
            #            if (not news_tag_summary):
            print('Not cache')
            back_tag_of_dfItem = tra.cache_article_table(
                'news').get_aws_table_cache(day)
            tag_summary = tra.editorTag('news', back_tag_of_dfItem,
                                        'N').editor_tag_summary()
            summary_list = dataframe_to_json(tag_summary)
            news_tag_summary = jsonify(summary_list)
            news_tag_summary.status_code = 200
            cache.set('news_tag_cache' + str(day),
                      news_tag_summary,
                      timeout=7200)
            return news_tag_summary
        else:
            print('news_tag_cache')
            return news_tag_summary
    finally:
        print('request get /tvbs_news_tag_analysis')
Example #8
def update_related_terms(sentiment_term):
    try:

        # get data from cache
        for i in range(100):
            related_terms = cache.get(
                'related_terms',
                sentiment_term)  # term: {mean sentiment, count}
            if related_terms:
                break
            time.sleep(0.1)

        if not related_terms:
            return None

        buttons = [
            html.Button('{}({})'.format(term, related_terms[term][1]),
                        id='related_term_button',
                        value=term,
                        className='btn',
                        type='submit',
                        style={
                            'background-color': '#4CBFE1',
                            'margin-right': '5px',
                            'margin-top': '5px'
                        }) for term in related_terms
        ]
        #size: related_terms[term][1], sentiment related_terms[term][0]

        sizes = [related_terms[term][1] for term in related_terms]
        smin = min(sizes)
        smax = max(sizes) - smin

        buttons = [
            html.H5('Terms related to "{}": '.format(sentiment_term),
                    style={'color': app_colors['text']})
        ] + [
            html.Span(
                term,
                className="chip",
                style={
                    'background-color':
                    sentiment_colors[round(related_terms[term][0] * 2) / 2],
                    'margin-right':
                    '15px',
                    'margin-top':
                    '15px',
                    'color':
                    '#FFFFFF',
                    'font-size':
                    '15px'
                }) for term in related_terms
        ]

        return buttons

    except Exception as e:
        with open('errors.txt', 'a') as f:
            f.write(str(e))
            f.write('\n')
Example #10
    def get(self, *args, **kwargs):
        """
        Checks the cache to see if there's a cached entry for this pk. If not, fetches 
        using super then stores the result in cache.

        Most of the logic here was gathered from a careful reading of 
        ``django.db.models.sql.query.add_filter``
        """
        if self.query.where:
            # If there is any other ``where`` filter on this QuerySet just call
            # super. There will be a where clause if this QuerySet has already
            # been filtered/cloned.
            return super(CachingQuerySet, self).get(*args, **kwargs)

        # Punt on anything more complicated than get by pk/id only...
        if len(kwargs) == 1:
            k = list(kwargs)[0]
            if k in ('pk', 'pk__exact', '%s' % self.model._meta.pk.attname,
                     '%s__exact' % self.model._meta.pk.attname):
                obj = cache.get(self.model._cache_key(pk=list(kwargs.values())[0]))
                if obj is not None:
                    obj.from_cache = True
                    return obj

        # Calls self.iterator to fetch objects, storing object in cache.
        return super(CachingQuerySet, self).get(*args, **kwargs)
Example #11
File: managers.py Project: boar/boar
    def get(self, *args, **kwargs):
        """
        Checks the cache to see if there's a cached entry for this pk. If not, fetches 
        using super then stores the result in cache.
 
        Most of the logic here was gathered from a careful reading of 
        ``django.db.models.sql.query.add_filter``
        """
        if self.query.where and self.query.where != (None, []):
            # If there is any other ``where`` filter on this QuerySet just call
            # super. There will be a where clause if this QuerySet has already
            # been filtered/cloned.
            return super(CachingQuerySet, self).get(*args, **kwargs)
 
        # Punt on anything more complicated than get by pk/id only...
        if len(kwargs) == 1:
            k = list(kwargs)[0]
            if k in ('pk', 'pk__exact', '%s' % self.model._meta.pk.attname, 
                     '%s__exact' % self.model._meta.pk.attname):
                obj = cache.get(self.model._cache_key(pk=list(kwargs.values())[0]))
                if obj is not None:
                    obj.from_cache = True
                    return obj
 
        # Calls self.iterator to fetch objects, storing object in cache.
        return super(CachingQuerySet, self).get(*args, **kwargs)
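
The fast path only covers lookups by primary key; anything else falls through to the normal query. A toy version of that fast path, with dicts standing in for both the cache and the database:

db = {1: {'pk': 1, 'title': 'hello'}}  # stand-in for the real table
pk_cache = {}  # stand-in for the cache backend

def get_by_pk(pk):
    obj = pk_cache.get(pk)
    if obj is not None:
        return obj  # cache hit, no database work
    obj = db[pk]  # cache miss: do the "real" lookup
    pk_cache[pk] = obj
    return obj
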
Example #12
def tvbs_news_tag_analysis():
    hour = datetime.now().hour
    minute = datetime.now().minute
#    args = request.args
    search_console = request.args.get('gsc', 'Y', type=str)
    day = request.args.get('day', 90, type=int)
    news_tag_summary = cache.get('health_tag_cache' + str(day) + search_console)
    try:
        if (not news_tag_summary) or \
                (hour > 0 and minute == 1) or (hour > 6 and minute == 1) or \
                (hour > 8 and minute == 1) or (hour > 10 and minute == 1) or \
                (hour > 12 and minute == 1) or (hour > 14 and minute == 1) or \
                (hour > 16 and minute == 1) or (hour > 18 and minute == 1) or \
                (hour > 20 and minute == 1) or (hour > 22 and minute == 1):
#            if (not news_tag_summary):
            print('Not cache')
            back_tag_of_dfItem = tra.cache_article_table('health').get_aws_table_cache(day)
            if search_console == 'Y':
                tag_summary = tra.editorTag('health', back_tag_of_dfItem, 'Y').editor_tag_summary()
            else:
                tag_summary = tra.editorTag('health', back_tag_of_dfItem, 'N').editor_tag_summary()
            summary_list = dataframe_to_json(tag_summary)
            news_tag_summary = jsonify(summary_list)
            news_tag_summary.status_code = 200
            cache.set('health_tag_cache' + str(day) + search_console, news_tag_summary, timeout=7200)
            return news_tag_summary
        else:
            print('health_tag_cache')
            return news_tag_summary
    finally:
        print('request get /tvbs_health_tag_analysis')
Example #13
def recommend_article():
    hour = datetime.now().hour
    minute = datetime.now().minute
    recommend_article_cache = cache.get('recommend_cache')
    try:
        if (not recommend_article_cache) or \
                (hour > 9 and 1 <= minute <= 2):
            print('Not cache')
            content = requests.get(
                f"http://127.0.0.1:8010/recommend_api/content_clear")
            top_rank = requests.get(
                f"http://127.0.0.1:8010/recommend_api/top_rank")
            content = content.json()
            top_rank = top_rank.json()
            recommend_article = qr.recommend_article(content,
                                                     top_rank["top_rank"])
            json_recommend_article = jsonify(recommend_article)
            json_recommend_article.status_code = 200
            cache.set('recommend_cache', json_recommend_article, timeout=86400)
            return (json_recommend_article)
        else:
            print('content_cache')
            return recommend_article_cache
    finally:
        print('request get /recommend_article')
Example #14
def get_appid_balance(appid):
    real_pay_list = REAL_PAY.to_dict().keys()
    balance = 0.0
    for real_pay in real_pay_list:
        key = prefix_key('appid:%s:real_pay:%s' % (appid, real_pay))
        balance += float(cache.get(key) or 0)
    return balance
Example #15
	def __html__(self):
		markedup = cache.get('markup_' + str(self.key()))
		if not markedup:
			markedup = markup(self.body, self.markup)
			cache.set('markup_' + str(self.key()), markedup)

		return markedup
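
For purely in-process render-once behaviour, functools.lru_cache is a related alternative to an external cache; shown only as a sketch of a different mechanism, with a stand-in renderer:

from functools import lru_cache

@lru_cache(maxsize=256)
def render_markup(body, markup_name):
    # Stand-in renderer; the real code calls its own markup() helper.
    # markup_name is part of the memoisation key even though this stub ignores it.
    return '<p>{}</p>'.format(body)
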
Example #16
def create_app():
    app = Flask(__name__)

    config = {
        "production": "ProductionConfig",
        "development": "DevelopmentConfig"
    }

    config_name = os.environ.get('APPLICATION_ENV', 'development')
    config_module = importlib.import_module('config.settings')
    config_class = getattr(config_module, config[config_name])
    app.config.from_object(config_class())
    dictConfig(config_class.LOGGING_CONFIG)

    cache.init_app(app)
    if not cache.get('images'):
        images = get_all_images()
        cache.set("images", images)

    images_blueprint = Blueprint('images', __name__)
    app.register_blueprint(images_blueprint)

    api = Api(images_blueprint)

    api.add_resource(ImageSearchController, '/images/<string:search_term>')
    api.add_resource(ImageController, '/images')

    return app
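
A typical way to use the factory (a sketch; the import path is an assumption):

# run.py -- hypothetical entry point
from app import create_app  # assumed module name

app = create_app()

if __name__ == '__main__':
    app.run()
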
Example #17
File: config.py Project: wodim/akari
    def _cache_get(self, section, key):
        if not self.cached:
            raise ConfigCacheMissError('caching is disabled')

        ret = cache.get('%s:%s' % (section, key))
        if not ret:
            raise ConfigCacheMissError('key not in cache')
        return ret
Example #18
def sign_in():
    try:
        _json = request.json
        _code = _json['event code']
        _id = _json['member id']
        _lat = _json['lat']
        _long = _json['long']

        if not (_code and _id and _lat and _long):
            return not_found

        eventId = cache.get(_code)

        if not eventId:
            return goodResp("This is not a valid event code")

        if str(eventId) in session:
            return goodResp("You already signed into this event")

        conn = mysql.connect()
        cursor = conn.cursor(pymysql.cursors.DictCursor)
        cursor.execute(
            "SELECT event_start, event_end, event_lat, event_long FROM events WHERE event_id=%d"
            % eventId)
        rows = cursor.fetchall()

        if not rows:
            return goodResp("That is not a valid event code")

        event = rows[0]
        currTime = takeTime()

        if not (event["event_start"] <= currTime
                and currTime <= event["event_end"]):
            return goodResp("This event is not active")

        userCoords = (_lat, _long)
        eventCoords = (event["event_lat"], event["event_long"])
        dist = geopy.distance.vincenty(userCoords, eventCoords).miles
        if not (dist <= 0.068):
            return goodResp("You are not within the event range")

        cursor.execute(
            "SELECT attendance_time_in FROM attendance WHERE attendance_event_id=%d AND attendance_member_id=%d"
            % (eventId, _id))
        rows = cursor.fetchall()

        if rows:
            return goodResp("You already signed into this event")

        cursor.execute(
            "insert into attendance (attendance_event_id, attendance_member_id, attendance_time_in) values (%d, %d, %d)"
            % (eventId, _id, currTime))
        conn.commit()
        session[str(eventId)] = True
        return goodResp("Successfully signed in")
    except Exception as e:
        print(e)
Example #19
File: views.py Project: zanglang/libcoffee
def sitemap():
	"""Returns a Sitemap.xml of all publised blog posts"""

	xml = cache.get('blog_sitemaps')
	if not xml:
		logging.warning('Regenerating sitemaps.xml...')
		xml = generate_sitemap()

	return Response(xml, mimetype='text/xml')
Example #20
def ec_content():
    minute = datetime.now().minute
    table = cache.get('ec_content')
    if not table or (minute in [0, 15, 30, 45]):
        content = get_data('ec_content')
        table = content.to_json(force_ascii=False)
        cache.set('ec_content', table, timeout=3600)
        return table
    else:
        return table
Example #21
def get_jsapi_ticket():
    key = key_of_jsapi_ticket()
    jsapi_ticket = cache.get(key)
    if not jsapi_ticket:
        data = wechat_client.get_jsapi_ticket()
        jsapi_ticket = data['jsapi_ticket']
        expires_at = data['jsapi_ticket_expires_at']
        timeout = expires_at - int(time.time())
        cache.set(key, jsapi_ticket, timeout)
    return jsapi_ticket
Example #22
def content_update():
    minute = datetime.now().minute
    table = cache.get('supertaste_update')
    if not table or (minute in [0, 15, 30, 45]):
        content = get_data('supertaste_update', 1080)
        table = content.to_json(force_ascii=False)
        cache.set('supertaste_update', table, timeout=3600)
        return table
    else:
        return table
Example #23
def get_article(url):
    data = cache.get(url)
    if data:
        return json.loads(data)
    else:
        parsed_url = urlparse(url)
        article = download_and_parse_article(url)
        encodable = {
            'hostname': parsed_url.hostname,
            'article_html': article.article_html,
            'title': article.title,
            'text': article.text,
            'summary': article.summary,
            'tags': list(article.tags),
            'top_image': article.top_image,
            'authors': article.authors,
            'keywords': article.keywords
        }
        return encodable
Example #24
def gsc():
    #        now = datetime.now()
    #        now_time = now.time()
    table = cache.get('gsc')
    if not table:
        tmp = get_data('gsc', domain='Health')
        table = tmp.to_json(force_ascii=False)
        #            cache.set('news_content_tmp',content,timeout=86100)
        cache.set('gsc', table, timeout=86100)
        return table
    else:
        return table
Example #25
	def __html__(self):
		markedup = cache.get('markup_' + str(self.key()))
		if not markedup:
			# check markup library?
			func = current_app.jinja_env.filters.get('markup')
			if not func:
				return self.content

			markedup = func(self.content, 'Markdown')
			cache.set('markup_' + str(self.key()), markedup)

		return markedup
Example #26
    def _get_details(self):
        details = cache.get("IPDetails:{}".format(self.address))
        if details:
            return details

        details = {"ASN": None, "Holder": None, "Prefix": None}

        if self.ip_object.iptype() in ['RESERVED', 'UNSPECIFIED', 'LOOPBACK',
                                       'UNASSIGNED', 'DOCUMENTATION', 'ULA',
                                       'LINKLOCAL', 'PRIVATE']:
            return details

        found = False
        for cache_entry in cache.keys():
            if cache_entry.startswith("IPDetailsPrefix:"):
                prefix_details = cache.get(cache_entry)
                prefix = IPy.IP(prefix_details["Prefix"])

                if self.ip_object in prefix:
                    details = prefix_details
                    found = True
                    break

        if not found:
            URL = IP.RIPESTAT_URL.format(ip=self.address)

            res = json.loads(urllib2.urlopen(URL).read())

            if res["status"] == "ok":
                if res["data"]["asns"] != []:
                    details["ASN"] = str(res["data"]["asns"][0]["asn"])
                    details["Holder"] = res["data"]["asns"][0]["holder"]
                    details["Prefix"] = res["data"]["resource"]

                    cache.set("IPDetailsPrefix:{}".format(details["Prefix"]),
                              details, 60*60*24*7)

        cache.set("IPDetails:{}".format(self.address), details, 60*60*24*7)

        return details
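
The prefix scan relies on IPy's containment check. A two-line illustration with an RFC 5737 documentation prefix:

import IPy

prefix = IPy.IP('192.0.2.0/24')  # documentation prefix, safe example value
print(IPy.IP('192.0.2.10') in prefix)  # -> True
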
Example #27
def index():
    client_id = request.cookies.get('client_id', False)
    res = make_response(render_template('index.html'))
    if not client_id:
        client_id = set_client_id()
        res.set_cookie('client_id', client_id)
    cache_api = cache.get(client_id)
    if cache_api is None:
        client_api = API()
        cache.set(client_id, client_api)
    else:
        cache.set(client_id, cache_api, timeout=60 * 60 * 24 * 1)
    return res
Example #28
 def get(self, station_id=None):
     if station_id is None:
         station_id = app.config['STATION_ID']
     weather_data = cache.get(
         str('station_%s_currents' % station_id)
     )
     if weather_data is not None:
         weather_data = eval(weather_data)
     response = jsonify(
         weather_data
         )
     response.headers.add('Cache-Control', 'max-age=%s' % app.config['CACHE_TIMEOUT'])
     return response
Example #29
 def all(self):
     key = instance._get_cache_key(field=field_name)
     qs = super(CachingRelatedManager, self).get_query_set()
     PKListQuerySet = get_pk_list_query_set(qs.__class__)
     qs = qs._clone(klass=PKListQuerySet)
     pk_list = cache.get(key)
     if pk_list is None:
         pk_list = qs.values_list('pk', flat=True)
         cache.add(key, pk_list, CACHE_DURATION)
     else:
         qs.from_cache = True
     qs.pk_list = pk_list
     return qs
Example #30
 def all(self):
     key = instance._get_cache_key(field=field_name)
     qs = super(CachingRelatedManager, self).get_query_set()
     PKListQuerySet = get_pk_list_query_set(qs.__class__)
     qs = qs._clone(klass=PKListQuerySet)
     pk_list = cache.get(key)
     if pk_list is None:
         pk_list = list(qs.values_list('pk', flat=True))
         cache.add(key, pk_list, CACHE_DURATION)
     else:
         qs.from_cache = True
     qs.pk_list = pk_list
     return qs
Example #31
def dislike_undo():
    username = cache.get("username")
    article_id = request.form["id"]
    doc_type = request.form["doc_type"]
    delete_user_feedback(username, "dislike", article_id)
    entries = get_users_likes(username)
    script = {"script": {"inline": "ctx._source.dislike -= 1"}}
    es.update(index="testindex", doc_type=doc_type, body=script, id=article_id)
    logs = []
    for entry in entries:
        logs.append(entry["id"])
    cache.set(username + "_like_log", logs)
    return jsonify({})
Example #32
def getContent():
    words = request.form.get('words', False)
    client_id = request.form.get('client_id', False)
    if not words:
        return {'status': 0, 'message': '请输入一句话!'}
    if not client_id:
        return {'status': 0, 'message': '系统缓存丢失, 将会自动刷新页面, 点击继续'}

    cache_api: API = cache.get(client_id)
    if cache_api is None:
        return {'status': 0, 'message': '系统缓存丢失, 将会自动刷新页面, 点击继续'}

    res = cache_api.getContent(words)
    return {'status': 1, 'message': res}
Example #33
def check_count(key, max_sent, max_count, forbid_time, default_expire):
    count = cache.get(key)
    if count is not None:
        cache.incr(key)
        if int(count) < max_sent:
            return True
        elif int(count) >= max_count:
            cache.expire(key, forbid_time)
            return False
        else:
            return False
    else:
        cache.setex(key, default_expire, 1)
        return True
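
The same counter-with-expiry idea can be written directly against redis-py; a sketch assuming a local Redis, with limit and window_seconds as illustrative parameters:

import redis

r = redis.Redis()  # assumes localhost:6379

def allow(key, limit, window_seconds):
    count = r.incr(key)  # INCR creates the key at 1 if it is missing
    if count == 1:
        r.expire(key, window_seconds)  # start the window on the first hit
    return count <= limit
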
Example #34
def update_sentiment_pie(sentiment_term):
    # get data from cache
    # for i in range(100):
    #     sentiment_pie_dict = {}cache.get("")
    #     if sentiment_pie_dict:
    #         break
    #     time.sleep(0.1)
    # sentiment_pie_dict = [99,2]

    # if not sentiment_pie_dict:
    #     return None
    if sentiment_term=="":
        sentiment_term = "all"
    labels = ['Positive','Negative']
    try:
        pos = int(cache.get("positive_count").decode("utf8"))
        neg = int(cache.get("negative_count").decode("utf8"))
        neu = int(cache.get("neutral_count").decode("utf8"))
    except AttributeError:
        # the cache is not initialized yet
        historical_df = database.get_recent_reddit(sentiment_term, num=100000)
        pos, neg, neu = update_pos_neg_neutral_cache(sentiment_term, historical_df)
        del historical_df
    values = [pos,neg]
    colors = ['#007F25', '#800000']

    # the hoverinfo is shown when you hover on the chart
    # the textinfo is shown on the chart when not hovered
    trace = go.Pie(labels=labels,values=values,hoverinfo="label+percent",textinfo="value")

    return {"data":[trace],
            'layout':go.Layout(
                    title='Positive vs Negative sentiment for "{}" (historical)'.format(sentiment_term),
                    showlegend=True
                    )
            }
Example #35
    def get(self):
        """
        Get method to obtain the payload from the launch_id
        :param launch_id
        :return:
        """
        token = request.values['token']
        logs.api_logger.info("DeepLinking Payload Data: " + str(token),
                             extra={
                                 "clientip": request.remote_addr,
                                 'path': request.path,
                                 "user": request.remote_user
                             })
        payload = cache.get(token)

        return payload
Example #36
def like():
    username = cache.get("username")
    title = request.form["title"]
    article_id = request.form["id"]
    doc_type = request.form["doc_type"]
    print(doc_type)
    date = datetime.now().strftime("%Y/%m/%d %H:%M:%S")
    insert_user_feedback(username, title, date, "like", article_id)
    entries = get_users_likes(username)
    script = {"script": {"inline": "ctx._source.like +=1"}}
    es.update(index="testindex", doc_type=doc_type, body=script, id=article_id)
    logs = []
    for entry in entries:
        logs.append(entry["id"])
    cache.set(username + "_like_log", logs)
    return jsonify({})
Example #37
def get_reservation_forecast(keys):
    if not isinstance(keys, list):
        keys = list(keys)
    keys = set(
        models.AWSKey.query.filter_by(
            key=k).first() if isinstance(k, basestring) else k for k in keys)
    if not all(isinstance(k, models.AWSKey) for k in keys):
        raise TypeError('All keys must be strings or AWSKeys.')
    cache_key = 'get_reservation_forecast#' + models.MultikeyGroup.id_of(keys)
    cached = cache.get(cache_key)
    if cached:
        unpacked = decompressed_json(cached)
    else:
        unpacked = compute_reservation_forecast(keys)
        cache.setex(cache_key, 12 * 60 * 60, compressed_json(unpacked))
    return unpacked
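
compressed_json and decompressed_json are project helpers; a plausible stand-in pair using zlib plus json, offered purely as an assumption of what they do:

import json
import zlib

def compressed_json(obj):
    return zlib.compress(json.dumps(obj).encode('utf-8'))

def decompressed_json(blob):
    return json.loads(zlib.decompress(blob).decode('utf-8'))
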
Example #38
def update_pie_chart(sentiment_term, n_intervals):

    # get data from cache
    for i in range(100):
        sentiment_pie_dict = cache.get('sentiment_shares', sentiment_term)
        if sentiment_pie_dict:
            break
        time.sleep(0.1)

    if not sentiment_pie_dict:
        return None

    labels = ['Positive', 'Negative']

    try:
        pos = sentiment_pie_dict[1]
    except:
        pos = 0

    try:
        neg = sentiment_pie_dict[-1]
    except:
        neg = 0

    values = [pos, neg]
    colors = ['#007F25', '#800000']

    trace = go.Pie(labels=labels,
                   values=values,
                   hoverinfo='label+percent',
                   textinfo='value',
                   textfont=dict(size=20, color=app_colors['text']),
                   marker=dict(colors=colors,
                               line=dict(color=app_colors['background'],
                                         width=2)))

    return {
        "data": [trace],
        'layout':
        go.Layout(
            title='Positive vs Negative sentiment for "{}" (longer-term)'.
            format(sentiment_term),
            font={'color': app_colors['text']},
            plot_bgcolor=app_colors['background'],
            paper_bgcolor=app_colors['background'],
            showlegend=True)
    }
Example #39
def content_clear():
    hour = datetime.now().hour
    minute = datetime.now().minute
    content_cache = cache.get('content_cache')
    try:
        if (not content_cache) or \
                (hour > 9 and 1 <= minute <= 2):
            print('Not cache')
            dict_sc = qr.cleaning_content()
            json_dict_sc = jsonify(dict_sc)
            json_dict_sc.status_code = 200
            cache.set('content_cache', json_dict_sc, timeout=86400)
            return (json_dict_sc)
        else:
            print('content_cache')
            return content_cache
    finally:
        print('request get /content_clear')
Example #40
    def process_request(self, request):

        # Only run if this is a POST request
        if not request.method == 'POST':
            return None

        url = request.get_full_path()

        # Only operate on if the view is marked as enabling sticky files
        if not ('file_upload_cache' in request.session and
                url in request.session['file_upload_cache']):
            return None

        # Retrieve any previous request.FILES
        restored_files_dict = cache.get(request.session.session_key)
        if restored_files_dict:
            restored_files_dict.update(request.FILES)

            # Merge current request.FILES with anything we had previously
            request._files = restored_files_dict

        # Save request.FILES for subsequent requests
        if request.FILES:
            cache.set(request.session.session_key, request.FILES)