Example #1
    def get_stock_by_name(self, name):
        """
        get stocks from the redis with pattern matching and return in dict format
        :param
            name: name to search
        :return:
            success response : {"status":1,"data":"--dict of stocks--"}
            fail response : {"status":0,"data":"--error message--"}
        """
        res = {"status": 0, "data": ""}
        try:
            redis_conn_res = get_redis_connection()
            if not redis_conn_res["status"]:
                return redis_conn_res
            redis_conn = redis_conn_res["data"]

            keys = redis_conn.scan_iter(match='STOCK:*' + str(name).upper() +
                                        '*')
            stocks = []
            for key in keys:
                reg = redis_conn.hgetall(key)
                stocks.append(reg)
            res["status"], res["data"] = 1, stocks
        except Exception as e:
            traceback.print_exc()
            res["data"] = "Something went wrong, Kindly try again."
        return res
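
For context, a minimal usage sketch of the {"status", "data"} response shape this method returns. The StockService class name is hypothetical; the source only shows the method body:

# Hypothetical usage sketch: branch on the "status" flag before reading "data".
service = StockService()  # assumed container class for get_stock_by_name
result = service.get_stock_by_name("RELIANCE")
if result["status"]:
    for stock in result["data"]:
        print(stock)
else:
    print("lookup failed:", result["data"])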
Example #2
 def delay(*args, **kwargs):
     db = config.get_redis_connection()
     qkey = settings.TASK_QUEUE_NAME
     task_id = str(uuid.uuid4())
     key = '{0}:{1}'.format(qkey, task_id)
     s = pickle.dumps((f, task_id, args, kwargs))
     db.rpush(settings.TASK_QUEUE_NAME, s)
     return DelayedResult(task_id)
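
Note that f is a free variable in this snippet, which suggests delay is defined inside a decorator closure. A hedged sketch of what that enclosing decorator might look like; the name delayable is an assumption, not from the source:

import pickle
import uuid

def delayable(f):
    # Hypothetical enclosing decorator: attach a .delay() method to f that
    # pushes the pickled call onto the Redis task queue for the worker in
    # Example #4 to pick up. config and settings are the modules used above.
    def delay(*args, **kwargs):
        db = config.get_redis_connection()
        task_id = str(uuid.uuid4())
        s = pickle.dumps((f, task_id, args, kwargs))
        db.rpush(settings.TASK_QUEUE_NAME, s)
        return DelayedResult(task_id)
    f.delay = delay
    return f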
Example #3
    def get_top_stocks(self, page_no):
        """
        get top stocks from the redis and return in dict format
        :param
            page_no: page no for pagination
        :return:
            success response : {"status":1,"data":"--dict of stocks--","count":"--total count of stocks--"}
            fail response : {"status":0,"data":"--error message--"}
        """
        res = {"status": 0, "data": ""}
        try:
            redis_conn_res = get_redis_connection()
            if not redis_conn_res["status"]:
                return redis_conn_res
            redis_conn = redis_conn_res["data"]

            total_count = 0
            if page_no == 0:
                total_count = redis_conn.zcount(
                    "search_sorted", "-inf", "+inf"
                )  # the number of elements in the specified score range
                if total_count == 0:
                    # If the data is not loaded yet, call the parser to load it.
                    res = self.get_latest_stocks()
                    if res["status"]:
                        total_count = redis_conn.zcount(
                            "search_sorted", "-inf", "+inf")
                    else:
                        return res
            start_index = page_no * self.pagination_size
            end_index = start_index + self.pagination_size - 1

            keys = redis_conn.zrevrange("search_sorted",
                                        start_index,
                                        end_index,
                                        withscores=False)
            top_stocks = []
            for key in keys:
                reg = redis_conn.hgetall(key)
                top_stocks.append(reg)
            date = redis_conn.get("latest_date")
            res["status"], res["data"], res["count"], res["date"] = \
                1, top_stocks, total_count, date
        except Exception as e:
            traceback.print_exc()
            res["data"] = "Something went wrong, Kindly try again."
        return res
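
Since start_index is page_no * pagination_size and end_index is inclusive, a pagination_size of 10 maps page 0 to sorted-set indexes 0-9 and page 2 to indexes 20-29. A minimal usage sketch; the class name and pagination size are assumptions:

# Hypothetical usage sketch: fetch the first page of top stocks.
service = StockService()  # assumed container class; pagination_size e.g. 10
page = service.get_top_stocks(0)
if page["status"]:
    print(page["count"], "stocks as of", page["date"])
    for stock in page["data"]:
        print(stock)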
Example #4
def queue_daemon(app, rv_ttl=settings.TASK_QUEUE_KEY_TTL):
    db = config.get_redis_connection()
    while True:
        msg = db.blpop(settings.TASK_QUEUE_NAME)
        print('Running task: {0}'.format(msg))
        func, task_id, args, kwargs = pickle.loads(msg[1])
        qkey = settings.TASK_QUEUE_NAME
        key = '{0}:{1}'.format(qkey, task_id)
        data = {'date': time.time(), 'task_id': task_id, 'status': 'running', 'result': None}
        db.set(key, json.dumps(data))
        try:
            rv = func(*args, **kwargs)
            data['status'] = 'complete'
        except Exception as e:
            import traceback
            traceback.print_exc()
            rv = e
            data['status'] = 'error'
        if not isinstance(rv, dict):
            rv = str(rv)
        data['result'] = rv
        db.set(key, json.dumps(data))
        db.expire(key, rv_ttl)
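
blpop blocks until an item is available and returns a (list_name, payload) pair, which is why the payload is unpacked from msg[1]. A minimal, hypothetical entry point for running this worker; create_app is assumed to come from the same config module used in Example #7:

if __name__ == '__main__':
    # Run the worker loop against the application's configuration.
    queue_daemon(config.create_app())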
Example #5
 def __init__(self, key):
     self.db = config.get_redis_connection()
     self.key = key
     self._rv = None
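
This constructor only stores the connection, the key, and a cache slot. A plausible completion of DelayedResult, assuming the JSON result convention used by the daemon in Example #4 and that the caller passes the full <queue>:<task_id> key; the return_value property name is an assumption:

import json

class DelayedResult(object):
    def __init__(self, key):
        self.db = config.get_redis_connection()  # config as in the snippet above
        self.key = key
        self._rv = None

    @property
    def return_value(self):
        # Hypothetical accessor: lazily fetch and cache the stored JSON result.
        if self._rv is None:
            rv = self.db.get(self.key)
            if rv is not None:
                self._rv = json.loads(rv)
        return self._rv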
Example #6
def task_status(task_id=None):
    rds = get_redis_connection()
    v = rds.get('{0}:{1}'.format(app.config.get('TASK_QUEUE_NAME'), task_id))
    if not v:
        v = jsonify({'status': 'fail', 'result': 'invalid task'})
    return v
Example #7
from flask import Blueprint
from flask import request, jsonify, session, Response, json
from config import create_app, get_redis_connection
from decorators import api_key_required
import tempfile
from utils.applications import deploy_application, delete_application
import time

app = create_app()
bp = api_blueprint = Blueprint('api', __name__)
rds = get_redis_connection()

def generate_task_response(task_id=None):
    return jsonify({'status': 'ok', 'task_id': task_id})

def wait_for_task(task_id=None):
    key = '{0}:{1}'.format(app.config.get('TASK_QUEUE_NAME'), task_id)
    while True:
        try:
            if json.loads(rds.get(key))['status'] != 'running':
                break
        except Exception:
            # if the task hasn't started yet, the result key is not set,
            # so json.loads fails on the empty value
            pass
        time.sleep(1)
    return rds.get(key)

@bp.route('')
def index():
    data = {
Example #8
import json
import logging

from config import get_multivac_db, get_redis_connection
from flask import Blueprint, request, Response
from multivac.process_data import process_data
from rq import Queue

db = get_multivac_db()
redis_connection = get_redis_connection()

log = logging.getLogger(__name__)
log.setLevel(logging.DEBUG)
log.debug(db)

multivac_bp = Blueprint('multivac', __name__, template_folder="templates")


@multivac_bp.route("/")
def homepage():
    return "Welcome to application!"


@multivac_bp.route("/multivac", methods=['GET'])
def get_multivac():
    n_answer = db.entropy.find().count()
    if n_answer == 0:
        return "INSUFFICIENT DATA FOR MEANINGFUL ANSWER."
    else:
        answer = db.entropy.find_one()['data']
        return answer
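
Queue is imported from rq but unused in this excerpt, which suggests other routes enqueue work. A hedged sketch of what such a producer endpoint could look like; the route path, payload handling, and response shape are assumptions:

@multivac_bp.route("/multivac", methods=['POST'])
def post_multivac():
    # Hypothetical producer: enqueue process_data on an rq queue backed by
    # the module-level Redis connection.
    q = Queue(connection=redis_connection)
    job = q.enqueue(process_data, request.get_json())
    return Response(json.dumps({'job_id': job.id}),
                    mimetype='application/json')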
Example #9
    def load_zip_to_redis(self):

        res = {"status": 0, "data": ""}
        try:
            # Fetch the zip URL
            zip_url_res = self.get_zip_url()
            if not zip_url_res["status"]:
                return zip_url_res

            # fetch and extract zip file
            csv_file_res = self.extract_csv_file(zip_url_res['data'])
            if not csv_file_res["status"]:
                return csv_file_res
            csv_path = csv_file_res["data"]
            csv_file_name = csv_file_res["name"]
            csv_file_date = csv_file_name[2:4] + "-" + csv_file_name[
                4:6] + "-20" + csv_file_name[6:8]

            # Get redis connection
            redis_conn_res = get_redis_connection()
            if not redis_conn_res["status"]:
                return redis_conn_res
            redis_conn = redis_conn_res["data"]

            loaded_data_date = redis_conn.get("latest_date")
            if csv_file_date == loaded_data_date:
                res["data"] = "Database already have latest data."
                return res

            # Open csv file and read it
            csv_list = csv.DictReader(open(csv_path, 'r'))
            # start redis pipeline, with transaction
            redis_pipeline = redis_conn.pipeline(transaction=True)
            redis_pipeline.flushdb()
            for row in csv_list:
                stripped_key = "STOCK:" + row['SC_CODE'].rstrip(
                ) + ":" + row['SC_NAME'].rstrip()
                value = {
                    'name': row['SC_NAME'].rstrip(),
                    'code': row['SC_CODE'],
                    'open': float(row['OPEN']),
                    'close': float(row['CLOSE']),
                    'high': float(row['HIGH']),
                    'low': float(row['LOW'])
                }
                percentage = round(
                    ((value['close'] - value['open']) / value['open']) * 100,
                    2)
                value['percentage'] = percentage
                redis_pipeline.hmset(stripped_key, value)

                # Assuming the intended order for top stock entries is
                # "highest positive percentage movement first": use a sorted
                # set with the percentage as the score.
                redis_pipeline.zadd("search_sorted",
                                    {stripped_key: percentage})

            # Store the date for which the bhavcopy has been loaded into Redis
            print("latest_date", csv_file_date)
            redis_pipeline.set("latest_date", csv_file_date)

            # Execute the pipeline
            redis_pipeline.execute()

            res["status"] = 1
            res["data"] = "Done."
        except Exception as e:
            traceback.print_exc()
            res["data"] = "Something went wrong, Kindly try again."
        return res
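
A hedged end-to-end sketch tying this loader to the readers in Examples #1 and #3; the StockService class name is an assumption, since the source only shows method bodies:

# Hypothetical wiring: load the latest bhavcopy, then read the first page.
service = StockService()  # assumed container class for these methods
load_res = service.load_zip_to_redis()
if load_res["status"]:
    print(service.get_top_stocks(0)["data"])
else:
    print("load failed:", load_res["data"])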