import json

from flask import Blueprint, Response, request

from infrastructure import log
# Assumption: the client service module lives alongside this controller.
from api.client import service


def get_client_controller(config):

    logger = log.get_logger("get_client_controller")
    client_b = Blueprint("client", __name__)
    client_service = service.Client(config)

    @client_b.route("/list/<int:year>", methods=["get"])
    def get_clients(year):
        return send_response("success", "get_clients", client_service.get_list, year=year)

    @client_b.route("/", methods=["post", "put"])
    def create_client():
        if request.method == 'PUT':
            return send_response("Creación exitosa", "create_client", client_service.create, request=request)
        if request.method == 'POST':
            return send_response("Actualización exitosa", "create_client", client_service.update, request=request)

    def send_error(error_msg, func_name, data):
        error = {"error": f"{func_name} error: {str(error_msg)}", "data": data}, 400
        logger.info(error[0]["error"])
        return error

    def send_response(success_message, func_name, func, **kwargs):
        try:
            data = func(**kwargs)
            return Response(json.dumps({"data": data, "message": success_message}), mimetype='application/json')
        except Exception as ex:
            return send_error(ex, func_name, [])

    return client_b
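A minimal usage sketch, assuming the infrastructure.config.Config class shown in the main module further below: build the blueprint and register it on a Flask app so its routes answer under a prefix.

from flask import Flask
from infrastructure import config

app = Flask(__name__)
app.register_blueprint(get_client_controller(config.Config()),
                       url_prefix="/api/cartera/client")
# GET /api/cartera/client/list/2020 -> the client list for 2020 as JSON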
Example No. 2

def __init__(self, descriptor, stage, s3_config, s3_thor_config):
    self.descriptor = descriptor
    self.s3_config = s3_config
    self.s3_thor_config = s3_thor_config
    self.logger = log.get_logger(self.descriptor.data_descriptor['name'])
    self.s3_client = None
    self.s3_thor_client = None
    self.channel_controller = None
    self.get_info = None
    self.get_dictionary = None
    self.monitor_client = None
    self.process_uuid = None
    self.rds_controller = None
    self.count_data_sent = 0
    self.stage = stage['env']
    self.obj_fixed_prices = {}
Example No. 3
from infrastructure import log
from datetime import date, timedelta

logger = log.get_logger("domain")

class Domain:

    def __init__(self, mongo, download):
        self.mongo = mongo
        self.download = download

    def download_and_save_stock_current_data(self, stock):
        quote = stock["symbol"]
        logger.info("stock %s", quote)
        stock_current_data = self.download.get_stock_current_data(quote)
        self.mongo.upsert_stock_current_data(quote, stock_current_data)

    def download_and_save_stock_historical_data(self, initialDate, finalDate, stock):
        quote = stock["symbol"]
        logger.info('stocks %s, %s, %s', initialDate, finalDate, quote)
        stock_historical_data_array = self.download.get_stock_historical_data(initialDate, finalDate, quote)
        self.mongo.save_stock_historical_data(quote, stock_historical_data_array)

    def stock_exists(self, quote):
        return self.mongo.stock_exists(quote)

    def get_stock_list(self):
        return self.mongo.read_stocks_from_stock_list()

    def add_stock_to_stock_list(self, stock):
        self.mongo.save_stock_list([stock])
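A hedged wiring sketch: Domain only needs collaborators exposing the mongo and download methods it calls, so the Mongo and Download classes from the later examples fit (their construction details are assumptions).

mongo_store = Mongo()    # assumed: a Mongo instance as in Example No. 13
downloader = Download()  # the Download class from Example No. 6
domain_logic = Domain(mongo_store, downloader)
domain_logic.add_stock_to_stock_list(
    {"name": "Apple Inc.", "symbol": "AAPL", "stockMarket": "nasdaq"})
domain_logic.download_and_save_stock_current_data({"symbol": "AAPL"})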
Example No. 4
from flask import Flask
from flask_cors import CORS
from api.client import controller as client
from api import healthCheck as health
from infrastructure import config
import infrastructure.log as log

logger = log.get_logger("Main")

app_name = "cartera"
prefix = f'/api/{app_name}'
app = Flask(__name__)
cors = CORS(app)

configData = config.Config()

health_check_controller = health.get_health_check_controller()
client_controller = client.get_client_controller(configData)

app.register_blueprint(health_check_controller, url_prefix=f'{prefix}')
app.register_blueprint(client_controller, url_prefix=f'{prefix}/client')

if __name__ == "__main__":
    logger.info("--------------- Init Cartera Api -------------")
    app.run(port=80, host="0.0.0.0")
Example No. 5

import sys
import threading
import uuid
import datetime
import logging
from infrastructure import config, log
import Manager.controller as retailer_controller
from Descriptor import master

LOG_FORMAT = ('%(levelname) -10s %(asctime)s %(name) -30s %(funcName) '
              '-35s %(lineno) -5d: %(message)s')
LOGGER = logging.getLogger(__name__)

# Configure the root logger so all `logging` instances are captured.
logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)

logger = log.get_logger("main")
jobs_running = {}


def get_config():
    config_path = sys.argv[1]
    return config.get_config(config_path)


def schedule_jobs(configuration):

    # Configuration
    s3_config = configuration['s3']
    s3_thor_config = configuration['s3_thor']
    stage = configuration['stage']
Example No. 6
import urllib.parse
import requests
from infrastructure import json
from infrastructure import log

logger = log.get_logger("download")

class Download:

    def get_stock_historical_data(self, initialDate, finalDate, quote):
        stock_historical_data_array = []
        base_url = "https://query.yahooapis.com/v1/public/yql"
        select_statement = (f'select * from yahoo.finance.historicaldata where symbol = "{quote}" '
                            f'and startDate = "{initialDate}" and endDate = "{finalDate}"')
        query = urllib.parse.quote(select_statement)
        result_format = 'json'
        env = urllib.parse.quote("store://datatables.org/alltableswithkeys")
        url = f"{base_url}?q={query}&format={result_format}&env={env}"
        response = requests.get(url)
        status_code = response.status_code
        body = response.json()
        if status_code == 200:
            count = body["query"]["count"]
            if count > 0:
                stock_historical_data_array = body["query"]["results"]["quote"]
                stock_historical_data_array = json.json_keys_to_lower_and_snake_case(stock_historical_data_array)
        else:
            logger.error("get_stock_historical_data: status_code: %i, body: %s", status_code, body)
        return stock_historical_data_array

    def get_stock_current_data(self, quote):
        stock_current_data = None
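A usage sketch for the historical-data call. Note that Yahoo has since retired the YQL endpoint queried above, so treat this purely as an illustration of the request flow.

downloader = Download()
# Dates travel into the YQL query as "YYYY-MM-DD" strings.
rows = downloader.get_stock_historical_data("2016-01-01", "2016-01-31", "AAPL")
for row in rows:
    print(row)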
Example No. 7
from infrastructure import log
from datetime import date, timedelta

logger = log.get_logger("domain")


class Domain:
    def __init__(self, mongo, download):
        self.mongo = mongo
        self.download = download

    def download_and_save_stock_current_data(self, stock):
        quote = stock["symbol"]
        logger.info("stock %s", quote)
        stock_current_data = self.download.get_stock_current_data(quote)
        self.mongo.upsert_stock_current_data(quote, stock_current_data)

    def download_and_save_stock_historical_data(self, initialDate, finalDate,
                                                stock):
        quote = stock["symbol"]
        logger.info('stocks %s, %s, %s', initialDate, finalDate, quote)
        stock_historical_data_array = self.download.get_stock_historical_data(
            initialDate, finalDate, quote)
        self.mongo.save_stock_historical_data(quote,
                                              stock_historical_data_array)

    def stock_exists(self, quote):
        return self.mongo.stock_exists(quote)

    def get_stock_list(self):
        return self.mongo.read_stocks_from_stock_list()
Example No. 8
import threading

from flask import request, Blueprint, jsonify
from infrastructure import log

logger = log.get_logger("stocks_api")


def get_stocks_blueprint(domain, job, time_series):
    stocks_blueprint = Blueprint('stocks_api', __name__)

    @stocks_blueprint.route('', methods=['POST'])
    def add_stock():
        response = None
        new_stock = request.get_json()
        logger.info("post: %s", new_stock)
        if new_stock is None:
            response = jsonify({
                "error":
                "Please provide a stock in the request body. It should have a name, a symbol and a stock market"
            }), 400
            return response
        name = new_stock.get("name", None)
        quote = new_stock.get("symbol", None)
        stock_market = new_stock.get("stockMarket", None)
        is_valid_stock = name and quote and stock_market
        if not is_valid_stock:
            response = jsonify({
                "error":
                "Please provide a valid stock. It should have a name, a symbol and a stock market"
            }), 400
            return response
Example No. 9
import datetime
from concurrent.futures import ThreadPoolExecutor
from infrastructure import log

logger = log.get_logger("time_series")


class TimeSeries:

    DB_NAME = "stockreaderdb"
    NUMBER_OF_WORKERS = 5

    def __init__(self, influx_client):
        self.influx = influx_client
        self.influx.create_database(self.DB_NAME)
        self.influx.switch_database(self.DB_NAME)
        self.thread_pool = ThreadPoolExecutor(
            max_workers=self.NUMBER_OF_WORKERS)

    def save_async(self, measurement, tags_dict, fields_dict):
        data = {
            "measurement": measurement,
            "tags": tags_dict,
            "time": datetime.datetime.now().isoformat(),
            "fields": fields_dict
        }
        points = [data]
        self.thread_pool.submit(self.influx.write_points, points)
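A short usage sketch with the influxdb-python client; the host and port are placeholder assumptions.

from influxdb import InfluxDBClient

influx_client = InfluxDBClient(host="localhost", port=8086)  # placeholder connection details
time_series = TimeSeries(influx_client)
# Queues one point named "API"; write_points runs on a pool worker thread.
time_series.save_async("API", {"method": "ping"}, {"count": 1})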
Example No. 10
import csv
from infrastructure import log

logger = log.get_logger("read")


class Read:
    def read_stocks_from_file(self, file_path, stock_market):
        stocks = []
        # Use a context manager so the file handle is always closed.
        with open(file_path) as stocks_file:
            reader = csv.reader(stocks_file)
            next(reader)  # Skip the first headers row.
            for row in reader:
                # First-column values look like "Company Name (SYMBOL)".
                first_array = row[0].split("(")
                name = first_array[0].strip()
                second_array = first_array[1].split(")")
                quote = second_array[0].strip()
                stock = {
                    "name": name,
                    "symbol": quote,
                    "stockMarket": stock_market
                }
                stocks.append(stock)
        return stocks

    def read_stocks_from_multiple_files(self, file_path_list, stock_market):
        stocks = []
        for file_path in file_path_list:
            stocks.extend(self.read_stocks_from_file(file_path, stock_market))
        return stocks
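A usage sketch; the parser above implies first-column values like "Apple Inc. (AAPL)", and the file names here are placeholders.

reader = Read()
stocks = reader.read_stocks_from_multiple_files(
    ["nasdaq_batch_1.csv", "nasdaq_batch_2.csv"], "nasdaq")
print(stocks[0])  # e.g. {"name": "Apple Inc.", "symbol": "AAPL", "stockMarket": "nasdaq"}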
Example No. 11
import time

import psycopg2
import psycopg2.extras

from infrastructure import log

logger = log.get_logger("Postgres")


class Connector:
    def __init__(self, config):
        self.host = config['hostname']
        self.database = config['database']
        self.user = config['username']
        self.password = config['password']
        self.connection = None

    def connect(self):
        attempts = 1
        while not self.connection:
            try:
                self.connection = psycopg2.connect(host=self.host,
                                                   database=self.database,
                                                   user=self.user,
                                                   password=self.password)
            except Exception as e:
                attempts += 1
                logger.error("Postgres connection error: %s", e)
                logger.info("Retrying postgres connection, attempt %d", attempts)
                time.sleep(1)  # Avoid hammering the server between retries.

            if attempts > 10:
                break
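A usage sketch; the keys match what the constructor reads, the values are placeholders.

postgres_config = {
    "hostname": "localhost",
    "database": "cartera",
    "username": "postgres",
    "password": "secret",
}
connector = Connector(postgres_config)
connector.connect()  # retries up to 10 times, then gives up quietly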
Example No. 12
def __init__(self, config):
    self.logger = log.get_logger("Client")
    self.config = config
    self.repo = repository.Repo(self.config.POSGRES)
    self.client_attrs = ["id", "name", "payment"]
    # Sample data: a flat monthly fee of 300000, with only month 1 paid.
    client1 = {
        "id": 'wxctfvygbhnjmkl',
        "name": 'Vilariño',
        "subs_value": 300000,
        "period": {
            "year": 2020,
            "months": [{"id": month, "value": 300000, "isPay": month == 1}
                       for month in range(1, 13)],
        },
    }
    # Note the schema difference: client2 keeps a flat "periods" list whose
    # first entry uses "month"/"year" keys while the rest use "id".
    client2 = {
        "id": 'tfcvygbhunjimokjhg',
        "name": 'Ormeño',
        "subs_value": 300000,
        "periods": ([{"month": 1, "value": 300000, "isPay": True, "year": 2020}]
                    + [{"id": month, "value": 300000, "isPay": False}
                       for month in range(2, 13)]),
    }
    self.clients = [client1, client2]
Example No. 13
import pymongo
from pymongo import MongoClient
from pymongo.errors import DuplicateKeyError, BulkWriteError
from infrastructure import log

logger = log.get_logger("mongo")

class Mongo:

    SYMBOL_KEY = "symbol"
    DATE_KEY = "date"

    TRADING_DAYS_PER_YEAR = 252

    STOCK_LIST = "stocklist"
    STOCKS_CURRENT_DATA = "stocks_current_data"
    HISTORICAL_DATA_SUFIX = "_historical_data"

    def collection_exists(self, collection_name):
        # list_collection_names() replaces the collection_names() call
        # deprecated in pymongo >= 3.7.
        return collection_name in self.db.list_collection_names()

    def create_regular_collection_if_not_exists(self, collection_name, index_key):
        if not self.collection_exists(collection_name):
            collection = self.db[collection_name]
            collection.create_index([(index_key, pymongo.ASCENDING)], unique=True)

    def create_historical_collection_if_not_exists(self, collection_name):
        if not self.collection_exists(collection_name):
            collection = self.db[collection_name]
            collection.create_index([(self.SYMBOL_KEY, pymongo.ASCENDING), (self.DATE_KEY, pymongo.DESCENDING)], unique=True)
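These snippets never show Mongo's constructor, so this sketch wires the db handle by hand with a plain pymongo database; treat it as illustrative only.

from pymongo import MongoClient

mongo_store = Mongo()
# Assumption: in the real project the constructor sets self.db; done by hand here.
mongo_store.db = MongoClient("localhost", 27017)["stockreaderdb"]
mongo_store.create_regular_collection_if_not_exists(Mongo.STOCK_LIST, Mongo.SYMBOL_KEY)
mongo_store.create_historical_collection_if_not_exists("aapl" + Mongo.HISTORICAL_DATA_SUFIX)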
Example No. 14
import csv
from infrastructure import log

logger = log.get_logger("read")

class Read:

    def read_stocks_from_file(self, file_path, stock_market):
        stocks = []
        # Use a context manager so the file handle is always closed.
        with open(file_path) as stocks_file:
            reader = csv.reader(stocks_file)
            next(reader)  # Skip the first headers row.
            for row in reader:
                first_array = row[0].split("(")
                name = first_array[0].strip()
                second_array = first_array[1].split(")")
                quote = second_array[0].strip()
                stock = {"name": name, "symbol": quote, "stockMarket": stock_market}
                stocks.append(stock)
        return stocks

    def read_stocks_from_multiple_files(self, file_path_list, stock_market):
        stocks = []
        for file_path in file_path_list:
            stocks.extend(self.read_stocks_from_file(file_path, stock_market))
        return stocks
Example No. 15
import threading

from flask import request, Blueprint, jsonify
from infrastructure import log

logger = log.get_logger("stocks_api")

def get_stocks_blueprint(domain, job, time_series):
    stocks_blueprint = Blueprint('stocks_api', __name__)

    @stocks_blueprint.route('', methods=['POST'])
    def add_stock():
        response = None
        new_stock = request.get_json()
        logger.info("post: %s", new_stock)
        if new_stock is None:
            response = jsonify({ "error": "Please provide a stock in the request body. It should have a name, a symbol and a stock market" }), 400
            return response
        name = new_stock.get("name", None)
        quote = new_stock.get("symbol", None)
        stock_market = new_stock.get("stockMarket", None)
        is_valid_stock = name and quote and stock_market
        if not is_valid_stock:
            response = jsonify({ "error": "Please provide a valid stock. It should have a name, a symbol and a stock market" }), 400
            return response
        # This validation (stock_exist_in_db) should happen at the domain level, not at the API level.
        stock_exist_in_db = domain.stock_exists(quote)
        if stock_exist_in_db:
            response = jsonify({ "error": "The given stock already exists" }), 409
            return response
        # Add stock async
Example No. 16
import sys

import toml

from infrastructure import log

from stocks import job, read, mongo, domain, download, stocks_api

from influxdb import InfluxDBClient

from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop

from flask import Flask

from apscheduler.schedulers.background import BackgroundScheduler

NYSE = "nyse"
NASDAQ = "nasdaq"

logger = log.get_logger("stockreader")

def get_config():
    # Read config parameters from a TOML file.
    config = None
    config_file_path = sys.argv[1]
    with open(config_file_path) as config_file:
        config = toml.loads(config_file.read())
    return config

def read_stocks_from_exchange_file(config, exchange):
    exchange_file_path_list = config[exchange]
    stocks_from_exchange = read.read_stocks_from_multiple_files(exchange_file_path_list, exchange)
    return stocks_from_exchange

# Initialize
Example No. 17
import pymongo
from pymongo import MongoClient
from pymongo.errors import DuplicateKeyError, BulkWriteError
from infrastructure import log

logger = log.get_logger("mongo")


class Mongo:

    SYMBOL_KEY = "symbol"
    DATE_KEY = "date"

    TRADING_DAYS_PER_YEAR = 252

    STOCK_LIST = "stocklist"
    STOCKS_CURRENT_DATA = "stocks_current_data"
    HISTORICAL_DATA_SUFIX = "_historical_data"

    def collection_exists(self, collection_name):
        # list_collection_names() replaces the collection_names() call
        # deprecated in pymongo >= 3.7.
        return collection_name in self.db.list_collection_names()

    def create_regular_collection_if_not_exists(self, collection_name,
                                                index_key):
        if not self.collection_exists(collection_name):
            collection = self.db[collection_name]
            collection.create_index([(index_key, pymongo.ASCENDING)],
                                    unique=True)

    def create_historical_collection_if_not_exists(self, collection_name):
        if not self.collection_exists(collection_name):
            collection = self.db[collection_name]
            collection.create_index(
                [(self.SYMBOL_KEY, pymongo.ASCENDING),
                 (self.DATE_KEY, pymongo.DESCENDING)],
                unique=True)
Example No. 18
from flask import Blueprint, jsonify
from infrastructure import log

logger = log.get_logger("admin_api")


def get_admin_blueprint(time_series):
    admin_blueprint = Blueprint('admin_api', __name__)

    @admin_blueprint.route('/ping')
    def ping():
        logger.info("ping")
        time_series.save_async("API", {}, {"method": "ping"})
        return jsonify({"message": "pong"}), 200

    return admin_blueprint
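Registering the blueprint follows the same pattern as the other controllers; the app and TimeSeries wiring below are assumptions reusing Example No. 9.

from flask import Flask
from influxdb import InfluxDBClient

app = Flask(__name__)
time_series = TimeSeries(InfluxDBClient(host="localhost", port=8086))  # as in Example No. 9
app.register_blueprint(get_admin_blueprint(time_series), url_prefix="/stockreader/admin")
# GET /stockreader/admin/ping -> {"message": "pong"}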
Example No. 19
import sys

import toml

from infrastructure import log

from stocks import job, read, mongo, domain, download, stocks_api

from influxdb import InfluxDBClient

from tornado.wsgi import WSGIContainer
from tornado.httpserver import HTTPServer
from tornado.ioloop import IOLoop

from flask import Flask

from apscheduler.schedulers.background import BackgroundScheduler

NYSE = "nyse"
NASDAQ = "nasdaq"

logger = log.get_logger("stockreader")


def get_config():
    # Read config parameters from a TOML file.
    config = None
    config_file_path = sys.argv[1]
    with open(config_file_path) as config_file:
        config = toml.loads(config_file.read())
    return config


def read_stocks_from_exchange_file(config, exchange):
    exchange_file_path_list = config[exchange]
    stocks_from_exchange = read.read_stocks_from_multiple_files(
        exchange_file_path_list, exchange)
    return stocks_from_exchange
Example No. 20
from datetime import date, timedelta
from concurrent.futures import ThreadPoolExecutor
from infrastructure import log

logger = log.get_logger("job")


class Job:

    WEEKS_AGO = 1
    YEARS_AGO = 10

    WORKERS = 8
    LESS_WORKERS = 3
    DAYS_FROM_TODAY = 7

    DAILY_UPDATE_HOUR = 18
    MONTHLY_UPDATE_HOUR = 23

    def __init__(self, domain, scheduler, time_series):
        self.domain = domain
        self.scheduler = scheduler
        self.time_series = time_series

    def get_number_of_workers(self, any_list):
        return len(any_list) if len(any_list) < self.WORKERS else self.WORKERS

    def get_stocks_if_empty_list(self, stocks):
        stocks = stocks if stocks else self.domain.get_stock_list()
        return stocks
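A hedged wiring sketch for Job, reusing domain_logic and time_series from the earlier sketches; the scheduler comes from APScheduler as in Example No. 16.

from apscheduler.schedulers.background import BackgroundScheduler

# domain_logic and time_series as constructed in the earlier sketches.
stock_job = Job(domain_logic, BackgroundScheduler(), time_series)
print(stock_job.get_number_of_workers(["AAPL", "MSFT"]))  # 2 (capped at Job.WORKERS)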