Example #1
def add_worker(db_name):
    """
    add new worker and start consumer
    :return: None
    """
    logger.info('add worker')
    reset_db_name(db_name)
    worker = SimulationWorker()
    worker.consume()
Example #2
def start_result_worker(queue_name, collection_name, db_name):
    """
    :param queue_name: RabbitMQ queue name
    :param collection_name: collection name
    :param db_name: name of the current database
    :return: None
    """
    logger.info('Start result worker')
    reset_db_name(db_name)
    worker = ResultWorker(queue_name, collection_name)
    worker.consume()
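
Both workers follow the same pattern: point the process at the right database with reset_db_name, then start consuming. The function itself does not appear in these examples; below is a minimal sketch of what it plausibly does, assuming app.config.env holds module-level settings that DBAccess and the workers read (the derived result-database name is an assumption):

# Hypothetical sketch of app.config.env_func.reset_db_name -- the real
# implementation is not shown in these examples.
import app.config.env as env

def reset_db_name(db_name):
    # Rebind the module-level settings that DBAccess later reads.
    env.DB_NAME = db_name
    env.DB_RESULT_NAME = db_name + "_results"  # naming scheme assumed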
Example #3
    def simulate(self, ch, method, properties, body):
        """
        Callback invoked for each task message
        :param ch: channel the message was delivered on
        :param method: delivery metadata (carries the delivery tag)
        :param properties: message properties
        :param body: JSON-encoded task payload
        :return: None
        """
        logger.info(" [*] Running simulation %r" % body)

        data = json.loads(body)

        cycle = data["cycle"]
        phase = data["phase"]
        time_start = datetime.datetime.now().strftime("%d/%m/%y %H:%M:%S")
        if "db_name" in data:
            reset_db_name(data['db_name'])
        if "logistics_lp" in data:
            env.LOGISTICS_LP = data["logistics_lp"]
        detailed_publisher = ResultSaver(
            env.RABBITMQ_DETAILED_RESULT_QUEUE_NAME, env.RESULT_BATCHES_SIZE)
        global_publisher = ResultSaver(env.RABBITMQ_GLOBAL_RESULT_QUEUE_NAME,
                                       env.RESULT_BATCHES_SIZE)

        try:
            s = Simulator()
            s.simulate(cycle,
                       phase, {
                           "details": detailed_publisher,
                           "global": global_publisher
                       },
                       monitor=True,
                       logistics_lp=env.LOGISTICS_LP)

            detailed_publisher.close()
            global_publisher.close()
            logger.info(" [x] Done")
        except Exception as e:
            task_to_save = dict()
            task_to_save['db_name'] = env.DB_NAME
            task_to_save['time_start'] = time_start
            task_to_save['total_scenario'] = 0
            message = "Worker failed: %s" % (str(e))
            logger.warning("Worker failed: %s" % (str(e)))
            insert_history(phase=phase,
                           task_to_save=task_to_save,
                           status=env.HTML_STATUS.ERROR.value,
                           message=message)
            global_publisher.close()
            detailed_publisher.close()
            logger.info(" [x] Done with error")
        ch.basic_ack(delivery_tag=method.delivery_tag)
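
The signature of simulate and the final ch.basic_ack(delivery_tag=method.delivery_tag) match pika's message-callback protocol. The consume method invoked in Examples #1 and #2 is not shown; here is a sketch of what it might look like with pika's blocking API, where the queue-name constant is an assumption:

# Sketch of the worker's consume() loop; the queue-name constant is
# illustrative, not taken from the examples above.
import pika
import app.config.env as env

class SimulationWorker:
    # simulate(self, ch, method, properties, body) as in this example

    def consume(self):
        connection = pika.BlockingConnection(
            pika.ConnectionParameters(host="localhost"))
        channel = connection.channel()
        channel.basic_qos(prefetch_count=1)  # one unacked task per worker
        channel.basic_consume(queue=env.RABBITMQ_SIMULATION_QUEUE_NAME,
                              on_message_callback=self.simulate)
        channel.start_consuming()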
Example #4
def save_data():
    try:
        reset_db_name(request.json['db_name'])
        records = request.json['table']
        db = DBAccess(env.DB_NAME)
        name_ = trim_collection_name(request.json['name'])
        db.clear_collection(name_)
        db.save_to_db(name_, records)
        return jsonify(status=env.HTML_STATUS.OK.value)
    except Exception as e:
        logger.error("Cannot save data: %s" % e)
        return jsonify(status=env.HTML_STATUS.ERROR.value)
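
save_data reads request.json and answers with jsonify, i.e. it is a Flask view. A hedged client-side usage example follows; the route, host, and payload values are invented for illustration:

# Hypothetical client call -- the actual route and port are not shown above.
import requests

payload = {
    "db_name": "mine2farm",
    "name": "raw_materials",
    "table": [{"Item": "Sulphur", "Price": 100.0}],
}
resp = requests.post("http://localhost:5000/save_data", json=payload)
print(resp.json())  # {"status": <HTML_STATUS value>}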
Example #5
def get_all_results(db_name, collection):
    """
    Get results from collection
    :param db_name: name of current database
    :param collection: name of the collection
    :return: JSON
    """
    reset_db_name(db_name)
    records, _ = DBAccess(env.DB_RESULT_NAME).get_all_records(collection)
    for record in records:
        record.pop("_id", None)
    return jsonify(records)
Example #6
def get_result(db_name, collection, scenario_id):
    """
    Get results from collection
    :param db_name: name of current database
    :param collection: name of the collection
    :param scenario_id: scenario id
    :return: JSON
    """
    reset_db_name(db_name)
    record = DBAccess(env.DB_RESULT_NAME).get_one_record(
        collection, {"Scenario": int(scenario_id)})
    _id = record["_id"]
    record.pop("_id", None)
    return jsonify(record)
Example #7
def get_records_into_dics(db_name, collection):
    """
    Get records from collection and transform into dictionary
    :param db_name: name of current database
    :param collection: name of the collection
    :return: JSON
    """
    reset_db_name(db_name)
    records, _ = DBAccess(env.DB_NAME).get_all_records(collection)
    dic_records = {}
    for record in records:
        _id = record["_id"]
        record.pop("_id", None)
        dic_records[str(_id)] = record
    return json.dumps(JSONEncoder().encode(dic_records))
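
This example routes records through a custom JSONEncoder because MongoDB's ObjectId values are not JSON-serializable, which is also why the neighbouring examples pop "_id" before calling jsonify. A plausible sketch of such an encoder, assuming the usual bson pattern:

# Common pattern for encoding Mongo documents; the project's actual
# JSONEncoder may differ.
import json
from bson import ObjectId

class JSONEncoder(json.JSONEncoder):
    def default(self, o):
        if isinstance(o, ObjectId):
            return str(o)  # render the ObjectId as its hex string
        return super().default(o)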
Example #8
def get_results(db_name, collection, scenario_id):
    """
    Get results from collection
    :param db_name: name of current database
    :param collection: name of the collection
    :param scenario_id: scenario id
    :return: JSON
    """
    reset_db_name(db_name)

    cursor = DBAccess(env.DB_RESULT_NAME).get_records(
        collection, {"Scenario": int(scenario_id)})
    records = []
    for record in cursor:
        record.pop("_id", None)
        records.append(record)
    return jsonify(records)
Example #9
import unittest
import pandas as pd
import time

from app.config import env
from app.config.env import ScenarioGeneratorType, PipelineLayer
from app.config.env_func import reset_db_name
from app.data.Client import Driver
from app.data.DataManager import DataManager
from app.model.Simulator import Simulator
from app.risk.RiskEngine import RiskEngine
from app.tools import Utils
from app.model.ScenarioGenerator import ScenarioGeneratorFactory as SGF

reset_db_name("mine2farm")
dm = DataManager()
dm.load_data()


class PricingTestSuite(unittest.TestCase):
    """Pricing test cases."""
    def __init__(self, *args, **kwargs):
        super(PricingTestSuite, self).__init__(*args, **kwargs)
        scenarios_df = pd.read_csv(env.APP_FOLDER +
                                   "tests/data/one_scenario.csv")
        self.scenarios_dic = Utils.get_scenario_from_df(scenarios_df)
        scenario_id = 1
        self.simulator = Simulator(dm=dm,
                                   monikers_filter=sum(
                                       self.scenarios_dic[scenario_id], []))
Example #10
# -*- coding: utf-8 -*-

import app.dashboard.DBHandler as DBHandler
import sys

from app.config.env_func import reset_db_name

if __name__ == "__main__":
    quantile_step = 1 if len(sys.argv) < 2 else float(sys.argv[1])
    db_name = "mine2farm"
    if len(sys.argv) > 2:
        db_name = sys.argv[2]
    reset_db_name(db_name)
    DBHandler.get_best_scenarios(quantile_step, db_name)
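
The script indexes sys.argv by position, so arguments must come in order and there is no usage message. Here is the same parsing with the standard-library argparse, as a sketch (not part of the original script):

# Equivalent CLI parsing with argparse; defaults mirror the script above.
import argparse

parser = argparse.ArgumentParser(description="Compute best scenarios")
parser.add_argument("quantile_step", nargs="?", type=float, default=1.0,
                    help="quantile step, in percent")
parser.add_argument("db_name", nargs="?", default="mine2farm")
args = parser.parse_args()
# reset_db_name(args.db_name)
# DBHandler.get_best_scenarios(args.quantile_step, args.db_name)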
Example #11
def get_best_scenarios(quantile_step, db_name="mine2farm"):
    update_cache(db_name, -1)
    try:
        time_start = datetime.datetime.now().strftime("%d/%m/%y %H:%M:%S")

        # insert status of best scenarios "running"
        db_history = DBAccess(env.MONITORING_DB_NAME)
        query_insert = {
            'time_start': time_start,
            'db_name': db_name,
            'quantile_step': quantile_step,
            'status': -1
        }
        _id = db_history.save_to_db_no_check(
            env.MONITORING_COLLECTION_HISTORY_BEST_NAME, query_insert)

        # get best representative scenarios
        quantile_step = quantile_step / 100.
        reset_db_name(db_name)
        db = DBAccess(env.DB_RESULT_NAME)
        logger.info("Deleting best collections from DB")
        db.clear_collection(env.DB_GLOBAL_BEST_RESULT_COLLECTION_NAME)
        db.clear_collection(env.DB_DETAILED_BEST_RESULT_COLLECTION_NAME)
        scenarios = db.get_records(env.DB_GLOBAL_RESULT_COLLECTION_NAME,
                                   {}).sort([("Cost PV", DESCENDING)])

        scenarios_count = scenarios.count()
        step = int(quantile_step * scenarios_count)
        # save to db
        if step == 0:
            # all scenarios are concerned
            logger.info("Moving all scenarios to best collections")
            db.copy_to_collection(env.DB_GLOBAL_RESULT_COLLECTION_NAME,
                                  env.DB_GLOBAL_BEST_RESULT_COLLECTION_NAME)
            db.copy_to_collection(env.DB_DETAILED_RESULT_COLLECTION_NAME,
                                  env.DB_DETAILED_BEST_RESULT_COLLECTION_NAME)
            details_count = db.count(
                env.DB_DETAILED_BEST_RESULT_COLLECTION_NAME)
        else:
            # filter on specific scenarios
            representative_scenario_ids = [
                scenarios.skip(step * i)[0]["Scenario"]
                for i in range(0, int(scenarios_count / step))
            ]
            logger.info("List of selected best scenarios: %s" %
                        representative_scenario_ids)
            # simulate
            scenarios_global, scenarios_details = \
                Simulator().simulate(scenarios_filter=representative_scenario_ids, logistics_lp=env.LOGISTICS_LP)
            # save
            for scenario in scenarios_global:
                db.save_to_db_no_check(
                    env.DB_GLOBAL_BEST_RESULT_COLLECTION_NAME,
                    scenarios_global[scenario])
            for scenario in scenarios_details:
                json_data = json.dumps(NodeJSONEncoder().encode(
                    scenarios_details[scenario]))
                data = json.loads(json.loads(json_data))
                db.save_to_db_no_check(
                    env.DB_DETAILED_BEST_RESULT_COLLECTION_NAME, data)
            details_count = len(scenarios_details)

        # status update
        query_insert['global_count'] = scenarios_count
        query_insert['detailed_count'] = details_count
        filter_ = {'_id': ObjectId(_id)}
        db_history.update_record(
            collection=env.MONITORING_COLLECTION_HISTORY_BEST_NAME,
            filter_=filter_,
            data=query_insert)

        # raw materials sensitivities
        logger.info("Running sensitivity over raw materials")
        db.clear_collection(env.DB_SENSITIVITY_COLLECTION_NAME)
        raw_materials_df = Driver().get_data("raw_materials")
        shocks = {}
        for raw_material in raw_materials_df:
            item = raw_material["Item"]
            shocks[item] = 1
        scenarios_df = pd.DataFrame(Driver().get_results(
            env.DB_GLOBAL_BEST_RESULT_COLLECTION_NAME))
        scenarios_dic = Utils.get_scenario_from_df(scenarios_df)
        risk_engine = RiskEngine()

        for scenario_id in scenarios_dic:
            deltas = risk_engine.compute_delta(scenarios_dic[scenario_id],
                                               shocks,
                                               with_logistics=env.LOGISTICS_LP)

            deltas['Scenario'] = int(scenario_id)
            db.save_to_db_no_check(env.DB_SENSITIVITY_COLLECTION_NAME, deltas)

        # status update
        query_insert['time_end'] = datetime.datetime.now().strftime(
            "%d/%m/%y %H:%M:%S")
        query_insert['status'] = 0
        filter_ = {'_id': ObjectId(_id)}
        db_history.update_record(
            collection=env.MONITORING_COLLECTION_HISTORY_BEST_NAME,
            filter_=filter_,
            data=query_insert)
        update_cache(db_name, 0)

    except Exception as e:
        logger.error("Best scenarios failed")
        update_cache(db_name, 0)
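
scenarios.count() relies on PyMongo's Cursor.count(), which is deprecated and was removed in PyMongo 4. If DBAccess wraps a pymongo collection, the modern equivalent is count_documents; a sketch assuming direct access to the underlying collection:

# Sketch with raw pymongo; DBAccess internals are not shown in these examples.
from pymongo import MongoClient, DESCENDING

coll = MongoClient()[env.DB_RESULT_NAME][env.DB_GLOBAL_RESULT_COLLECTION_NAME]
scenarios_count = coll.count_documents({})  # replaces the deprecated count()
scenarios = coll.find({}).sort("Cost PV", DESCENDING)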
Example #12
# -*- coding: utf-8 -*-

from app.config.env_func import reset_db_name
from app.config.env import DB_SENSITIVITY_COLLECTION_NAME
from app.data.DBAccess import DBAccess
import pandas as pd
import app.config.env as env
from app.data.Client import Driver
from app.risk.RiskEngine import RiskEngine
from tqdm import tqdm
import json
from app.tools import Utils

if __name__ == "__main__":
    reset_db_name('mine2farm')
    db = DBAccess(env.DB_RESULT_NAME)
    db.clear_collection(DB_SENSITIVITY_COLLECTION_NAME)
    raw_materials_sensitivity = []
    raw_materials_df = Driver().get_data("raw_materials")
    shocks = {}
    for raw_material in raw_materials_df:
        item = raw_material["Item"]
        shocks[item] = 1
    #scenarios_df = pd.DataFrame(Driver().get_results(DB_GLOBAL_BEST_RESULT_COLLECTION_NAME))
    scenarios_df = pd.read_csv(env.APP_FOLDER + "outputs/global.csv")
    scenarios_dic = Utils.get_scenario_from_df(scenarios_df)
    risk_engine = RiskEngine()
    for scenario_id in scenarios_dic:
        deltas = risk_engine.compute_delta(scenarios_dic[scenario_id], shocks)
        deltas['Scenario'] = int(scenario_id)
        db.save_to_db_no_check(DB_SENSITIVITY_COLLECTION_NAME, deltas)