def simulate(cycle=1, phase=0):
    # Reset both result collections before a fresh run.
    db = DBAccess(env.DB_RESULT_NAME)
    db.clear_collection(env.DB_GLOBAL_RESULT_COLLECTION_NAME)
    db.clear_collection(env.DB_DETAILED_RESULT_COLLECTION_NAME)
    # Solve the granulation step over the simulator's nodes and sales plan.
    sim = Simulator()
    granulation_solver = GranulationSolver(sim.nodes, sim.sales_plan)
    granulation_results = granulation_solver.launch_granulation_solver()
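Every snippet on this page calls the same DBAccess wrapper without showing it. The sketch below is a hypothetical reconstruction of the interface these examples assume, inferred from the calls they make (clear_collection, save_to_db_no_check, get_records); it is not the project's actual implementation.

import pymongo

# Hypothetical DBAccess sketch; method names come from the examples,
# the pymongo mapping is an assumption.
class DBAccess:
    def __init__(self, db_name, host="localhost", port=27017):
        self.db = pymongo.MongoClient(host, port)[db_name]

    def clear_collection(self, name):
        # Remove all documents but keep the collection and its indexes.
        self.db[name].delete_many({})

    def save_to_db_no_check(self, name, data):
        # Insert a record or a list of records without validation.
        if isinstance(data, list):
            return self.db[name].insert_many(data).inserted_ids
        return self.db[name].insert_one(data).inserted_id

    def get_records(self, name, query):
        return self.db[name].find(query)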
Example #2
def JESA_DropAll():
    # Drop every collection registered in the cache, then reset the cache.
    db_access = DBAccess(env.DB_NAME)
    collections = list(COLLECTIONS_CACHE)
    for collection in collections:
        db_access.clear_collection(collection)
    COLLECTIONS_CACHE.clear()
    return "|".join(collections)
Example #3
def save_data():
    try:
        # Point env.DB_NAME at the requested database, then overwrite the
        # target collection with the posted records.
        reset_db_name(request.json['db_name'])
        records = request.json['table']
        db = DBAccess(env.DB_NAME)
        name_ = trim_collection_name(request.json['name'])
        db.clear_collection(name_)
        db.save_to_db(name_, records)
        return jsonify(status=env.HTML_STATUS.OK)
    except Exception as e:
        logger.error("Cannot save data: %s" % e)
        return jsonify(status=env.HTML_STATUS.ERROR)
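save_data reads three keys from the request body: db_name, name, and table. A hypothetical client call (route, host, and sample records are assumptions, not shown in the snippet):

import requests

# Hypothetical payload and route for the save_data endpoint above.
payload = {
    "db_name": "mine2farm",
    "name": "raw_materials",
    "table": [{"Item": "NH3", "Price": 250.0}],
}
response = requests.post("http://localhost:5000/save_data", json=payload)
print(response.json())  # {"status": ...}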
Example #4
def get_best_global_scenarios(quantile_step):
    db = DBAccess(env.DB_RESULT_NAME)
    # Reset the output collection (the global one, which is written below).
    db.clear_collection(env.DB_GLOBAL_BEST_RESULT_COLLECTION_NAME)

    # Keep one representative scenario per quantile of "Cost PV".
    scenarios = db.get_records(env.DB_GLOBAL_RESULT_COLLECTION_NAME,
                               {}).sort([("Cost PV", DESCENDING)])
    step = int(quantile_step * scenarios.count())
    representative_scenarios = [
        scenarios.skip(step * i)[0]
        for i in range(0, int(scenarios.count() / step))
    ]
    db.save_to_db_no_check(env.DB_GLOBAL_BEST_RESULT_COLLECTION_NAME,
                           representative_scenarios)
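With quantile_step = 0.25 and 100 sorted scenarios, step = 25 and the comprehension keeps ranks 0, 25, 50, and 75. If quantile_step * count is below 1, step is 0 and the range expression divides by zero; get_best_scenarios further down handles that case by copying everything. A hypothetical standalone helper with the same selection plus that guard:

def representative_indices(count, quantile_step):
    # Ranks kept for a given quantile step, guarding against step == 0.
    step = int(quantile_step * count)
    if step == 0:
        return list(range(count))  # keep every scenario
    return [step * i for i in range(count // step)]

print(representative_indices(100, 0.25))  # [0, 25, 50, 75]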
Example #5
def JESA_UploadTable(name, table, db_name="mine2farm"):
    # Convert the DataFrame into a list of {column: value} records.
    header = list(table)
    records = []
    for _, row in table.iterrows():
        records.append({h: row[h] for h in header})

    env.DB_NAME = db_name
    db_access = DBAccess(env.DB_NAME)
    name_ = trim_collection_name(name)
    db_access.clear_collection(name_)
    db_access.save_to_db(name_, records)
    COLLECTIONS_CACHE.add(name_)
    return "%s Saved! @%s" % (name_, datetime.now().strftime("%H:%M:%S"))
Example #6
def simulate(cycle=1, phase=0, use_db=False):
    if use_db:
        db = DBAccess(env.DB_RESULT_NAME)
        db.clear_collection(env.DB_GLOBAL_RESULT_COLLECTION_NAME)
        db.clear_collection(env.DB_DETAILED_RESULT_COLLECTION_NAME)
    scenarios_global, scenarios_details = Simulator().simulate(
        cycle, phase, logistics_lp=False)
    if use_db:
        for scenario in scenarios_global:
            db.save_to_db_no_check(env.DB_GLOBAL_RESULT_COLLECTION_NAME,
                                   scenarios_global[scenario])

        for scenario in scenarios_details:
            # NodeJSONEncoder().encode already returns a JSON string; dumps
            # wraps it once more, so two loads calls restore a plain dict.
            json_data = json.dumps(NodeJSONEncoder().encode(
                scenarios_details[scenario]))
            data = json.loads(json.loads(json_data))
            db.save_to_db_no_check(env.DB_DETAILED_RESULT_COLLECTION_NAME,
                                   data)
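The double json.loads mirrors a double encoding. NodeJSONEncoder is project-specific, but the round trip can be illustrated with the standard encoder:

import json

detail = {"Scenario": 1, "Cost PV": 42.0}
inner = json.JSONEncoder().encode(detail)  # a JSON string
outer = json.dumps(inner)                  # that string, serialized again
assert json.loads(json.loads(outer)) == detail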
Example #7
def get_best_detailed_scenarios(quantile_step):
    db = DBAccess(env.DB_RESULT_NAME)
    db.clear_collection(env.DB_DETAILED_BEST_RESULT_COLLECTION_NAME)

    scenarios = db.get_fields(env.DB_GLOBAL_RESULT_COLLECTION_NAME, {
        "Cost PV": 1,
        "Scenario": 1
    }, [("Cost PV", DESCENDING)])
    step = int(quantile_step * scenarios.count())
    points = [
        scenarios.skip(step * i)[0]["Scenario"]
        for i in range(0, int(scenarios.count() / step))
    ]
    representative_scenarios = db.get_records(
        env.DB_DETAILED_RESULT_COLLECTION_NAME, {"Scenario": {
            "$in": points
        }})
    db.save_to_db_no_check(env.DB_DETAILED_BEST_RESULT_COLLECTION_NAME,
                           representative_scenarios)
Example #8
    def serve(self, cycle):
        """
        Create simulation tasks and send them to the broker.
        :param cycle: number of phases to split the simulation into
        :return: None
        """
        # reset scenarios table
        db = DBAccess(env.DB_RESULT_NAME)
        db.clear_collection(env.DB_GLOBAL_RESULT_COLLECTION_NAME)
        db.clear_collection(env.DB_DETAILED_RESULT_COLLECTION_NAME)
        db.clear_collection(env.DB_SENSITIVITY_COLLECTION_NAME)
        db.create_index(
            env.DB_GLOBAL_RESULT_COLLECTION_NAME,
            [("Cost PV", pymongo.DESCENDING), ("Scenario", pymongo.ASCENDING)]
        )
        db.create_index(
            env.DB_DETAILED_RESULT_COLLECTION_NAME,
            [("Scenario", pymongo.ASCENDING)]
        )
        db.save_to_db_no_check(env.DB_SENSITIVITY_COLLECTION_NAME, {"NH3": 0, "ACS": 0, "HCl": 0, "Raw water": 0,
                                                                    "Electricity": 0, "K09": 0, "Rock": 0,
                                                                    "Scenario": -1})

        data = []
        for i in range(cycle):
            data.append(json.dumps({
                "cycle": cycle,
                "phase": i,
                "db_name": env.DB_NAME,
                "logistics_lp": env.LOGISTICS_LP
            }))
        broker = Broker(env.RABBITMQ_SIMULATOR_QUEUE_NAME)
        broker.publish(data)
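serve only publishes: each message carries the full cycle count plus one phase index, so a consumer on the other side of the queue decodes the task and runs that phase. A hypothetical handler (only Broker.publish appears above, so the consume side is an assumption), reusing the simulate signature from Example #6:

import json

# Hypothetical worker-side handler for one queued message.
def handle_task(body):
    task = json.loads(body)
    reset_db_name(task["db_name"])
    Simulator().simulate(task["cycle"], task["phase"],
                         logistics_lp=task["logistics_lp"])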
Example #9
from app.model.Simulator import *
import cProfile
from multiprocessing import Pool, TimeoutError, Process
from app.data.DBAccess import DBAccess
from flask import Flask, Response, render_template
import dash
import dash_bootstrap_components as dbc
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output

server = Flask(__name__)
db = DBAccess(env.DB_RESULT_NAME)
db.clear_collection(env.DB_GLOBAL_RESULT_COLLECTION_NAME)
db.clear_collection(env.DB_DETAILED_RESULT_COLLECTION_NAME)
simulator = Simulator()
@server.route('/')
def index():
    return 'Test'

app = dash.Dash(__name__,
                server=server,
                routes_pathname_prefix='/dash/',
                external_stylesheets=[dbc.themes.BOOTSTRAP]
                )

app.layout = html.Div(
    [
        dbc.Progress(id="progress", value=0, striped=True, animated=True),
    ]
)
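The unused Input/Output imports suggest the progress bar is driven by a callback. A hypothetical wiring (the dcc.Interval component, its id, and the total-count constant are all assumptions; none appear in the truncated snippet):

# Hypothetical progress callback; assumes an Interval component and a
# known total number of scenarios.
TOTAL_SCENARIOS = 1000  # made-up total
@app.callback(Output("progress", "value"),
              [Input("interval", "n_intervals")])
def update_progress(n):
    done = db.count(env.DB_GLOBAL_RESULT_COLLECTION_NAME)
    return min(100, int(100 * done / max(1, TOTAL_SCENARIOS)))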
Example #10
def get_best_scenarios(quantile_step, db_name="mine2farm"):
    update_cache(db_name, -1)
    try:
        time_start = datetime.datetime.now().strftime("%d/%m/%y %H:%M:%S")

        # insert status of best scenarios "running"
        db_history = DBAccess(env.MONITORING_DB_NAME)
        query_insert = {
            'time_start': time_start,
            'db_name': db_name,
            'quantile_step': quantile_step,
            'status': -1
        }
        _id = db_history.save_to_db_no_check(
            env.MONITORING_COLLECTION_HISTORY_BEST_NAME, query_insert)

        # get best representative scenarios (quantile_step arrives in percent)
        quantile_step = quantile_step / 100.
        reset_db_name(db_name)
        db = DBAccess(env.DB_RESULT_NAME)
        logger.info("Deleting best collections from DB")
        db.clear_collection(env.DB_GLOBAL_BEST_RESULT_COLLECTION_NAME)
        db.clear_collection(env.DB_DETAILED_BEST_RESULT_COLLECTION_NAME)
        scenarios = db.get_records(env.DB_GLOBAL_RESULT_COLLECTION_NAME,
                                   {}).sort([("Cost PV", DESCENDING)])

        scenarios_count = scenarios.count()
        step = int(quantile_step * scenarios_count)
        # save to db
        if step == 0:
            # all scenarios are concerned
            logger.info("Moving all scenarios to best collections")
            db.copy_to_collection(env.DB_GLOBAL_RESULT_COLLECTION_NAME,
                                  env.DB_GLOBAL_BEST_RESULT_COLLECTION_NAME)
            db.copy_to_collection(env.DB_DETAILED_RESULT_COLLECTION_NAME,
                                  env.DB_DETAILED_BEST_RESULT_COLLECTION_NAME)
            details_count = db.count(
                env.DB_DETAILED_BEST_RESULT_COLLECTION_NAME)
        else:
            # filter on specific scenarios
            representative_scenario_ids = [
                scenarios.skip(step * i)[0]["Scenario"]
                for i in range(0, int(scenarios_count / step))
            ]
            logger.info("List of selected best scenarios: %s" %
                        representative_scenario_ids)
            # simulate
            scenarios_global, scenarios_details = \
                Simulator().simulate(scenarios_filter=representative_scenario_ids, logistics_lp=env.LOGISTICS_LP)
            # save
            for scenario in scenarios_global:
                db.save_to_db_no_check(
                    env.DB_GLOBAL_BEST_RESULT_COLLECTION_NAME,
                    scenarios_global[scenario])
            for scenario in scenarios_details:
                json_data = json.dumps(NodeJSONEncoder().encode(
                    scenarios_details[scenario]))
                data = json.loads(json.loads(json_data))
                db.save_to_db_no_check(
                    env.DB_DETAILED_BEST_RESULT_COLLECTION_NAME, data)
            details_count = len(scenarios_details)

        # status update
        query_insert['global_count'] = scenarios_count
        query_insert['detailed_count'] = details_count
        filter_ = {'_id': ObjectId(_id)}
        db_history.update_record(
            collection=env.MONITORING_COLLECTION_HISTORY_BEST_NAME,
            filter_=filter_,
            data=query_insert)

        # raw materials sensitivities
        logger.info("Running sensitivity over raw materials")
        db.clear_collection(env.DB_SENSITIVITY_COLLECTION_NAME)
        raw_materials_df = Driver().get_data("raw_materials")
        shocks = {}
        for raw_material in raw_materials_df:
            item = raw_material["Item"]
            shocks[item] = 1
        scenarios_df = pd.DataFrame(Driver().get_results(
            env.DB_GLOBAL_BEST_RESULT_COLLECTION_NAME))
        scenarios_dic = Utils.get_scenario_from_df(scenarios_df)
        risk_engine = RiskEngine()

        for scenario_id in scenarios_dic:
            deltas = risk_engine.compute_delta(scenarios_dic[scenario_id],
                                               shocks,
                                               with_logistics=env.LOGISTICS_LP)

            deltas['Scenario'] = int(scenario_id)
            db.save_to_db_no_check(env.DB_SENSITIVITY_COLLECTION_NAME, deltas)

        # status update
        query_insert['time_end'] = datetime.datetime.now().strftime(
            "%d/%m/%y %H:%M:%S")
        query_insert['status'] = 0
        filter_ = {'_id': ObjectId(_id)}
        db_history.update_record(
            collection=env.MONITORING_COLLECTION_HISTORY_BEST_NAME,
            filter_=filter_,
            data=query_insert)
        update_cache(db_name, 0)

    except Exception as e:
        logger.error("Best scenarios failed: %s" % e)
        update_cache(db_name, 0)
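Note the unit: quantile_step is passed in percent and divided by 100 inside the function, so a hypothetical call keeping one scenario per 5% quantile would be:

get_best_scenarios(5, db_name="mine2farm")  # ~20 representative scenarios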
Example #11
from app.config.env_func import reset_db_name
from app.config.env import DB_SENSITIVITY_COLLECTION_NAME
from app.data.DBAccess import DBAccess
import pandas as pd
import app.config.env as env
from app.data.Client import Driver
from app.risk.RiskEngine import RiskEngine
from tqdm import tqdm
import json
from app.tools import Utils

if __name__ == "__main__":
    reset_db_name('mine2farm')
    db = DBAccess(env.DB_RESULT_NAME)
    db.clear_collection(DB_SENSITIVITY_COLLECTION_NAME)
    raw_materials_sensitivity = []
    raw_materials_df = Driver().get_data("raw_materials")
    shocks = {}
    for raw_material in raw_materials_df:
        item = raw_material["Item"]
        shocks[item] = 1
    #scenarios_df = pd.DataFrame(Driver().get_results(DB_GLOBAL_BEST_RESULT_COLLECTION_NAME))
    scenarios_df = pd.read_csv(env.APP_FOLDER + "outputs/global.csv")
    scenarios_dic = Utils.get_scenario_from_df(scenarios_df)
    # Reuse one RiskEngine across scenarios; tqdm shows progress.
    risk_engine = RiskEngine()
    for scenario_id in tqdm(scenarios_dic):
        deltas = risk_engine.compute_delta(scenarios_dic[scenario_id], shocks)
        deltas['Scenario'] = int(scenario_id)
        db.save_to_db_no_check(DB_SENSITIVITY_COLLECTION_NAME, deltas)