def JESA_DropAll():
    db_access = DBAccess(env.DB_NAME)
    for collection in COLLECTIONS_CACHE:
        db_access.clear_collection(collection)
    collections = list(COLLECTIONS_CACHE)
    COLLECTIONS_CACHE.clear()
    return "|".join(collections)
def load_simul_option_from_db(simuloption):
    # reading option current values from database
    records, ncol = DBAccess(env.DB_NAME).get_all_records(simuloption)
    records.pop("_id", None)
    table = ff.create_table(records, index=True,
                            index_title=simuloption + ' current values',
                            height_constant=20)
    table.layout.width = 650
    return table
def get_result(db_name, collection, scenario_id):
    """
    Get one result from a collection
    :param db_name: name of current database
    :param collection: collection name
    :param scenario_id: scenario id
    :return: JSON
    """
    reset_db_name(db_name)
    record = DBAccess(env.DB_RESULT_NAME).get_one_record(
        collection, {"Scenario": int(scenario_id)})
    record.pop("_id", None)  # drop the Mongo ObjectId before serializing
    return jsonify(record)
def index():
    """
    Get dashboard monitoring
    :return: template
    """
    assets_path = "/"
    dashboard_url = "http://127.0.0.1:5000/"
    manager_rabbit = ManagerRabbit()
    consumers = manager_rabbit.get_list_consumers()
    check_memcached()
    except_dbs = ['admin', 'config', 'local', env.MONITORING_DB_NAME]
    db_names = [database for database in DBAccess.get_dbs_names()
                if database not in except_dbs and '_results' not in database]
    return render_template('index.html',
                           service_status=check_service_data(),
                           dashboard_url=dashboard_url,
                           db_names=db_names,
                           cycle=env.RABBITMQ_CYCLE,
                           consumers=consumers,
                           images=images,
                           assets_path=assets_path,
                           nb_pr_page=env.MONITORING_NB_PAGE,
                           count_worker=check_max_worker(env.RABBITMQ_SIMULATOR_QUEUE_NAME),
                           global_result_worker=check_worker_result(RABBITMQ_GLOBAL_RESULT_QUEUE_NAME),
                           detailed_result_worker=check_worker_result(RABBITMQ_DETAILED_RESULT_QUEUE_NAME),
                           queue_simulate=env.RABBITMQ_SIMULATOR_QUEUE_NAME,
                           canvas_url=env.CANVAS_URL,
                           logistics_lp=env.LOGISTICS_LP)
def get_history(context=None):
    """
    Get history with pagination for best scenarios and task monitor
    :param context: String (best or None)
    :return: JSON
    """
    if request.json['current_page'] and request.json['nb_pr_page']:
        db = DBAccess(env.MONITORING_DB_NAME)
        if context == 'best':
            collection = env.MONITORING_COLLECTION_HISTORY_BEST_NAME
        else:
            collection = env.MONITORING_COLLECTION_HISTORY_NAME
        list_history, total_items = db.get_records_with_pagination(
            collection=collection,
            filter_=None,
            sort_key="_id",
            sort_direction=-1,
            current_page=request.json['current_page'],
            nb_pr_page=request.json['nb_pr_page'])
        return jsonify(listHistory=json.loads(dumps(list_history)),
                       total_items=total_items)
def simulate(cycle=1, phase=0, use_db=False):
    if use_db:
        db = DBAccess(env.DB_RESULT_NAME)
        db.clear_collection(env.DB_GLOBAL_RESULT_COLLECTION_NAME)
        db.clear_collection(env.DB_DETAILED_RESULT_COLLECTION_NAME)
    scenarios_global, scenarios_details = Simulator().simulate(
        cycle, phase, logistics_lp=False)
    if use_db:
        for scenario in scenarios_global:
            db.save_to_db_no_check(env.DB_GLOBAL_RESULT_COLLECTION_NAME,
                                   scenarios_global[scenario])
        for scenario in scenarios_details:
            json_data = json.dumps(NodeJSONEncoder().encode(scenarios_details[scenario]))
            data = json.loads(json.loads(json_data))
            db.save_to_db_no_check(env.DB_DETAILED_RESULT_COLLECTION_NAME, data)
def get_all_records():
    """Returns all the scenario results that are stored in the database"""
    records = {}
    all_records, nb_docs = DBAccess(env.DB_RESULT_NAME).get_all_records("scenarios")
    for i in range(nb_docs):
        records.update(all_records[i])
    records.pop("_id", None)
    return records
def insert_history(phase, task_to_save, status, message):
    """
    Insert history for monitoring
    :param phase: Int
    :param task_to_save: Dict
    :param status: Int
    :param message: String
    :return: None
    """
    query_insert = dict()
    query_insert['phase'] = phase
    query_insert['status'] = status
    query_insert['message'] = message
    query_insert['time_start'] = task_to_save['time_start']
    query_insert['time_end'] = datetime.datetime.now().strftime("%d/%m/%y %H:%M:%S")
    query_insert['db_name'] = task_to_save['db_name']
    query_insert['total_scenario'] = task_to_save['total_scenario']
    db = DBAccess(env.MONITORING_DB_NAME)
    db.save_to_db_no_check(env.MONITORING_COLLECTION_HISTORY_NAME, query_insert)
def simulate(cycle=1, phase=0):
    db = DBAccess(env.DB_RESULT_NAME)
    db.clear_collection(env.DB_GLOBAL_RESULT_COLLECTION_NAME)
    db.clear_collection(env.DB_DETAILED_RESULT_COLLECTION_NAME)
    sim = Simulator()
    granulation_solver = GranulationSolver(sim.nodes, sim.sales_plan)
    granulation_results = granulation_solver.launch_granulation_solver()
def get_all_results(db_name, collection):
    """
    Get all results from a collection
    :param db_name: name of current database
    :param collection: collection name
    :return: JSON
    """
    reset_db_name(db_name)
    records, _ = DBAccess(env.DB_RESULT_NAME).get_all_records(collection)
    for record in records:
        record.pop("_id", None)  # drop the Mongo ObjectId before serializing
    return jsonify(records)
def get_best_global_scenarios(quantile_step):
    db = DBAccess(env.DB_RESULT_NAME)
    db.clear_collection(env.DB_GLOBAL_BEST_RESULT_COLLECTION_NAME)
    # sort scenarios by cost and keep one representative per quantile bucket
    scenarios = db.get_records(env.DB_GLOBAL_RESULT_COLLECTION_NAME,
                               {}).sort([("Cost PV", DESCENDING)])
    step = int(quantile_step * scenarios.count())
    representative_scenarios = [
        scenarios.skip(step * i)[0]
        for i in range(0, int(scenarios.count() / step))
    ]
    db.save_to_db_no_check(env.DB_GLOBAL_BEST_RESULT_COLLECTION_NAME,
                           representative_scenarios)
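# A minimal standalone sketch (not part of the original module) of the quantile-sampling
# idea used above: from a cost-sorted sequence, keep one representative per quantile
# bucket. Plain Python lists stand in for the MongoDB cursor; the function name and the
# example values are illustrative only.
def pick_representatives(sorted_scenarios, quantile_step):
    step = int(quantile_step * len(sorted_scenarios))
    if step == 0:
        # quantile step smaller than one document: every scenario is kept
        return list(sorted_scenarios)
    return [sorted_scenarios[step * i] for i in range(len(sorted_scenarios) // step)]

# Example: 10 scenarios with quantile_step=0.2 gives step=2, so items 0, 2, 4, 6 and 8 are kept.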
def get_records_into_dics(db_name, collection):
    """
    Get records from collection and transform into dictionary keyed by ObjectId
    :param db_name: name of current database
    :param collection: collection name
    :return: JSON
    """
    reset_db_name(db_name)
    records, _ = DBAccess(env.DB_NAME).get_all_records(collection)
    dic_records = {}
    for record in records:
        _id = record["_id"]
        record.pop("_id", None)
        dic_records[str(_id)] = record
    return json.dumps(JSONEncoder().encode(dic_records))
def get_results(db_name, collection, scenario_id):
    """
    Get results from collection for one scenario
    :param db_name: name of current database
    :param collection: collection name
    :param scenario_id: scenario id
    :return: JSON
    """
    reset_db_name(db_name)
    cursor = DBAccess(env.DB_RESULT_NAME).get_records(
        collection, {"Scenario": int(scenario_id)})
    records = []
    for record in cursor:
        record.pop("_id", None)
        records.append(record)
    return jsonify(records)
def save_data():
    try:
        reset_db_name(request.json['db_name'])
        records = request.json['table']
        db = DBAccess(env.DB_NAME)
        name_ = trim_collection_name(request.json['name'])
        db.clear_collection(name_)
        db.save_to_db(name_, records)
        return jsonify(status=env.HTML_STATUS.OK)
    except Exception as e:
        logger.error("Cannot save data: %s" % e)
        return jsonify(status=env.HTML_STATUS.ERROR)
def get_best_detailed_scenarios(quantile_step):
    db = DBAccess(env.DB_RESULT_NAME)
    db.clear_collection(env.DB_DETAILED_BEST_RESULT_COLLECTION_NAME)
    scenarios = db.get_fields(env.DB_GLOBAL_RESULT_COLLECTION_NAME,
                              {"Cost PV": 1, "Scenario": 1},
                              [("Cost PV", DESCENDING)])
    step = int(quantile_step * scenarios.count())
    points = [
        scenarios.skip(step * i)[0]["Scenario"]
        for i in range(0, int(scenarios.count() / step))
    ]
    representative_scenarios = db.get_records(
        env.DB_DETAILED_RESULT_COLLECTION_NAME, {"Scenario": {"$in": points}})
    db.save_to_db_no_check(env.DB_DETAILED_BEST_RESULT_COLLECTION_NAME,
                           representative_scenarios)
def check_worker():
    """
    Get information for each worker
    :return: JSON
    """
    manager_rabbit = ManagerRabbit()
    list_queues = manager_rabbit.get_list_queues()
    consumers = manager_rabbit.get_list_consumers()
    workers_info = dict()

    # get status of best scenarios for the current db_name
    best_scenarios_status = dict()
    except_dbs = ['admin', 'config', 'local', env.MONITORING_DB_NAME]
    db_names = [database for database in DBAccess.get_dbs_names()
                if database not in except_dbs and '_results' not in database]
    if ('db_name' in request.json) and (request.json['db_name'] is not None):
        best_scenarios_status[request.json['db_name']] = env.HTML_STATUS.OK.value
        if memcached_client.get(request.json['db_name']):
            best_scenarios_status[request.json['db_name']] = memcached_client.get(request.json['db_name'])
    else:
        return jsonify(status=env.HTML_STATUS.ERROR.value)

    # Status worker global results
    worker_global_result = check_worker_result(RABBITMQ_GLOBAL_RESULT_QUEUE_NAME)
    # Status worker detailed results
    worker_detailed_result = check_worker_result(RABBITMQ_DETAILED_RESULT_QUEUE_NAME)
    # Count workers running
    count_worker = check_max_worker(env.RABBITMQ_SIMULATOR_QUEUE_NAME)

    for phase in range(env.RABBITMQ_CYCLE):
        if memcached_client.get("workers_info_%i" % phase):
            workers_info[phase] = memcached_client.get("workers_info_%i" % phase)[str(phase)]

    return jsonify(workersInfo=workers_info,
                   list_queues=list_queues,
                   consumers=consumers,
                   best_scenarios_status=best_scenarios_status,
                   db_names=db_names,
                   worker_global_result=worker_global_result,
                   worker_detailed_result=worker_detailed_result,
                   count_worker=count_worker)
def save_results(self, ch, method, properties, body):
    """
    Callback function called for each task
    :param ch:
    :param method:
    :param properties:
    :param body:
    :return: None
    """
    logger.info(" [*] Saving results %r" % body[0:env.HEAD_DATA_BITS])
    # frame layout: HEAD_DATA_BITS bytes of header, then the database name terminated
    # by '$' (within the first DB_NAME_BITS bytes), then the JSON payload
    message = body[env.HEAD_DATA_BITS:]
    message_db = str(body[env.HEAD_DATA_BITS:env.HEAD_DATA_BITS + env.DB_NAME_BITS], 'utf-8')
    dol_index = message_db.find("$")
    db_name = message_db[0:dol_index]
    data = json.loads(json.loads(message[dol_index + 1:]))
    if isinstance(data, dict) and "timestamp" not in data:
        data["timestamp"] = datetime.now()
    DBAccess('%s_results' % db_name).save_to_db_no_check(self.collection_name, data)
    logger.info(" [x] Done")
    ch.basic_ack(delivery_tag=method.delivery_tag)
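# Hedged sketch (an assumption, not the project's actual producer code) of the frame
# format that save_results expects: a fixed header of env.HEAD_DATA_BITS bytes, then the
# database name terminated by '$' within the first env.DB_NAME_BITS bytes, then a
# twice-encoded JSON payload (the consumer calls json.loads twice).
def build_result_message(header, db_name, payload):
    # header is raw bytes of length env.HEAD_DATA_BITS; db_name is a str; payload a dict
    assert len(header) == env.HEAD_DATA_BITS
    assert len(db_name) + 1 <= env.DB_NAME_BITS  # name plus the '$' separator must fit
    body = json.dumps(json.dumps(payload))
    return header + db_name.encode('utf-8') + b'$' + body.encode('utf-8')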
def JESA_UploadTable(name, table, db_name="mine2farm"): records = [] header = list(table) for row in table.iterrows(): record = {} for h in header: record[h] = row[1][h] records.append(record) env.DB_NAME = db_name db_access = DBAccess(env.DB_NAME) name_ = trim_collection_name(name) db_access.clear_collection(name_) db_access.save_to_db(name_, records) COLLECTIONS_CACHE.add(name_) return "%s Saved! @%s" % (name_, datetime.now().strftime("%H:%M:%S"))
def get_best_scenarios(quantile_step, db_name="mine2farm"): update_cache(db_name, -1) try: time_start = datetime.datetime.now().strftime("%d/%m/%y %H:%M:%S") # insert status of best scenarios "running" db_history = DBAccess(env.MONITORING_DB_NAME) query_insert = { 'time_start': time_start, 'db_name': db_name, 'quantile_step': quantile_step, 'status': -1 } _id = db_history.save_to_db_no_check( env.MONITORING_COLLECTION_HISTORY_BEST_NAME, query_insert) # get best representative scenarios quantile_step = quantile_step / 100. reset_db_name(db_name) db = DBAccess(env.DB_RESULT_NAME) logger.info("Deleting best collections from DB") db.clear_collection(env.DB_GLOBAL_BEST_RESULT_COLLECTION_NAME) db.clear_collection(env.DB_DETAILED_BEST_RESULT_COLLECTION_NAME) scenarios = db.get_records(env.DB_GLOBAL_RESULT_COLLECTION_NAME, {}).sort([("Cost PV", DESCENDING)]) scenarios_count = scenarios.count() step = int(quantile_step * scenarios_count) # save to db if step == 0: # all scenarios are concerned logger.info("Moving all scenarios to best collections") db.copy_to_collection(env.DB_GLOBAL_RESULT_COLLECTION_NAME, env.DB_GLOBAL_BEST_RESULT_COLLECTION_NAME) db.copy_to_collection(env.DB_DETAILED_RESULT_COLLECTION_NAME, env.DB_DETAILED_BEST_RESULT_COLLECTION_NAME) details_count = db.count( env.DB_DETAILED_BEST_RESULT_COLLECTION_NAME) else: # filter on specific scenarios representative_scenario_ids = [ scenarios.skip(step * i)[0]["Scenario"] for i in range(0, int(scenarios_count / step)) ] logger.info("List of selected best scenarios: %s" % representative_scenario_ids) # simulate scenarios_global, scenarios_details = \ Simulator().simulate(scenarios_filter=representative_scenario_ids, logistics_lp=env.LOGISTICS_LP) # save for scenario in scenarios_global: db.save_to_db_no_check( env.DB_GLOBAL_BEST_RESULT_COLLECTION_NAME, scenarios_global[scenario]) for scenario in scenarios_details: json_data = json.dumps(NodeJSONEncoder().encode( scenarios_details[scenario])) data = json.loads(json.loads(json_data)) db.save_to_db_no_check( env.DB_DETAILED_BEST_RESULT_COLLECTION_NAME, data) details_count = len(scenarios_details) # status update query_insert['global_count'] = scenarios_count query_insert['detailed_count'] = details_count filter_ = {'_id': ObjectId(_id)} db_history.update_record( collection=env.MONITORING_COLLECTION_HISTORY_BEST_NAME, filter_=filter_, data=query_insert) # raw materials sensitivities logger.info("Running sensitivity over raw materials") db.clear_collection(env.DB_SENSITIVITY_COLLECTION_NAME) raw_materials_df = Driver().get_data("raw_materials") shocks = {} for raw_material in raw_materials_df: item = raw_material["Item"] shocks[item] = 1 scenarios_df = pd.DataFrame(Driver().get_results( env.DB_GLOBAL_BEST_RESULT_COLLECTION_NAME)) scenarios_dic = Utils.get_scenario_from_df(scenarios_df) risk_engine = RiskEngine() for scenario_id in scenarios_dic: deltas = risk_engine.compute_delta(scenarios_dic[scenario_id], shocks, with_logistics=env.LOGISTICS_LP) deltas['Scenario'] = int(scenario_id) db.save_to_db_no_check(env.DB_SENSITIVITY_COLLECTION_NAME, deltas) # status update query_insert['time_end'] = datetime.datetime.now().strftime( "%d/%m/%y %H:%M:%S") query_insert['status'] = 0 filter_ = {'_id': ObjectId(_id)} db_history.update_record( collection=env.MONITORING_COLLECTION_HISTORY_BEST_NAME, filter_=filter_, data=query_insert) update_cache(db_name, 0) except Exception as e: logger.error("Best scenarios failed") update_cache(db_name, 0)
def get_sales_plan():
    """Returns the sales plan records that are stored in the database"""
    records = DBAccess(env.DB_NAME).get_all_records('sales_plan')
    records.pop("_id", None)
    return records
def serve(self, cycle):
    """
    Creating tasks and sending them to the broker
    :param cycle:
    :return:
    """
    # reset scenarios tables
    db = DBAccess(env.DB_RESULT_NAME)
    db.clear_collection(env.DB_GLOBAL_RESULT_COLLECTION_NAME)
    db.clear_collection(env.DB_DETAILED_RESULT_COLLECTION_NAME)
    db.clear_collection(env.DB_SENSITIVITY_COLLECTION_NAME)
    db.create_index(
        env.DB_GLOBAL_RESULT_COLLECTION_NAME,
        [("Cost PV", pymongo.DESCENDING), ("Scenario", pymongo.ASCENDING)]
    )
    db.create_index(
        env.DB_DETAILED_RESULT_COLLECTION_NAME,
        [("Scenario", pymongo.ASCENDING)]
    )
    db.save_to_db_no_check(env.DB_SENSITIVITY_COLLECTION_NAME,
                           {"NH3": 0, "ACS": 0, "HCl": 0, "Raw water": 0,
                            "Electricity": 0, "K09": 0, "Rock": 0, "Scenario": -1})

    # one task per phase of the cycle
    data = []
    for i in range(cycle):
        data.append(json.dumps({
            "cycle": cycle,
            "phase": i,
            "db_name": env.DB_NAME,
            "logistics_lp": env.LOGISTICS_LP
        }))
    broker = Broker(env.RABBITMQ_SIMULATOR_QUEUE_NAME)
    broker.publish(data)
from app.data.DBAccess import DBAccess
from app.model.Simulator import *
import app.config.env as env
import cProfile
from multiprocessing import Pool, TimeoutError, Process
from flask import Response, render_template
from flask import Flask
import dash
import dash_bootstrap_components as dbc
import dash_core_components as dcc
import dash_html_components as html
from dash.dependencies import Input, Output

server = Flask(__name__)
db = DBAccess(env.DB_RESULT_NAME)
db.clear_collection(env.DB_GLOBAL_RESULT_COLLECTION_NAME)
db.clear_collection(env.DB_DETAILED_RESULT_COLLECTION_NAME)
simulator = Simulator()


@server.route('/')
def index():
    return 'Test'


app = dash.Dash(__name__,
                server=server,
                routes_pathname_prefix='/dash/',
                external_stylesheets=[dbc.themes.BOOTSTRAP])

app.layout = html.Div(
    [
# -*- coding: utf-8 -*-
from app.config.env_func import reset_db_name
from app.config.env import DB_SENSITIVITY_COLLECTION_NAME
from app.data.DBAccess import DBAccess
import pandas as pd
import app.config.env as env
from app.data.Client import Driver
from app.risk.RiskEngine import RiskEngine
from tqdm import tqdm
import json
from app.tools import Utils

if __name__ == "__main__":
    reset_db_name('mine2farm')
    db = DBAccess(env.DB_RESULT_NAME)
    db.clear_collection(DB_SENSITIVITY_COLLECTION_NAME)

    raw_materials_sensitivity = []
    raw_materials_df = Driver().get_data("raw_materials")
    shocks = {}
    for raw_material in raw_materials_df:
        item = raw_material["Item"]
        shocks[item] = 1

    # scenarios_df = pd.DataFrame(Driver().get_results(DB_GLOBAL_BEST_RESULT_COLLECTION_NAME))
    scenarios_df = pd.read_csv(env.APP_FOLDER + "outputs/global.csv")
    scenarios_dic = Utils.get_scenario_from_df(scenarios_df)
    for scenario_id in scenarios_dic:
        risk_engine = RiskEngine()
        deltas = risk_engine.compute_delta(scenarios_dic[scenario_id], shocks)
        deltas['Scenario'] = int(scenario_id)
        db.save_to_db_no_check(DB_SENSITIVITY_COLLECTION_NAME, deltas)
# -*- coding: utf-8 -*-
import os
from app.dashboard.Monitor import MONITOR_SERVER
from app.config import env
import webbrowser
from app.data.DBAccess import DBAccess
from app.tools.monitor_tools import update_mongo_bi

if __name__ == '__main__':
    # os.environ["FLASK_ENV"] = env.MODE_APP
    DBAccess("dummy").clear_collection('dummy')
    DBAccess("dummy").save_to_db_no_check("dummy", [{"dummy": "dummy"}])
    webbrowser.open_new_tab("http://%s:%s" % (env.MONITORING_SERVER, env.MONITORING_PORT))
    MONITOR_SERVER.run(host=env.MONITORING_SERVER,
                       port=env.MONITORING_PORT,
                       debug=env.MODE_DEBUG)
def __init__(self):
    self.db = DBAccess(env.DB_RESULT_NAME)