def run(self):
    """Worker entry point: fetch prepared data and report it via signals.

    On success, emits ``result`` with the prepared data.  On
    ``DataManagerError``, emits ``error`` with the exception and logs the
    full traceback to the "main_program" logger.  ``finished`` is emitted
    unconditionally so the caller can always detect completion.
    """
    try:
        prepared = DataManager(DbManager()).prepare_necessary_data()
        self.signals.result.emit(prepared)
    except DataManagerError as err:
        # Surface the failure to the UI first, then record the traceback.
        self.signals.error.emit(err)
        get_logger("main_program").exception(err)
    finally:
        # Always signal completion, whether the fetch succeeded or not.
        self.signals.finished.emit()
from main_window import UiMainWindow
from PyQt5 import QtCore, QtGui, QtWidgets
from data_manager import DataManager
from db_manager import DbManager, DbManagerError
import logging_setup
import sys

if __name__ == "__main__":
    # Run the main application: build the Qt app, wire the UI to the
    # data layer, show the main window, and enter the event loop.
    app = QtWidgets.QApplication(sys.argv)
    main_window = QtWidgets.QMainWindow()
    main_logger = logging_setup.get_logger("main_program")
    try:
        data_base = DbManager()
        data_manager = DataManager(data_base)
        ui = UiMainWindow(data_manager, main_logger)
        ui.setup_ui(main_window)
        main_window.show()
    except DbManagerError as e:
        # Without a working database the UI was never shown.  Log the
        # traceback and exit with a failure status instead of falling
        # through to app.exec_(), which would start an event loop with
        # no visible window and leave the process hanging.
        main_logger.exception(e)
        sys.exit(1)
    sys.exit(app.exec_())
def test_logging_setup():
    """get_logger returns a logger named after its argument at DEBUG level."""
    log = logging_setup.get_logger("api_flow")
    assert log.name == "api_flow"
    assert log.level == logging.DEBUG
import datetime import logging_setup logger = logging_setup.get_logger("data_parsing") class DataProcessingError(Exception): pass class DataProcessor: """It is responsible for processing data from the API to suitable format for database. If error is occurred during parsing data it is being ignored and then logged into data_parsing.log """ API_DATE_FORMAT = "%Y-%m-%d %H:%M:%S" @classmethod def parse_cities(cls, data): """ Parses data from API/stations for cities. :type data: list :rtype: list """ cities = [] for station in data: try: city = station["city"] if city: cities.append((int(city["id"]), str(city["name"]).title(),
from db import sql2df from logging_setup import get_logger import argparse import pandas as pd import datetime import numpy as np import re log = get_logger("export_network") TOP_N_CREDITORS = 100 MAX_DATE = datetime.date(2019, 1, 1) def export_network(network_type, nodes_tsv, edges_tsv): creditors_df = extract_creditors() creditor_ids = list(creditors_df.id) insolvencies_df = extract_insolvencies(creditor_ids) debtors_df = insolvencies2debtors(insolvencies_df) insolvency_end_dates_df = extract_insolvency_end_dates() nodes_df = pd.DataFrame([], columns=["id", "name", "node_type"]) nodes_df = nodes_df.append(creditors_df).append(debtors_df) edges_df = pd.DataFrame([], columns=[ "source_id", "target_id", "edge_type", "start_date", "end_date" ]) log.info("Extracting network type = {}".format(network_type)) if network_type == "debtor->creditor":
import requests import logging_setup import datetime from db_manager import DbManager, DbManagerError from data_processor import DataProcessor, DataProcessingError logger = logging_setup.get_logger("api_flow") class DataManagerError(Exception): pass class ApiError(Exception): pass class DataManager: """Responsible for all data flow in application.""" MAX_CACHE_INTERVAL = 600 URL_BASE = "http://api.gios.gov.pl/pjp-api/rest" URL_STATIONS = '/station/findAll' URL_SENSORS = "/station/sensors" URL_DATA = "/data/getData" def __init__(self, db_obj): """ :type db_obj: DbManager """ self.db = db_obj