Example #1
    def __init__(self,
                 hospital_specs: HospitalSpecs,
                 logger: Optional[logging.Logger] = None):
        super().__init__(hospital_specs=hospital_specs)
        self.logger = logger if logger is not None else get_logger(
            f"{__file__} - Evaluator",
            file_path=LOGGING_DIR.joinpath(
                f"{Path(__file__).resolve().stem}.log"))
Example #2
    def __init__(self,
                 analyser: Analyser,
                 logger: Optional[logging.Logger] = None,
                 output_dir: Path = OUTPUT_DIR):
        self.analyser = analyser
        self.hospital_specs = self.analyser.hospital_specs
        self.output_dir = output_dir
        self.logger = logger if logger is not None else get_logger(
            "modeller", self.output_dir.joinpath("modeller.log"))
Example #3
def compute_optimum(specsfile: str, rejection: bool, profit: bool):
    """Analyse different capacity combinations according to the specified
    optimisation problem.

    Take results from the specified directory.
    """

    with open(specsfile, "r") as f:
        specs = json.load(f)

    output_dir = Path(
        specs["output_dir"]
    ) if specs["output_dir"] is not None else OUTPUT_DIR_OPTIMISATION
    if not output_dir.is_dir():
        output_dir.mkdir()

    input_dir = Path(specs["input_dir"])
    if not input_dir.is_dir():
        raise NotADirectoryError(input_dir)

    timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M")

    logger = get_logger(
        "compute_optimum",
        output_dir.joinpath(f"compute_optimum - {timestamp}.log"))

    optimisation_specs = specs["optimisation_specs"]
    utilisation_constraints = np.array(
        optimisation_specs["utilisation_constraints"])
    bed_costs = np.array(optimisation_specs["bed_costs"])
    rejection_costs = np.array(optimisation_specs["rejection_costs"])
    eps_rejection = optimisation_specs["eps_rejection"]
    eps_profit_rejection = optimisation_specs["eps_profit_rejection"]

    if not (profit or rejection):
        logger.info("No optimisation routine specified.")
        raise ValueError("No optimisation routine specified.")

    results = _get_evaluation_results(Path(input_dir), logger=logger)

    _create_plots(results=results,
                  rejection=rejection,
                  profit=profit,
                  logger=logger,
                  utilisation_constraints=utilisation_constraints,
                  rejection_costs=rejection_costs,
                  bed_costs=bed_costs,
                  eps_rejection=eps_rejection,
                  eps_profit_rejection=eps_profit_rejection,
                  output_dir=output_dir)
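compute_optimum only consumes the keys read in the body above. A minimal sketch of a matching specs file and call; all values are illustrative, not project defaults:

import json

specs = {
    "output_dir": None,                  # None falls back to OUTPUT_DIR_OPTIMISATION
    "input_dir": "results/evaluation",   # must be an existing directory
    "optimisation_specs": {
        "utilisation_constraints": [0.85, 0.85],
        "bed_costs": [1.0, 1.2],
        "rejection_costs": [5.0, 5.0],
        "eps_rejection": 0.05,
        "eps_profit_rejection": 0.05,
    },
}
with open("compute_optimum_specs.json", "w") as f:
    json.dump(specs, f, indent=2)

compute_optimum("compute_optimum_specs.json", rejection=True, profit=False)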
Example #4
    def __init__(self,
                 hospital_specs: HospitalSpecs,
                 simulation_evaluator_specs:
                 SimulationEvaluatorSpecs = SimulationEvaluatorSpecs(),
                 logger: Optional[logging.Logger] = None):

        logger = logger if logger is not None else get_logger(
            f"{__file__} - Simulation Evaluator",
            file_path=LOGGING_DIR.joinpath(
                f"{__file__} - Simulation Evaluator.log"))
        super().__init__(hospital_specs, logger)
        self.specs = simulation_evaluator_specs
        self.runs = 0
        self.error = float("inf")
Example #5
    def __init__(self,
                 wards: List[WardModel],
                 logger: Optional[logging.Logger] = None):
        self.logger = logger if logger is not None else get_logger(
            "HospitalModel", LOGGING_DIR.joinpath("hospital_model.log"))
        self.wards = {ward.name: ward for ward in wards}

        self.ward_map_inv: Dict[int, str] = {
            i: ward.name
            for i, ward in enumerate(wards)
        }
        self.ward_map = {
            ward: index
            for index, ward in self.ward_map_inv.items()
        }
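The two comprehensions above build mutually inverse index maps. With illustrative ward names:

names = ["ICU", "PACU"]
ward_map_inv = {i: name for i, name in enumerate(names)}          # {0: "ICU", 1: "PACU"}
ward_map = {ward: index for index, ward in ward_map_inv.items()}  # {"ICU": 0, "PACU": 1}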
Example #6
    def __init__(self,
                 filepath: Path = Path(),
                 sep: str = ";",
                 startdate: datetime = datetime(2018, 1, 1),
                 enddate: datetime = datetime(2020, 1, 1),
                 datescale: timedelta = timedelta(1),
                 logger: Optional[logging.Logger] = None,
                 **kwargs: Any) -> None:
        self.data = pd.read_csv(filepath, sep=sep, **kwargs)
        self.startdate = startdate
        self.enddate = enddate
        self.datescale = datescale
        self.data_backup = self.data.copy()
        self.logger = logger if logger is not None else get_logger(
            "Preprocessor", filepath.parent.joinpath("preprocessor.log"))
Example #7
    def load(filepath: Path = Path(),
             logger: Optional[logging.Logger] = None) -> "HospitalModel":
        """Load the HospitalModel from json via load_dict.

        :param filepath: Filepath to load from.
        :param logger: Logger to use.

        :return: Loaded HospitalModel instance.
        """
        if logger is None:
            logger = get_logger("hospital_model",
                                LOGGING_DIR.joinpath("hospital_model.log"))
        with open(filepath, "r") as f:
            arguments = json.load(f)
        arguments["logger"] = logger
        return HospitalModel.load_dict(arguments)
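A short usage sketch for the loader; the path is illustrative and follows the filename pattern written by make_hospital_model in Example #14:

from pathlib import Path

# logger is omitted, so load() falls back to LOGGING_DIR/hospital_model.log
model = HospitalModel.load(
    filepath=Path("output/HospitalModel - cart[False].json"))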
Example #8
    def __init__(self,
                 name: str,
                 arrival: np.ndarray,
                 service: Service,
                 routing: np.ndarray,
                 occupancy: np.ndarray,
                 capacity: int = 1,
                 logger: Optional[logging.Logger] = None):
        self.name = name
        self.arrival = arrival
        self.service = service
        self.routing = routing
        self._occupancy = occupancy
        self.capacity = capacity
        self.logger = logger if logger is not None else get_logger(
            "WardModel", LOGGING_DIR.joinpath("ward_model.log"))
Example #9
    def __init__(self,
                 hospital_specs: HospitalSpecs,
                 truncation: float = 1.,
                 logger: Optional[logging.Logger] = None):

        logger = logger if logger is not None else get_logger(
            f"{__file__} - Markov Evaluator",
            file_path=LOGGING_DIR.joinpath(
                f"{__file__} - Markov Evaluator.log"))
        super().__init__(hospital_specs, logger=logger)
        if np.any(self.hospital_specs.holdings):
            self.logger.warning(
                "Markov Evaluator cannot handle holding logic, "
                "thus will ignore holdings. Holding information is "
                f"{self.hospital_specs.holdings}.")
        if any(len(w) > 0 for w in self.hospital_specs.waitings.values()):
            self.logger.warning(
                "Markov Evaluator cannot handle waiting logic, "
                "thus will ignore waiting. Waiting information is "
                f"{self.hospital_specs.waitings}.")

        if self.hospital_specs.U != 1:
            self.logger.warning(
                "Markov Evaluator cannot handle multiple classes, "
                f"thus will ignore all classes besides 1. "
                f"Number of classes is {self.hospital_specs.U}")
        if np.any(self.hospital_specs.arrival[:, 0, 1] != 0):
            self.logger.warning(
                "Markov Evaluator cannot handle internal arrival, "
                "thus will ignore this information.")

        self.logger.info(
            "Taking only the arrival rate; not differentiating between "
            "exponential and hypererlang.")
        self.arrival_rate = self.hospital_specs.arrival_rate[:, 0,
                                                             0].reshape(-1)
        self.service_rate = self.hospital_specs.service_rate[:, 0].reshape(-1)
        # allow truncation, i.e. only observing the upper states.

        self.Q = np.zeros((1, ))
        self.full_pi = np.zeros((1, ))
        self._truncation = 1.
        self.set_truncated(truncation)
        self.pi = np.zeros((self.hospital_specs.capacities.shape[0],
                            self.hospital_specs.capacities.max() + 1))

        self.shape = self.arrival_rate.shape
Example #10
    def adjust_routing(routing: np.ndarray,
                       capacities: np.ndarray,
                       holdings: List[bool],
                       occupancy: np.ndarray,
                       logger: Optional[logging.Logger] = None) -> np.ndarray:
        """Adjust routing.

        :param routing: Routing to consider.
        :param capacities: Capacities to consider.
        :param holdings: If a ward holds (then do not adjust routing).
        :param occupancy: Occupancy to consider.
        :param logger: Logger to use.

        :return: Adjusted routing matrix.
        """

        if logger is None:
            logger = get_logger("adjust_routing",
                                LOGGING_DIR.joinpath("adjust_routing.log"))
        I = len(capacities)

        # adjust routing!
        routing = routing.copy()
        routing_ = routing.copy()
        for index, val in np.ndenumerate(routing):
            if (not holdings[index[0]]) and (index[2] != routing.shape[2] - 1):
                routing_[index] = val / (
                    1 - occupancy[index[2], capacities[index[2]]])

        routing_[:, :, I, 0] = 1 - routing_[:, :, :I, :].sum(axis=(2, 3))

        if np.any(routing_ < 0) or np.any(routing_ > 1):
            for index in np.ndindex(routing_.shape[:2]):
                if np.any(routing_[index] < 0) or np.any(routing_[index] > 1):
                    logger.warning(
                        f"Routing issue on index: {index}\n"
                        f"Re-adjusting the matrix:\n{routing_[index]}")
                    routing_[index] = np.maximum(routing_[index],
                                                 0) / np.maximum(
                                                     routing_[index], 0).sum()

        return routing_
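The core adjustment divides each observed transition probability by the probability that the target ward is not full, so blocked transfers do not deflate the estimate, and the last column (index I) absorbs the remainder so every row still sums to one. A single-entry illustration with made-up numbers:

observed_p = 0.3    # share of discharges observed to move to ward j
p_full = 0.25       # occupancy[j, capacities[j]]: probability that ward j is at capacity
adjusted_p = observed_p / (1 - p_full)   # 0.3 / 0.75 = 0.4, mirroring routing_[index] above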
Example #11
def adjust_data(filepath: Path,
                wards: List[str],
                startdate: datetime,
                enddate: datetime,
                keep_internal: bool = False,
                logger: Optional[logging.Logger] = None) -> Preprocessor:
    """Adjust already preprocessed data.

    :param filepath: Path to file.
    :param wards: Wards to consider in analysis.
    :param startdate: Startdate to take.
    :param enddate: Enddate to take.
    :param keep_internal: Whether information on internal wards should be kept.
    :param logger: Logger to use.

    :return: Preprocessor instance with preprocessed data.
    """

    if logger is None:
        logger = get_logger(__file__,
                            file_path=filepath.parent.joinpath(
                                f"{Path(__file__).resolve().stem}.log"))

    preprocessor = Preprocessor(filepath=filepath, logger=logger)
    preprocessor.split_data()

    preprocessor.logger.info("Adjust start- and enddate.")
    preprocessor.alter_begin(startdate)
    preprocessor.alter_end(enddate)

    preprocessor.logger.info("Restrict wards if necessary.")
    preprocessor.restrict_ward(wards)

    if not keep_internal:
        preprocessor.logger.info(
            "Drop information on internal wards not under consideration.")
        preprocessor.drop_internal()

    return preprocessor
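A usage sketch; the file path, ward names and dates are illustrative:

from datetime import datetime
from pathlib import Path

preprocessor = adjust_data(filepath=Path("data/preprocessed.csv"),
                           wards=["ICU", "PACU"],
                           startdate=datetime(2019, 1, 1),
                           enddate=datetime(2019, 12, 1),
                           keep_internal=False)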
Example #12
    def __init__(self,
                 hospital_specs: HospitalSpecs,
                 logger: Optional[logging.Logger] = None):

        logger = logger if logger is not None else get_logger(
            f"{__file__} - Analytic Evaluator",
            file_path=LOGGING_DIR.joinpath(
                f"{__file__} - Analytic Evaluator.log"))
        super().__init__(hospital_specs, logger=logger)
        if np.any(self.hospital_specs.holdings):
            self.logger.warning(
                "Analytic Evaluator cannot handle holding logic, "
                "thus will ignore holdings. Holding information is "
                f"{self.hospital_specs.holdings}.")
        if any(len(w) > 0 for w in self.hospital_specs.waitings.values()):
            self.logger.warning(
                "Analytic Evaluator cannot handle waiting logic, "
                "thus will ignore waiting. Waiting information is "
                f"{self.hospital_specs.waitings}.")
        if np.any(self.hospital_specs.arrival[:, :, 1] != 0):
            self.logger.warning(
                "Analytic Evaluator cannot handle internal arrival, "
                "thus will ignore this information.")
        self.arrival_rate = self.hospital_specs.arrival_rate[:, :, 0]
        self.service_rate = self.hospital_specs.service_rate[:, :self.
                                                             hospital_specs.U]
        self.nz_service_rate = self.service_rate.copy()
        self.nz_service_rate[self.nz_service_rate == 0] = 1
        self.shape = self.arrival_rate.shape
        self.alpha_lin = np.zeros_like(self.arrival_rate)
        self.alpha_res = np.zeros_like(self.arrival_rate)
        self.alpha_res_success = False
        self.alpha_res_message = None
        self.pi_lin = np.zeros((self.service_rate.shape[0],
                                self.hospital_specs.capacities.max() + 1),
                               dtype="float")
        self.pi_res = np.zeros((self.service_rate.shape[0],
                                self.hospital_specs.capacities.max() + 1),
                               dtype="float")
Example #13
    def __init__(self,
                 hyper: List[float],
                 paramno: Optional[int] = None,
                 logger: Optional[logging.Logger] = None,
                 **kwargs: Union[np.ndarray, List[float]]):
        # membership check avoids evaluating the truth value of a numpy array
        if "lambd" in kwargs:
            lambd = np.asarray(kwargs.pop("lambd"))
            kwargs["scale"] = 1 / lambd

        super().__init__(scipy.stats.erlang, hyper, **kwargs)

        if paramno is not None:
            self.paramno = paramno

        self.lambd = 1 / np.asarray(kwargs["scale"]).reshape(-1)

        self.a = np.asarray(kwargs["a"]).reshape(-1)

        self.convergence_error = np.inf
        self.log_likelihood_fit = -np.inf

        self.logger = logger if logger is not None else get_logger(
            "hypererlang_distribution",
            LOGGING_DIR.joinpath("hypererlang_distribution.log"))
Example #14
def make_hospital_model(
        filepath: Path,
        wards: List[str],
        capacities: List[int],
        startdate: datetime = datetime(2019, 1, 1),
        enddate: datetime = datetime(2019, 12, 1),
        cart_specs: Optional[CartSpecs] = None,
        hypererlang_specs: Optional[HypererlangSpecs] = None,
        adjust_pacu_occupancy: bool = True,
        output_dir: Path = OUTPUT_DIR,
        logger: Optional[logging.Logger] = None) -> List[HospitalModel]:
    """Make hospital model.

    :param filepath: Path to excel file to analyse.
    :param wards: Wards to consider and their respective capacities.
    :param capacities: Capacities for wards.
    :param startdate: Startdate to use.
    :param enddate: Enddate to use.
    :param cart_specs: Specifications for CART analysis.
    :param hypererlang_specs: Specifications for hypererlang fit.
    :param adjust_pacu_occupancy: Adjust pacu occupancy because of weekends.
    :param output_dir: Output_dir to use for plots.
    :param logger: Logger to use for logging.

    :return: Created HospitalModels.
    """
    timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
    output_dir = output_dir.joinpath("Modelling - " + timestamp)
    output_dir.mkdir()

    if logger is None:
        logger = get_logger(__file__,
                            file_path=output_dir.joinpath(
                                f"{Path(__file__).resolve().stem}.log"))

    if hypererlang_specs is None:
        hypererlang_specs = HypererlangSpecs()

    if cart_specs is None:
        cart_specs = CartSpecs(wards=wards)

    preprocessor = adjust_data(filepath=filepath,
                               wards=wards,
                               startdate=startdate,
                               enddate=enddate,
                               keep_internal=True,
                               logger=logger)

    analyser = analyse(preprocessor=preprocessor,
                       wards=wards,
                       capacities=capacities,
                       output_dir=output_dir,
                       adjust_pacu_occupancy=adjust_pacu_occupancy,
                       logger=logger)
    plt.close()

    hospital_models: List[HospitalModel] = []
    for cart in [False, True]:
        modeller = Modeller(analyser=analyser,
                            logger=logger,
                            output_dir=output_dir)

        c = "multiple classes" if cart else "one class"
        modeller.logger.info(f"Build model for {c}.")

        modeller = model_class_arrival_routing(modeller=modeller,
                                               make_cart=cart,
                                               cart_specs=cart_specs)
        plt.close()

        hospital_specs_service = modeller.service_fit(
            distributions=[
                fit_expon,
                lambda x: fit_hypererlang(x, specs=hypererlang_specs)
            ],
            filename=f"service_fit - cart[{cart}]")
        plt.close()

        ward_models = []
        for ward in wards:
            ward_index = hospital_specs_service[0].ward_map_inv[ward]
            arrival = hospital_specs_service[0].arrival[ward_index:ward_index +
                                                        1]
            service = Service(
                expon=hospital_specs_service[0].service[ward_index:ward_index +
                                                        1],
                hypererlang=hospital_specs_service[1].
                service[ward_index:ward_index + 1])
            occupancy = np.array(modeller.analyser.occupancies[0][ward])
            routing = hospital_specs_service[0].routing[ward_index, :, :-1, :]
            ward_models.append(
                WardModel(name=ward,
                          arrival=arrival,
                          service=service,
                          routing=routing,
                          occupancy=occupancy))
            plt.close()

        hospital_model = HospitalModel(wards=ward_models)
        filename = f"HospitalModel - cart[{cart}].json"
        hospital_model.save(filepath=output_dir.joinpath(filename))

        modeller.logger.info(
            f"Model for {c} saved in {output_dir.joinpath(filename)}.")

        hospital_models.append(hospital_model)
    return hospital_models
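A usage sketch for the pipeline above; paths, wards and capacities are illustrative. Because the loop runs over cart in [False, True], the returned list holds the model without CART classes first, then the CART-based one:

from datetime import datetime
from pathlib import Path

models = make_hospital_model(filepath=Path("data/hospital_data.csv"),
                             wards=["ICU", "PACU"],
                             capacities=[12, 8],
                             startdate=datetime(2019, 1, 1),
                             enddate=datetime(2019, 12, 1))
model_no_cart, model_cart = models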
Example #15
def simulate_optimum(specsfile: str, model: str, waiting: bool,
                     rejection: bool, profit: bool):
    """Analyse different capacity combinations according to the specified
    optimisation problem."""

    with open(specsfile, "r") as f:
        specs = json.load(f)

    output_dir = Path(
        specs["output_dir"]
    ) if specs["output_dir"] is not None else OUTPUT_DIR_OPTIMISATION
    if not output_dir.is_dir():
        output_dir.mkdir()

    timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M")

    logger = get_logger(
        "simulate_optimum",
        output_dir.joinpath(f"simulate_optimum - {timestamp}.log"))

    if specs["modelfile"] is None:
        logger.info(
            f"No model file given. Falling back to the default model from {EXAMPLE_MODEL_NO_CART}."
        )
        modelfile = EXAMPLE_MODEL_NO_CART
    else:
        modelfile = Path(specs["modelfile"])

    if not modelfile.is_file():
        raise FileNotFoundError(modelfile)

    wards = specs["wards"]
    capacities = specs["capacities"]
    adjust_int_rates = specs["adjust_int_rates"]
    service_name = specs["service_name"]
    if service_name not in ["expon", "hypererlang"]:
        raise ValueError(
            f"service_name has to be one of [expon, hypererlang]. Current value: {service_name}."
        )
    waitings = specs["waitings"] if waiting else dict()
    simulation_evaluator_specs = SimulationEvaluatorSpecs(**specs["DES_specs"])
    optimisation_specs = specs["optimisation_specs"]
    lower_capacities = np.array(optimisation_specs["lower_capacities"])
    upper_capacities = np.array(optimisation_specs["upper_capacities"])
    utilisation_constraints = np.array(
        optimisation_specs["utilisation_constraints"])
    bed_costs = np.array(optimisation_specs["bed_costs"])
    rejection_costs = np.array(optimisation_specs["rejection_costs"])
    eps_rejection = optimisation_specs["eps_rejection"]
    eps_profit_rejection = optimisation_specs["eps_profit_rejection"]

    results: List[EvaluationResults] = []

    combinations = np.prod(upper_capacities - lower_capacities + 1)
    logger.info(f"Start simulation of possible capacity combinations. "
                f"# combinations={combinations}.")

    ward_capacity = dict(zip(wards, capacities))

    hospital_model = HospitalModel.load(filepath=modelfile, logger=logger)
    hospital_specs = hospital_model.get_model(
        model=int(model),
        capacities=ward_capacity,
        service_name=service_name,
        adjust_int_rates=adjust_int_rates,
        waitings=waitings)

    if not (profit or rejection):
        logger.info("No optimisation routine specified.")
        raise ValueError("No optimisation routine specified.")

    timestamp = datetime.datetime.now().strftime("%Y%m%d_%H%M%S")
    results_dir = output_dir.joinpath("Simulation results - " + timestamp)
    results_dir.mkdir()

    for i, index in enumerate(
            np.ndindex(*(upper_capacities - lower_capacities + 1))):
        capacities_ = lower_capacities + np.array(index)

        ward_capacity = dict(zip(wards, capacities_))

        hospital_specs.set_capacities(**ward_capacity)

        logger.info(
            f"Simulate model on capacities {capacities_}. {i + 1} of {combinations}"
        )

        simulation_evaluator = SimulationEvaluator(
            hospital_specs=hospital_specs,
            simulation_evaluator_specs=simulation_evaluator_specs,
            logger=logger)
        simulation_evaluator.evaluate()

        key = f"{modelfile.name},model:{model},service:{service_name},waiting:{waiting}"
        simulation_evaluator.name = key

        results.append(simulation_evaluator)

        simulation_evaluator.save(
            results_dir.joinpath(
                f"simulation_result-capacities"
                f"{list(simulation_evaluator.hospital_specs.capacities)}.json")
        )

    _create_plots(results=results,
                  rejection=rejection,
                  profit=profit,
                  logger=logger,
                  utilisation_constraints=utilisation_constraints,
                  rejection_costs=rejection_costs,
                  bed_costs=bed_costs,
                  eps_rejection=eps_rejection,
                  eps_profit_rejection=eps_profit_rejection,
                  output_dir=output_dir)
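simulate_optimum expects a richer specs file than compute_optimum; the keys below mirror exactly what the body reads, with illustrative values:

import json

specs = {
    "output_dir": None,                # None falls back to OUTPUT_DIR_OPTIMISATION
    "modelfile": None,                 # None falls back to EXAMPLE_MODEL_NO_CART
    "wards": ["ICU", "PACU"],
    "capacities": [12, 8],
    "adjust_int_rates": True,
    "service_name": "expon",           # or "hypererlang"
    "waitings": {},                    # only used when waiting=True
    "DES_specs": {},                   # forwarded to SimulationEvaluatorSpecs(**...)
    "optimisation_specs": {
        "lower_capacities": [10, 6],
        "upper_capacities": [14, 10],
        "utilisation_constraints": [0.85, 0.85],
        "bed_costs": [1.0, 1.2],
        "rejection_costs": [5.0, 5.0],
        "eps_rejection": 0.05,
        "eps_profit_rejection": 0.05,
    },
}
with open("simulate_optimum_specs.json", "w") as f:
    json.dump(specs, f, indent=2)

simulate_optimum("simulate_optimum_specs.json", model="0",
                 waiting=False, rejection=True, profit=False)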
Example #16
def preprocess(filepath: List[Path],
               startdate: datetime,
               enddate: datetime,
               ward_key_map: Dict[str, List[str]],
               internal_prefix: List[str],
               urgency_split_string: str,
               birth_format: str,
               flow_split_string: str,
               timestamp_format: str,
               logger: Optional[logging.Logger] = None) -> Preprocessor:
    """Preprocess given excel file.

    :param filepath: Path to file.
    :param startdate: Startdate to take.
    :param enddate: Enddate to take.
    :param ward_key_map: Map for hospital wards to their respective keys.
    E.g.: Sometimes multiple keys refer to the same ward; map them here.
    :param internal_prefix: Prefix associated with internal wards.
    E.g.: All wards of one hospital can often be identified by a common prefix.
    Alternatively, a list of all wards to be considered internal can be provided.
    :param urgency_split_string: String at which the urgency field should be split.
    :param birth_format: Format of birthday column.
    :param flow_split_string: String at which the flow field should be split.
    :param timestamp_format: Format for timestamp to use.
    :param logger: Logger to use.

    :return: Preprocessor instance with preprocessed data.
    """

    if logger is None:
        logger = get_logger(__file__,
                            file_path=filepath[0].parent.joinpath(
                                f"{Path(__file__).resolve().stem}.log"))

    preprocessor = Preprocessor(filepath[0],
                                startdate=startdate,
                                enddate=enddate,
                                logger=logger)

    for file in filepath[1:]:
        preprocessor.add(file)

    preprocessor.logger.info(
        f"Successfully imported data with shape {preprocessor.data.shape} "
        f"and columns\n{list(preprocessor.data.columns)}.")

    preprocessor.logger.info("Make patient flow.")
    preprocessor.make_flow(split_str=flow_split_string)

    preprocessor.logger.info("Clean from empty rows.")
    preprocessor.clean_empty()

    preprocessor.logger.info("Replace ward keys by mapping.")
    preprocessor.replace_ward_keys(ward_map=ward_key_map,
                                   internal_prefix=internal_prefix)

    preprocessor.logger.info("Make urgency (N0,...,N5) if existent.")
    preprocessor.make_urgency(split_str=urgency_split_string)

    preprocessor.logger.info("Clean data. This may take a while.")
    preprocessor.clean_data()
    preprocessor.logger.info("Finished cleaning.")

    preprocessor.logger.info("Make age from birth if existent.")
    preprocessor.make_age(format=birth_format)

    preprocessor.logger.info("Make timestamps to float.")
    preprocessor.datetimes_to_float(format=timestamp_format)

    preprocessor.data.loc[:, PRE_CLASS] = 0
    preprocessor.data.loc[:, CURRENT_CLASS] = 0
    preprocessor.data.loc[:, POST_CLASS] = 0

    preprocessor.logger.info("Prune data at beginning.")
    preprocessor.prune_begin()

    preprocessor.logger.info("Prune data at end.")
    preprocessor.prune_end()

    return preprocessor
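A usage sketch for the full preprocessing entry point; every argument value is illustrative, since ward keys, prefixes and formats depend on the hospital's export:

from datetime import datetime
from pathlib import Path

preprocessor = preprocess(
    filepath=[Path("data/export_2019.csv")],
    startdate=datetime(2019, 1, 1),
    enddate=datetime(2019, 12, 1),
    ward_key_map={"ICU": ["ICU1", "ICU2"], "PACU": ["PACU"]},
    internal_prefix=["H1-"],
    urgency_split_string="N",
    birth_format="%d.%m.%Y",
    flow_split_string="->",
    timestamp_format="%d.%m.%Y %H:%M")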