Example 1
    def __init__(self, config=None, verbose=False):
        config = config or {}  # avoid a mutable default argument
        # check if it's CSC or GEM firmware
        fw_flavor = rw.read_reg("BEFE.SYSTEM.RELEASE.FW_FLAVOR")
        if fw_flavor == 0xdeaddead:
            exit()

        self.gem_csc = fw_flavor.to_string(use_color=False)
        self.is_csc = "CSC" in self.gem_csc

        if verbose:
            print("%s DAQ configuration:" % self.gem_csc)

        for config_name in self.config_names:
            if config_name in config:
                self.config[config_name] = config[config_name]
            elif utils.config_exists("CONFIG_DAQ_" + config_name):
                self.config[config_name] = utils.get_config("CONFIG_DAQ_" +
                                                            config_name)
            elif utils.config_exists("CONFIG_" + config_name):
                self.config[config_name] = utils.get_config("CONFIG_" +
                                                            config_name)
            else:
                raise Exception("ERROR: config %s not found" % config_name)
            if verbose:
                print("    %s = %d" % (config_name, self.config[config_name]))
Example 2
def main(config_path, env_name, train_mode=True, weights_path=None):
    """Load the environment, create an agent, and train it.
    """
    config = cutils.get_config(config_path)
    env = cutils.load_environment(env_name)
    action_size = env.action_space.n
    state_size = env.observation_space.shape

    memory = ReplayMem(buffer=config['exp_replay']['buffer'])
    av_model = SimpleNN(input_shape=state_size[0], output_shape=action_size)
    policy = EpsGreedy(eps=config['train']['eps_start'],
                       decay=config['train']['eps_decay'],
                       eps_end=config['train']['eps_end'])

    agent = DQN(config,
                seed=0,
                ob_space=state_size[0],
                ac_space=action_size,
                av_model=av_model,
                memory=memory,
                policy=policy)

    if weights_path is not None:
        agent.load(weights_path)

    game_logger = GameLogger(100,
                             10)  # TODO Add winning threshold to arguments
    player = Player(agent=agent,
                    env=env,
                    config=config,
                    game_logger=game_logger,
                    train_mode=train_mode)
    player.play()

    return player.glogger.scores
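A hedged usage sketch for `main`: a minimal CLI entry point, assuming the module is run as a script (the flag names are illustrative, not part of the original project):

# Hypothetical CLI wrapper around main(); flag names are illustrative.
if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser(description="Train or evaluate a DQN agent.")
    parser.add_argument("--config", required=True, help="Path to the config file.")
    parser.add_argument("--env", required=True, help="Environment name.")
    parser.add_argument("--eval", action="store_true", help="Run without training.")
    parser.add_argument("--weights", default=None, help="Optional weights to load.")
    args = parser.parse_args()

    scores = main(args.config, args.env,
                  train_mode=not args.eval,
                  weights_path=args.weights)
    print("Last score: %s" % (scores[-1] if scores else "n/a"))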
Example 3
    def __init__(self, number_simulated_data, generate_baseline_flag=False):

        logging.info("Initialising WeatherDataGen class.")

        # Ensure number_simulated_data is at least 1.
        if number_simulated_data < 1:
            logging.error(
                "The number of simulated data is less than 1. Value: {}.".
                format(number_simulated_data))
            raise ValueError(
                "number_simulated_data must be at least 1, got {}.".format(
                    number_simulated_data))

        # Retrieve configuration data.
        self.config_data = get_config()
        self.__number_simulated_data = number_simulated_data
        self.__generate_baseline_flag = generate_baseline_flag
        self.__locations = [
            get_city(loc) for loc in self.config_data["location"]
        ]
        self.__output_cols = self.config_data["simulation"]["output_columns"]
        self.__date_start_orig = self.config_data["simulation"]["date_start"]
        self.__date_end_orig = self.config_data["simulation"]["date_end"]
        self.__date_start = datetime.datetime.combine(
            self.__date_start_orig, datetime.time.min).timestamp()
        self.__date_end = datetime.datetime.combine(
            self.__date_end_orig, datetime.time.min).timestamp()

        # Get the baseline reference and aggregate file path.
        self.__output_base_reference_file_path = get_file_path(
            folder_name="data",
            subdirectory=self.config_data["gis"]["output_subdirectory"],
            file_name=self.config_data["gis"]
            ["output_base_reference_file_name"])
        self.__output_base_aggregate_file_path = get_file_path(
            folder_name="data",
            subdirectory=self.config_data["gis"]["output_subdirectory"],
            file_name=self.config_data["gis"]
            ["output_base_aggregate_file_name"])

        logging.info("Checking if the baseline data set exists.")

        # Checking if the baseline data set exists.
        if self.__generate_baseline_flag:
            get_gis_historical_data()
            aggregate_gis_historical_data()

        elif not os.path.exists(
                self.__output_base_reference_file_path) or not os.path.exists(
                    self.__output_base_aggregate_file_path):
            logging.info(
                "Baseline data set does not exist. Generating baseline data.")
            get_gis_historical_data()
            aggregate_gis_historical_data()

        else:
            logging.info("Baseline data sets exists.")

        # Reading baseline data sets.
        logging.info("Reading baseline reference data.")
        self.__reference_data = pd.read_csv(
            self.__output_base_reference_file_path)
        logging.info("Completed reading baseline reference data.")

        logging.info("Reading baseline aggregate data.")
        self.__aggregate_data = pd.read_csv(
            self.__output_base_aggregate_file_path)
        logging.info("Completed reading baseline aggregate data.")

        logging.info("Initialising output_data data frame.")

        # Check that the locations in the config file reconcile with the
        # baseline data.
        if set(self.__reference_data["Location"]) != set(self.__locations):
            logging.info(
                "Baseline data does not match the configured locations. "
                "Regenerating baseline data.")
            get_gis_historical_data()
            aggregate_gis_historical_data()
            # Reload the regenerated baseline data sets.
            self.__reference_data = pd.read_csv(
                self.__output_base_reference_file_path)
            self.__aggregate_data = pd.read_csv(
                self.__output_base_aggregate_file_path)

        # Initialising output_data dataframe.
        self.output_data = pd.DataFrame(columns=self.__output_cols)

        logging.info("Completed initialising WeatherDataGen class.")
Example 4
def get_gis_historical_data():
    """This function retrieves the baseline historical weather data for the
    locations supplied in the 'location' key of config.yaml. It uses the
    Dark Sky API to retrieve historical weather data such as temperature,
    humidity and pressure.
    """
    logging.info("Generating baseline reference and historical weather data.")

    # Initialising function variables
    fake = Faker()
    # geopy's Nominatim requires an explicit user_agent string.
    geolocator = Nominatim(user_agent="weather-data-gen")
    config_data = get_config()
    locations = config_data["location"]

    # Check if there are no duplicate locations in the config.yaml file.
    if len(locations) != len(set(locations)):
        logging.error(
            "Duplicate location found. Please check config.yaml file.")
        raise ValueError("Duplicate location found in config.yaml.")

    # Initialise pandas dataframe column name for baseline reference
    # and historical data.
    df_ref = pd.DataFrame(
        columns=["Location", "Latitude", "Longitude", "Elevation", "Timezone"])
    df_hist = pd.DataFrame(columns=[
        "Location", "Date", "Month", "Temperature_Min", "Temperature_Max",
        "Humidity", "Pressure"
    ])

    # Generate weather data for each location.
    for idx, loc in enumerate(locations):

        logging.info("Retrieving geolocation data for {}.".format(loc))

        # Retrieving geolocation data from geopy library.
        loc_data = geolocator.geocode(loc)

        logging.info("Check if the location {} is valid.".format(loc))
        if loc_data is None:
            logging.error(
                "Invalid location value supplied ({}). Please check config.yaml file."
                .format(loc))
            raise ValueError(
                "Invalid location value supplied: {}.".format(loc))
        logging.info("The location {} is valid.".format(loc))

        city = get_city(loc)
        lat = loc_data.latitude
        lon = loc_data.longitude

        # Retrieving elevation data for the location.
        elev = get_elevation_data(lat, lon)

        for month in range(1, 13):

            logging.info("Retrieving {} weather data for month {}.".format(
                loc, month))

            for sample in range(config_data["gis"]["sampling_number"]):

                temp_min = None
                temp_max = None
                humidity = None
                pressure = None

                while (temp_min is None or temp_max is None
                       or humidity is None or pressure is None):

                    year = random.randint(config_data["gis"]["year_start"],
                                          config_data["gis"]["year_end"])

                    _, last_day = calendar.monthrange(year, month)

                    datetime_start = datetime.datetime(year, month, 1)
                    datetime_end = datetime.datetime(year, month, last_day)

                    date_gen = fake.date_time_between_dates(
                        datetime_start=datetime_start,
                        datetime_end=datetime_end)

                    forecast = forecastio.load_forecast(
                        config_data["forecastio_api_key"],
                        lat,
                        lon,
                        time=date_gen,
                        units="si")

                    historical_data = forecast.json["daily"]["data"][0]

                    timezone = forecast.json.get("timezone", None)
                    temp_min = historical_data.get("temperatureMin", None)
                    temp_max = historical_data.get("temperatureMax", None)
                    humidity = historical_data.get("humidity", None)
                    if humidity is not None:
                        # Dark Sky reports humidity as a fraction of 1.
                        humidity *= 100
                    pressure = historical_data.get("pressure", None)

                df_temp_hist = pd.Series(
                    dict(
                        zip(df_hist.columns, [
                            city, date_gen, date_gen.month, temp_min, temp_max,
                            humidity, pressure
                        ])))

                # DataFrame.append was removed in pandas 2.0; use pd.concat.
                df_hist = pd.concat([df_hist, df_temp_hist.to_frame().T],
                                    ignore_index=True)

        df_temp_ref = pd.Series(
            dict(zip(df_ref.columns, [city, lat, lon, elev, timezone])))
        df_ref = pd.concat([df_ref, df_temp_ref.to_frame().T],
                           ignore_index=True)

    logging.info(
        "Generating position to consolidate latitude, longitude and elevation data"
    )
    df_pos = df_ref[["Latitude", "Longitude", "Elevation"]].round(2)
    df_pos["Elevation"] = df_pos["Elevation"].astype(int)
    df_ref["Position"] = df_pos.astype(str).apply(lambda x: ",".join(x),
                                                  axis=1)

    logging.info("Saving baseline reference data.")
    df_ref.to_csv(get_file_path(
        folder_name="data",
        subdirectory=config_data["gis"]["output_subdirectory"],
        file_name=config_data["gis"]["output_base_reference_file_name"]),
                  index=False)
    logging.info("Completed saving baseline reference data.")

    logging.info("Saving baseline historical data.")
    df_hist.to_csv(get_file_path(
        folder_name="data",
        subdirectory=config_data["gis"]["output_subdirectory"],
        file_name=config_data["gis"]["output_base_historical_file_name"]),
                   index=False)
    logging.info("Completed saving baseline historical data.")
Example 5
def aggregate_gis_historical_data():
    """This function aggregates baseline historical data by location and month
    for the weather parameters:
    - Temperature: Mean for minimum and maximum temperature
    - Humidity: Minimum and maximum humidity
    - Pressure: Minimum and maximum pressure
    """

    logging.info("Processing historical weather data aggregation.")

    # Initialising function variables
    config_data = get_config()

    # Build the file path of the baseline historical data.
    hist_file_path = get_file_path(
        folder_name="data",
        subdirectory=config_data["gis"]["output_subdirectory"],
        file_name=config_data["gis"]["output_base_historical_file_name"])

    # Define group by columns.
    group_by_cols = ["Location", "Month"]

    # Define aggregate columns.
    aggregate_cols = {
        "Temperature_Min": "mean",
        "Temperature_Max": "mean",
        "Humidity": ["min", "max"],
        "Pressure": ["min", "max"]
    }

    logging.info("Reading historical weather data.")

    # Read baseline historical data.
    df = pd.read_csv(hist_file_path)

    logging.info("Completed reading historical weather data.")

    logging.info("Aggregating historical weather data.")
    df_aggregate = df.groupby(group_by_cols,
                              as_index=False).aggregate(aggregate_cols)
    df_aggregate.columns = [
        "".join(name) for name in df_aggregate.columns.ravel()
    ]
    df_aggregate.rename(columns={
        "Temperature_Minmean": "T_avg_min",
        "Temperature_Maxmean": "T_avg_max",
        "Humiditymin": "H_min",
        "Humiditymax": "H_max",
        "Pressuremin": "P_min",
        "Pressuremax": "P_max"
    },
                        inplace=True)
    df_aggregate[
        "T_avg_range"] = df_aggregate["T_avg_max"] - df_aggregate["T_avg_min"]
    df_aggregate["H_range"] = df_aggregate["H_max"] - df_aggregate["H_min"]
    df_aggregate["P_range"] = df_aggregate["P_max"] - df_aggregate["P_min"]

    logging.info("Saving baseline aggregate data.")
    df_aggregate.to_csv(get_file_path(
        folder_name="data",
        subdirectory=config_data["gis"]["output_subdirectory"],
        file_name=config_data["gis"]["output_base_aggregate_file_name"]),
                        index=False)
    logging.info("Completed saving baseline aggregate data.")
Example 6
def get_elevation_data(lat, lon):
    """This function returns the elevation based on the GeoTIFF files located
    in the ./data/elevation/ folder, using the rasterio library.

    Parameters
    ----------
    lat : float
        The latitude of the location.
    lon : float
        The longitude of the location.

    Returns
    -------
    elev : int
         This is the elevation based on supplied coordinates.
    """

    logging.info("Getting elevation data for the coordinate ({}, {}).".format(
        lat, lon))

    # Initialising function variables
    grid_lat = None
    grid_lon = None
    coord = (lon, lat)
    config_data = get_config()["gis"]
    elev_file_name = config_data["input_file_name"]

    logging.info(
        "Determining the appropriate tif file for the coordinate ({}, {}).".
        format(lat, lon))

    # Determine location's latitude data from the image
    # grid. Valid values are 1 and 2.
    for key, value in config_data["latitude_condition"].items():

        if value["min_lat"] <= lat <= value["max_lat"]:
            grid_lat = value["grid_lat"]

    # Determine location's longitude data from the image
    # grid. Valid values are A, B, C and D.
    for key, value in config_data["longitude_condition"].items():

        if value["min_lon"] <= lon <= value["max_lon"]:
            grid_lon = value["grid_lon"]

    # Determine that there is a valid grid_lat and grid_lon data.
    if grid_lat is None or grid_lon is None:
        logging.error(
            "Invalid coordinate ({}, {}). Please check the value!".format(
                lat, lon))
        raise ValueError(
            "Coordinate ({}, {}) is outside the supported grid.".format(
                lat, lon))

    grid_id = "".join([grid_lon, grid_lat])
    file_name = elev_file_name.format(grid_id=grid_id)

    # Retrieve the elevation tif file path based on grid_id.
    elev_file_path = get_file_path(
        folder_name="data",
        subdirectory=config_data["input_subdirectory"],
        file_name=file_name)

    logging.info(
        "Retrieving elevation data for the coordinate ({}, {}) from file {}.".
        format(lat, lon, file_name))

    # Retrieve the elevation data found in elev_file_path.
    # sample() takes an iterable of (x, y) pairs and yields one array per pair.
    with rio.open(elev_file_path) as dataset:
        elev = next(dataset.sample([coord]))[0]

    logging.info(
        "Completed retrieving elevation data for the coordinate ({}, {}). Elevation value: {}."
        .format(lat, lon, elev))

    return elev
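A minimal stand-alone sketch of the rasterio sampling call used above, assuming a local GeoTIFF tile (the file path and coordinate are illustrative):

import rasterio as rio

# Hypothetical DEM tile; sample() takes (x, y) = (lon, lat) pairs.
with rio.open("data/elevation/tile_A1.tif") as dataset:
    for value in dataset.sample([(151.21, -33.87)]):
        print("Elevation:", value[0])  # first band of the sampled pixel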
Example 7
        :return:
        """
        for key, value in self.config.items():
            uri = 'mongodb://{user}:{passwd}@{host}:{port}/{db}'.format(
                db=key, **value)
            self.connections[key] = MongoClient(uri)

            try:
                self.connections[key].server_info()
            except OperationFailure as e:
                exit(str(e))

        for loader, path, is_pkg in pkgutil.walk_packages([MODELS_PATH],
                                                          'models.'):
            if not is_pkg:
                db = path.split('.')[-2]
                if db in self.connections:
                    for class_name, class_def in inspect.getmembers(
                            importlib.import_module(path), inspect.isclass):
                        if issubclass(class_def, MongoBase) and class_def.collection_name:
                            setattr(self, class_def.collection_name,
                                    class_def(self.connections[db], db))

    def disconnect(self):
        """
        Disconnect all the available connections

        :return:
        """
        for connection in self.connections.values():
            connection.close()


storage = Storage(get_config()['MONGO'])
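The discovery loop in this example is a plugin pattern: walk a package, import every module, and register the model classes it finds. A generic, self-contained sketch of that pattern (the `discover_subclasses` helper and its arguments are illustrative, not the project's API):

import importlib
import inspect
import pkgutil


def discover_subclasses(package_name, base_class):
    """Import every module in a package and collect subclasses of base_class."""
    found = {}
    package = importlib.import_module(package_name)
    for _, module_name, is_pkg in pkgutil.walk_packages(package.__path__,
                                                        package_name + "."):
        if is_pkg:
            continue
        module = importlib.import_module(module_name)
        for name, cls in inspect.getmembers(module, inspect.isclass):
            if issubclass(cls, base_class) and cls is not base_class:
                found[name] = cls
    return found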
Example 8
    def setUp(self):

        self.config_data = get_config()
Example 9
class LoggingConfig:
    """This class reads the logging YAML configuration file.
    """

    # Retrieve the logging.yaml data.
    logging_config = get_config(file_name="logging.yaml")
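Assuming logging.yaml holds a standard dictConfig schema, the loaded mapping can be applied like this (a sketch, not the original project's wiring):

import logging.config

# Hypothetical application of the loaded logging.yaml data.
logging.config.dictConfig(LoggingConfig.logging_config)
logging.getLogger(__name__).info("Logging configured from logging.yaml.")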