Example #1
File: config.py Project: H0R5E/dtocean-app
def init_config(logging=False, files=False, install=False, overwrite=False):
    """Copy config files to user data directory"""

    if not any([logging, files, install]): return

    objdir = ObjDirectory(__name__, "..", "config")
    datadir = UserDataDirectory("dtocean_app", "DTOcean", "config")
    dirmap = DirectoryMap(datadir, objdir)

    if logging: dirmap.copy_file("logging.yaml", overwrite=overwrite)
    if files: dirmap.copy_file("files.ini", overwrite=overwrite)
    if install: dirmap.copy_file("install.ini", overwrite=overwrite)

    return datadir.get_path()
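
A minimal usage sketch for the function above (assuming, as in the dtocean
projects, that ObjDirectory, UserDataDirectory and DirectoryMap come from the
polite helper package):

# Hedged sketch: copy the logging configuration to the user data directory,
# overwriting any existing copy, and report where it went
config_path = init_config(logging=True, overwrite=True)
if config_path is not None:
    print("Configuration copied to {}".format(config_path))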
Example #2
def get_database_config(db_config_name="database.yaml"):
    """Read the database config, preferring the user copy, and return a
    writable ReadYAML handle to the user copy with the parsed configuration"""

    userconfigdir = UserDataDirectory("dtocean_core", "DTOcean", "config")
    useryaml = ReadYAML(userconfigdir, db_config_name)
    
    # Prefer the user copy if present, otherwise fall back to the config
    # bundled with the package
    if userconfigdir.isfile(db_config_name):
        configdir = userconfigdir
    else:
        configdir = ObjDirectory("dtocean_core", "config")
    
    configyaml = ReadYAML(configdir, db_config_name)
    config = configyaml.read()
            
    return useryaml, config
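
The returned ReadYAML handle points at the user configuration, so callers can
read the effective settings and persist changes. A sketch, assuming ReadYAML
offers a write method complementary to the read call shown above:

useryaml, config = get_database_config()
config["host"] = "localhost"  # hypothetical key, for illustration only
useryaml.write(config)        # write method is an assumption, not shown above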
Example #3
File: config.py Project: H0R5E/dtocean-app
def get_install_paths():
    """Pick the necessary paths to configure the external files for the 
    manuals."""

    install_config_name = "install.ini"

    user_data = UserDataDirectory("dtocean_doc", "DTOcean", "config")
    user_ini_reader = ReadINI(user_data, install_config_name)

    # Get the root path from the site data path.
    site_data = SiteDataDirectory("DTOcean Manuals", "DTOcean")
    site_ini_reader = ReadINI(site_data, install_config_name)

    if user_ini_reader.config_exists():
        config = user_ini_reader.get_config()
    elif site_ini_reader.config_exists():
        config = site_ini_reader.get_config()
    else:
        return None

    path_dict = {
        "man_user_path": config["man"]["user_path"],
        "man_technical_path": config["man"]["technical_path"]
    }

    return path_dict
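
Since this variant returns None when no configuration file is found in either
the user or site data directories, callers should guard the lookup:

paths = get_install_paths()
if paths is None:
    raise RuntimeError("No install.ini found in user or site data paths")
user_manual_path = paths["man_user_path"]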
Example #4
def get_install_paths(install_config_name="install.ini"):
    """Pick the necessary paths to configure the external files for the transit
    route."""

    source_dir = ObjDirectory(__name__, "config")
    user_data = UserDataDirectory("dtocean_logistics", "DTOcean", "config")
    user_data_map = DirectoryMap(user_data, source_dir)
    # Copy the default config into the user directory under a .txt name,
    # leaving any existing install.ini untouched
    user_data_map.safe_copy_file(install_config_name,
                                 "{}.txt".format(install_config_name))
    user_ini_reader = ReadINI(user_data_map, install_config_name)

    # Get the root path from the site data path.
    site_data = SiteDataDirectory("DTOcean Logistics", "DTOcean")
    site_ini_reader = ReadINI(site_data, install_config_name)

    if user_ini_reader.config_exists():
        config = user_ini_reader.get_config()
    elif site_ini_reader.config_exists():
        config = site_ini_reader.get_config()
    else:
        errStr = ("No suitable configuration file found at paths "
                  "{} or {}").format(site_ini_reader.get_config_path(),
                                     user_ini_reader.get_config_path())
        raise RuntimeError(errStr)

    path_dict = {
        "logistics_include": config["dtocean_logistics"]["include_path"]
    }

    return path_dict
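
Unlike Example #3, this variant raises on a missing configuration, so the
natural calling pattern is a try/except block; a brief sketch:

try:
    paths = get_install_paths()
    include_path = paths["logistics_include"]
except RuntimeError:
    # The error message already names both searched paths; fall back or
    # re-raise with additional context here
    include_path = None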
Example #5
def start_logging():
    """Start python logger"""

    # Pick up the configuration from the user directory if it exists
    userdir = UserDataDirectory("dtocean_core", "DTOcean", "config")

    # Look for files.ini
    if userdir.isfile("files.ini"):
        configdir = userdir
    else:
        configdir = ObjDirectory("dtocean_core", "config")

    files_ini = ReadINI(configdir, "files.ini")
    files_config = files_ini.get_config()

    appdir_path = userdir.get_path("..")
    log_folder = files_config["logs"]["path"]
    log_path = os.path.join(appdir_path, log_folder)
    logdir = Directory(log_path)

    # Look for logging.yaml
    if userdir.isfile("logging.yaml"):
        configdir = userdir
    else:
        configdir = ObjDirectory("dtocean_core", "config")

    log = Logger(configdir)
    log_config_dict = log.read()

    # Update the file logger if present
    if "file" in log_config_dict["handlers"]:
        log_filename = log_config_dict["handlers"]["file"]["filename"]
        log_path = logdir.get_path(log_filename)
        log_config_dict["handlers"]["file"]["filename"] = log_path
        logdir.makedir()

    log.configure_logger(log_config_dict)
    logger = log.add_named_logger("dtocean_core")

    # Rotate any rotating file handlers
    for handler in logger.handlers:
        if handler.__class__.__name__ == 'RotatingFileHandler':
            handler.doRollover()

    logger.info("Begin logging for dtocean_core")

    return
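
One call configures the handlers and rotates any existing log files, after
which named loggers can be used as normal; a sketch:

import logging

start_logging()
module_logger = logging.getLogger("dtocean_core")
module_logger.debug("Logging configured")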
Example #6
def start_logging(debug=False):
    """Start the python logger"""

    # Pick up the configuration from the user directory if it exists
    userdir = UserDataDirectory("dtocean_app", "DTOcean", "config")
            
    if userdir.isfile("files.ini") and userdir.isfile("logging.yaml"):
        configdir = userdir
    else:
        configdir = ObjDirectory("dtocean_app", "config")
    
    files_ini = ReadINI(configdir, "files.ini")
    files_config = files_ini.get_config()
    
    appdir_path = userdir.get_path("..")
    log_folder = files_config["logs"]["path"]
    log_path = os.path.join(appdir_path, log_folder)
    logdir = Directory(log_path)
    
    # Disable the logging QtHandler if the debug flag is set
    QtHandler.debug = debug
    
    log = Logger(configdir)
    log_config_dict = log.read()
    
    # Update the file logger if present
    if "file" in log_config_dict["handlers"]:
        log_filename = log_config_dict["handlers"]["file"]["filename"]
        log_path = logdir.get_path(log_filename)
        log_config_dict["handlers"]["file"]["filename"] = log_path
        logdir.makedir()
    
    log.configure_logger(log_config_dict)
    logger = log.add_named_logger("dtocean_app")
    
    # Rotate any rotating file handlers
    for handler in logger.handlers:
        if handler.__class__.__name__ == 'RotatingFileHandler':
            try:
                handler.doRollover()
            except WindowsError:
                pass
            
    logger.info("Welcome to DTOcean")
    
    return
Example #7
def get_log_dir():

    userdir = UserDataDirectory("dtocean_app", "DTOcean", "config")

    # Look for files.ini
    if userdir.isfile("files.ini"):
        configdir = userdir
    else:
        configdir = ObjDirectory("dtocean_app", "config")

    files_ini = ReadINI(configdir, "files.ini")
    files_config = files_ini.get_config()

    appdir_path = userdir.get_path("..")
    log_folder = files_config["logs"]["path"]
    log_path = os.path.join(appdir_path, log_folder)
    logdir = Directory(log_path)

    return logdir
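
A sketch combining the helper with a path query (the no-argument get_path
call matches its use in Examples #1 and #15):

logdir = get_log_dir()
print("Log files are written to {}".format(logdir.get_path()))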
Example #8
def start_logging(debug=False):

    # Disable the logging QtHandler if the debug flag is set
    QtHandler.debug = debug

    # Pick up the configuration from the user directory if it exists
    userdir = UserDataDirectory("dtocean_app", "DTOcean", "config")

    # Look for logging.yaml
    if userdir.isfile("logging.yaml"):
        configdir = userdir
    else:
        configdir = ObjDirectory("dtocean_app", "config")

    # Get the logger configuration
    log = Logger(configdir)
    log_config_dict = log.read()

    # Get Directory to place logs
    log_dir = get_log_dir()

    # Update the file logger if present
    if "file" in log_config_dict["handlers"]:
        log_filename = log_config_dict["handlers"]["file"]["filename"]
        log_path = log_dir.get_path(log_filename)
        log_config_dict["handlers"]["file"]["filename"] = log_path
        log_dir.makedir()

    log.configure_logger(log_config_dict)
    logger = log.add_named_logger("dtocean_app")

    # Rotate any rotating file handlers
    for handler in logger.handlers:
        if handler.__class__.__name__ == 'RotatingFileHandler':
            try:
                handler.doRollover()
            except WindowsError:
                pass

    logger.info("Welcome to DTOcean")

    return
Example #9
def start_logging(level=None):
    """Start python logger"""

    objdir = ObjDirectory(__name__, "config")
    datadir = UserDataDirectory("dtocean_hydro", "DTOcean", "config")
    dirmap = DirectoryMap(datadir, objdir)

    log = Logger(dirmap)
    log("dtocean_hydro",
        level=level,
        info_message="Begin logging for dtocean_hydro.")
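
Here the Logger instance is callable and performs the whole setup in one
step, so usage is a single call; a sketch, assuming standard logging level
names are accepted:

start_logging(level="DEBUG")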
Example #10
def get_config(config_name="configuration.ini", valid_name="validation.ini"):
    """Pick the necessary paths to configure the external files for the wave
    and tidal packages."""

    source_dir = ObjDirectory(__name__, "config")
    user_data = UserDataDirectory("dtocean_dummy", "DTOcean", "config")
    user_data_map = DirectoryMap(user_data, source_dir)

    user_ini_reader = ReadINI(user_data_map, config_name, valid_name)
    user_ini_reader.copy_config()

    # Collect the configuration data
    config = user_ini_reader.get_valid_config()

    return config
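
In this pattern copy_config seeds the user directory with the packaged
defaults and get_valid_config validates the result against validation.ini. A
usage sketch (the section and option names are hypothetical):

config = get_config()
timeout = config["general"]["timeout"]  # hypothetical entries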
Example #11
def init_config(install=False, overwrite=False):
    
    """Copy config files to user data directory"""
    
    objdir = ObjDirectory(__name__, "config")
    datadir = UserDataDirectory("dtocean_app", "DTOcean", "config")
    dirmap = DirectoryMap(datadir, objdir)
    
    dirmap.copy_file("logging.yaml", overwrite=overwrite)
    dirmap.copy_file("files.ini", overwrite=overwrite)
    
    # Copy the manuals installation configuration
    if install:
        dirmap.copy_file("install.ini", overwrite=overwrite)
    
    return
Example #12
def get_install_paths():
    
    """Pick the necessary paths to configure the external files for the wave
    and tidal packages."""
    
    source_dir = ObjDirectory(__name__, "config")
    user_data = UserDataDirectory("dtocean_hydro", "DTOcean", "config")
    user_data_map = DirectoryMap(user_data, source_dir)
    
    install_src_name = "install.ini"
    
    # Check for bundled indicator file
    if source_dir.isfile(".bundled"):
        install_dst_name = "install_bundled.ini"
    else:
        install_dst_name = "install.ini"
    
    log_msg = ("Install configuration file name set to "
               "'{}'").format(install_dst_name)
    module_logger.debug(log_msg)
    
    user_data_map.safe_copy_file(install_src_name,
                                 "{}.txt".format(install_dst_name))
    user_ini_reader = ReadINI(user_data_map, install_dst_name)
    
    # Get the root path from the site data path.
    site_data = SiteDataDirectory("DTOcean Hydrodynamics", "DTOcean")
    site_ini_reader = ReadINI(site_data, install_dst_name)
    
    if user_ini_reader.config_exists():
        config = user_ini_reader.get_config()
    elif site_ini_reader.config_exists():
        config = site_ini_reader.get_config()
    else:
        errStr = ("No suitable configuration file found at paths "
                  "{} or {}").format(site_ini_reader.get_config_path(),
                                     user_ini_reader.get_config_path())
        raise RuntimeError(errStr)

    path_dict = {"bin"              : config["dtocean_wec"]["bin_path"],
                 "wec_include"      : config["dtocean_wec"]["include_path"],
                 "tidal_include"    : config["dtocean_tidal"]["include_path"]
                 }

    return path_dict
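
A usage sketch for the resolved paths (the keys match the dictionary built
above):

paths = get_install_paths()
wec_bin = paths["bin"]
wec_include = paths["wec_include"]
tidal_include = paths["tidal_include"]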
Example #13
    def connect(self, debug_entry=False, export_data=True):
        '''The connect method is used to execute the external program and 
        populate the interface data store with values.
        
        Note:
          Collecting data from the interface for use in the external program
          can be accessed using self.data.my_input_variable. To put new values
          into the interface once the program has run we set
          self.data.my_output_variable = value
        
        '''

        system_type_map = {
            "Tidal Floating": "tidefloat",
            "Tidal Fixed": "tidefixed",
            "Wave Floating": "wavefloat",
            "Wave Fixed": "wavefixed"
        }
        system_type = system_type_map[self.data.device_type_user]

        input_dict = self.get_input_dict(self.data,
                                         self.data.network_configuration_user)

        if input_dict is None: return

        mission_time_hours = self.data.mission_time * 365. * 24.

        if self.data.expected_mttf is None:
            mttfreq_hours = None
        else:
            mttfreq_hours = self.data.expected_mttf * 365. * 24.

        input_dict["system_type"] = system_type
        input_dict["mission_time_hours"] = mission_time_hours
        input_dict["mttfreq_hours"] = mttfreq_hours

        if export_data:

            userdir = UserDataDirectory("dtocean_core", "DTOcean", "config")

            if userdir.isfile("files.ini"):
                configdir = userdir
            else:
                configdir = ObjDirectory("dtocean_core", "config")

            files_ini = ReadINI(configdir, "files.ini")
            files_config = files_ini.get_config()

            appdir_path = userdir.get_path("..")
            debug_folder = files_config["debug"]["path"]
            debug_path = os.path.join(appdir_path, debug_folder)
            debugdir = Directory(debug_path)
            debugdir.makedir()

            pkl_path = debugdir.get_path("reliability_inputs.pkl")
            with open(pkl_path, "wb") as pkl_file:
                pickle.dump(input_dict, pkl_file)

        input_variables = Variables(
            input_dict["mission_time_hours"], input_dict["system_type"],
            input_dict["compdict"], input_dict["mttfreq_hours"],
            input_dict["network_configuration"],
            input_dict["electrical_network_hier"],
            input_dict["electrical_network_bom"],
            input_dict["moor_found_network_hier"],
            input_dict["moor_found_network_bom"], input_dict["user_hier"],
            input_dict["user_bom"])

        main = Main(input_variables)

        if debug_entry: return

        year_hours = 24. * 365.25

        mttf, self.data.rsystime = main()

        self.data.mttf = mttf / year_hours
        self.data.mttf_test = main.mttfpass

        if self.data.network_configuration_user == "Radial":
            network_configuration = "radial"
        elif self.data.network_configuration_user == "Star":
            network_configuration = "multiplehubs"
        else:
            network_configuration = None

        ram_df = read_RAM(main.rsubsysvalues2, main.rsubsysvalues3,
                          network_configuration)

        metrics_map = {
            "system id [-]": "System ID",
            "failure rate [1/10^6 hours]": "Failure Rate",
            "MTTF [hours]": "MTTF"
        }

        if self.data.electrical_network is not None:

            metrics = ram_df[(ram_df["system id [-]"] == "-")
                             & (ram_df["subsystem id [-]"] == "Substation")]
            failure_rate = metrics["failure rate [1/10^6 hours]"].iloc[0]
            mttf = metrics["MTTF [hours]"].iloc[0] / year_hours

            self.data.substation_reliability = {
                "Failure Rate": [failure_rate],
                "MTTF": [mttf]
            }

            metrics = ram_df[(ram_df["system id [-]"] == "-")
                             & (ram_df["subsystem id [-]"] == "Export Cable")]

            failure_rate = metrics["failure rate [1/10^6 hours]"].iloc[0]
            mttf = metrics["MTTF [hours]"].iloc[0] / year_hours

            self.data.export_cable_reliability = {
                "Failure Rate": [failure_rate],
                "MTTF": [mttf]
            }

            metrics = ram_df[(ram_df["system id [-]"].str.contains("subhub"))
                             & (ram_df["subsystem id [-]"] == "Substation")]

            if not metrics.empty:

                metrics_df = metrics.loc[:, ("system id [-]",
                                             "failure rate [1/10^6 hours]",
                                             "MTTF [hours]")]
                metrics_df.loc[:, "MTTF [hours]"] /= year_hours

                metrics_df = metrics_df.rename(columns=metrics_map)
                metrics_df = metrics_df.reset_index(drop=True)

                self.data.hub_reliability = metrics_df

            metrics = ram_df[ram_df["subsystem id [-]"].str.lower().str.
                             contains("elec sub-system")]

            if not metrics.empty:

                metrics_df = metrics.loc[:, ("system id [-]",
                                             "failure rate [1/10^6 hours]",
                                             "MTTF [hours]")]
                metrics_df.loc[:, "MTTF [hours]"] /= year_hours

                metrics_df = metrics_df.rename(columns=metrics_map)
                metrics_df = metrics_df.reset_index(drop=True)

                self.data.inter_cable_reliability = metrics_df

        if self.data.moor_found_network is not None:

            metrics = ram_df[ram_df["subsystem id [-]"].str.contains(
                "mooring foundation")]

            if not metrics.empty:

                metrics_df = metrics.loc[:, ("system id [-]",
                                             "failure rate [1/10^6 hours]",
                                             "MTTF [hours]")]
                metrics_df.loc[:, "MTTF [hours]"] /= year_hours

                metrics_df = metrics_df.rename(columns=metrics_map)
                metrics_df = metrics_df.reset_index(drop=True)

                self.data.moorings_reliability = metrics_df

            metrics = ram_df[ram_df["subsystem id [-]"].str.contains(
                "dynamic cable")]

            if not metrics.empty:

                metrics_df = metrics.loc[:, ("system id [-]",
                                             "failure rate [1/10^6 hours]",
                                             "MTTF [hours]")]
                metrics_df.loc[:, "MTTF [hours]"] /= year_hours

                metrics_df = metrics_df.rename(columns=metrics_map)
                metrics_df = metrics_df.reset_index(drop=True)

                self.data.umbilical_cable_reliability = metrics_df

        # Patch double counting of umbilical cable
        if (self.data.inter_cable_reliability is not None
                and self.data.umbilical_cable_reliability is not None):

            self.data.inter_cable_reliability["Failure Rate"] -= \
                    self.data.umbilical_cable_reliability["Failure Rate"]

            # Failure rates are in failures per 10^6 hours, so MTTF in
            # years is (10^6 / rate) hours divided by the hours in a year
            hours_to_years = 1e6 / 24 / 365.25
            mttf = [
                hours_to_years / rate
                for rate in self.data.inter_cable_reliability["Failure Rate"]
            ]
            self.data.inter_cable_reliability["MTTF"] = mttf

        return
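
The closing patch converts the corrected failure rate, expressed in failures
per 10^6 hours, back to an MTTF in years via
MTTF_years = (10^6 / rate) / (24 * 365.25). A worked sketch with an
illustrative rate:

# Illustrative numbers only: 10 failures per 10^6 hours gives an MTTF of
# 10^5 hours, roughly 11.4 years
rate = 10.0
mttf_hours = 1e6 / rate
mttf_years = mttf_hours / (24 * 365.25)
assert abs(mttf_years - 11.4) < 0.1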
Example #14
    def connect(self, debug_entry=False, export_data=True):
        '''The connect method is used to execute the external program and 
        populate the interface data store with values.
        
        Note:
          Collecting data from the interface for use in the external program
          can be accessed using self.data.my_input_variable. To put new values
          into the interface once the program has run we set
          self.data.my_output_variable = value
        
        '''

        # ----------------------------------------------------------------
        # WP2 site data class
        # ----------------------------------------------------------------
        # SiteData class: contains all the information relative to the
        # area of deployment of the array.
        #
        # Args:
        #   LeaseArea (numpy.ndarray) [m]: UTM coordinates of the lease
        #       area polygon expressed as [X, Y], where X is the column
        #       vector of easting coordinates and Y is the column vector
        #       of northing coordinates.
        #   NogoAreas (list) [m]: list of the UTM coordinates of the nogo
        #       area polygons, each expressed as [X, Y].
        #   MeteoceanConditions (dict): dictionary gathering all the
        #       information related to the metocean conditions of the
        #       site. The dictionary is different for wave and tidal
        #       cases.
        #       Wave keys:
        #           'Te' (numpy.ndarray) [s]: wave energy periods
        #           'Hs' (numpy.ndarray) [m]: significant wave heights
        #           'dir' (numpy.ndarray) [rad]: wave directions
        #           'p' (numpy.ndarray) [-]: probability of occurrence of
        #               each sea state
        #           'specType' (tup): description of the spectral shape
        #               recorded at the site (spectrum name, gamma factor,
        #               spreading parameter)
        #           'SSH' (float) [m]: sea surface height wrt the
        #               bathymetry datum at a single point
        #       Tidal keys:
        #           'V' (numpy.ndarray) [m/s]: northing component of the
        #               velocity field
        #           'U' (numpy.ndarray) [m/s]: easting component of the
        #               velocity field
        #           'p' (numpy.ndarray) [-]: probability of occurrence of
        #               each state
        #           'TI' (numpy.ndarray) [-]: turbulence intensity; a
        #               single float or a matrix giving the TI at each
        #               grid node
        #           'x' (numpy.ndarray) [m]: easting coordinates of the
        #               grid nodes
        #           'y' (numpy.ndarray) [m]: northing coordinates of the
        #               grid nodes
        #           'SSH' (numpy.ndarray) [m]: sea surface height wrt the
        #               bathymetry datum
        #   VelocityShear (numpy.ndarray) [-]: tidal velocity shear
        #       formula (power law), used to evaluate the vertical
        #       velocity profile.
        #   Main_Direction (numpy.ndarray, optional) [m]: xy vector
        #       defining the main orientation of the array. If not
        #       provided it is assessed from the metocean conditions.
        #   Bathymetry (numpy.ndarray) [m]: vertical profile of the sea
        #       bottom at each (given) UTM coordinate, expressed as
        #       [X, Y, Z].
        #   Geophysics (numpy.ndarray) [?]: sea bottom geophysical
        #       characteristics at each (given) UTM coordinate, expressed
        #       as [X, Y, Geo].
        #   BR (float) [-]: ratio of the lease area surface to the site
        #       area surface enclosed in a channel:
        #       1. - closed channel
        #       0. - open sea
        #   electrical_connection_point (numpy.ndarray) [m]: UTM
        #       coordinates of the electrical connection point at the
        #       shore line.
        #   boundary_padding (float, optional) [m]: padding added inside
        #       the lease area in which devices may not be placed.

        # Check whether the bin width divides the RP perfectly
        check_bin_widths(self.data.rated_power_device, self.data.pow_bins)

        if 'Tidal' in self.data.type:

            x = self.data.tidal_series.coords["UTM x"]
            y = self.data.tidal_series.coords["UTM y"]

            tide_dict = {
                "U": self.data.tidal_series.U.values,
                "V": self.data.tidal_series.V.values,
                "SSH": self.data.tidal_series.SSH.values,
                "TI": self.data.tidal_series.TI.values,
                "x": x.values,
                "y": y.values,
                "t": self.data.tidal_series.t.values,
                "xc": self.data.tidal_occurrence_point.x,
                "yc": self.data.tidal_occurrence_point.y,
                "ns": self.data.tidal_nbins
            }

            occurrence_matrix = make_tide_statistics(tide_dict)

            p_total = sum(occurrence_matrix['p'])

            if not np.isclose(p_total, 1.):

                errStr = ("Tidal statistics probabilities invalid. Total "
                          "probability equals {}").format(p_total)
                raise ValueError(errStr)

            occurrence_matrix_coords = [
                occurrence_matrix['x'], occurrence_matrix['y'],
                occurrence_matrix['p']
            ]

            matrix_xset = {
                "values": {
                    "U": occurrence_matrix['U'],
                    "V": occurrence_matrix['V'],
                    "SSH": occurrence_matrix['SSH'],
                    "TI": occurrence_matrix['TI']
                },
                "coords": occurrence_matrix_coords
            }

            self.data.tidal_occurrence = matrix_xset

            x = self.data.geophysics.coords["UTM x"]
            y = self.data.geophysics.coords["UTM y"]

            # Flatten mannings number
            xgrid, ygrid = np.meshgrid(x.values, y.values)
            geogrid = self.data.geophysics.values.T
            geoflat = np.array(
                zip(xgrid.flatten(), ygrid.flatten(), geogrid.flatten()))

        else:

            occurrence_matrix = make_wave_statistics(self.data.wave_series)

            p_total = occurrence_matrix['p'].sum()

            if not np.isclose(p_total, 1.):

                errStr = ("Wave statistics probabilities invalid. Total "
                          "probability equals {}").format(p_total)
                raise ValueError(errStr)

            occurrence_matrix_coords = [
                occurrence_matrix['Te'], occurrence_matrix['Hs'],
                occurrence_matrix['B']
            ]
            matrix_xgrid = {
                "values": occurrence_matrix['p'],
                "coords": occurrence_matrix_coords
            }

            self.data.wave_occurrence = matrix_xgrid

            # Translate spectrum type
            spectrum_map = {
                "Regular": "Regular",
                "Pierson-Moskowitz": "Pierson_Moskowitz",
                "JONSWAP": "Jonswap",
                "Bretschneider": "Bretschneider_Mitsuyasu",
                "Modified Bretschneider": "Modified_Bretschneider_Mitsuyasu"
            }

            spectrum_type = spectrum_map[self.data.spectrum_type_farm]

            spectrum_list = (spectrum_type, self.data.spectrum_gamma_farm,
                             self.data.spectrum_dir_spreading_farm)

            occurrence_matrix["SSH"] = 0.  # Datum is mean sea level
            occurrence_matrix["specType"] = spectrum_list

            geoflat = None

        # Snap lease area to bathymetry
        bathy_x = self.data.bathymetry["x"]
        bathy_y = self.data.bathymetry["y"]
        bathy_box = box(bathy_x.min(), bathy_y.min(), bathy_x.max(),
                        bathy_y.max())

        lease_area = self.data.lease_area
        sane_lease_area = lease_area.intersection(bathy_box)

        # Convert lease and nogo polygons
        numpy_lease = np.array(sane_lease_area.exterior.coords[:-1])

        if self.data.nogo_areas is None:
            numpy_nogo = None
        else:
            numpy_nogo = [
                np.array(x.exterior.coords[:-1])
                for x in self.data.nogo_areas.values()
            ]

        numpy_landing = np.array(self.data.export_landing_point.coords[0])

        # Bathymetry (**assume layer 1 in uppermost**)
        zv = self.data.bathymetry["depth"].sel(layer="layer 1").values.T
        xv, yv = np.meshgrid(self.data.bathymetry["x"].values,
                             self.data.bathymetry["y"].values)
        xyz = np.dstack([xv.flatten(), yv.flatten(), zv.flatten()])[0]
        safe_xyz = xyz[~np.isnan(xyz).any(axis=1)]

        # Convert main direction to vector
        if self.data.main_direction is None:
            main_direction_vec = None
        else:
            main_direction_tuple = bearing_to_vector(self.data.main_direction)
            main_direction_vec = np.array(main_direction_tuple)

        Site = WP2_SiteData(numpy_lease, numpy_nogo, occurrence_matrix, 7.,
                            main_direction_vec, safe_xyz, geoflat,
                            self.data.blockage_ratio, numpy_landing,
                            self.data.boundary_padding)

        # ----------------------------------------------------------------
        # WP2 machine data class
        # ----------------------------------------------------------------
        # MachineData class: contains all the information relative to the
        # machine deployed in the array.
        #
        # Args:
        #   Type (str) [-]: device type, either 'tidal' or 'wave'. No
        #       other strings are accepted.
        #   lCS (numpy.ndarray) [m]: position vector of the local
        #       coordinate system from the given reference point.
        #       Wave: position vector of the body CS wrt the mesh CS.
        #       Tidal: position of the hub from the machine (reference)
        #           CS.
        #   Clen (numpy.ndarray) [m]: characteristic length of the
        #       device.
        #       Wave: unused.
        #       Tidal: turbine diameter and distance of the hub from the
        #           centre line, used when the machine is composed of two
        #           parallel turbines.
        #   YawAngle (float) [rad]: yaw angle span wrt the main direction
        #       of the array. The total yawing range is twice the span:
        #       -Yaw/+Yaw.
        #   Float_flag (bool) [-]: True if the machine is floating,
        #       False otherwise.
        #   InstalDepth (list) [m]: minimum and maximum water depth at
        #       which the device can be installed.
        #   MinDist (tuple) [m]: minimum allowed distance between devices
        #       in the array configuration; the first element is the
        #       distance along the x axis, the second along the y axis.
        #   OpThreshold (float) [-]: minimum allowed q-factor.
        #   UserArray (dict): description of the array layout to be
        #       optimised. Keys:
        #       'Option' (int): 1 - optimisation over the internal
        #                           parametric array layouts
        #                       2 - fixed array layout specified by the
        #                           user, not subject to optimisation
        #                       3 - array layout specified by the user,
        #                           subject to optimisation via expansion
        #                           of the array
        #       'Value' options:
        #           (str) 'rectangular'
        #           (str) 'staggered'
        #           (str) 'full'
        #           (numpy.ndarray) [m]: [X, Y] coordinates of the
        #               devices
        #   RatedPowerArray (float) [W]: rated power of the array.
        #   RatedPowerDevice (float) [W]: rated power of a single
        #       isolated device.
        #   UserOutputTable (dict, optional): dictionary of dictionaries
        #       collecting all the array layouts input and analysed by
        #       the user. With this option the internal WP2 calculation
        #       is skipped and the optimisation is performed on the given
        #       data. The dictionary keys are the arguments of the WP2
        #       Output class.
        #   wave_data_folder (string, optional): path of the hydrodynamic
        #       results generated by the wave external module.
        #   tidal_power_curve (numpy.ndarray, optional) [-]: power curve
        #       as a function of the stream velocity.
        #   tidal_thrust_curve (numpy.ndarray, optional) [-]: thrust
        #       curve as a function of the stream velocity.
        #   tidal_velocity_curve (numpy.ndarray, optional) [m/s]: vector
        #       of stream velocities.
        #   tidal_cutinout (numpy.ndarray, optional): cut-in and cut-out
        #       velocities of the turbine. Outside the cut-in/out range
        #       the machine produces no power; the generator is shut
        #       down, but the machine still interacts with the others.
        #   tidal_bidirectional (bool, optional): bidirectional working
        #       principle of the turbine.
        #   tidal_data_folder (string, optional): path to the tidal
        #       device CFD data files.

        yaw_angle = np.radians(self.data.yaw_angle)
        min_install = self.data.min_install
        max_install = self.data.max_install
        min_dist = (self.data.min_dist_x, self.data.min_dist_y)
        op_threshold = self.data.op_threshold
        install_depth = (min_install, max_install)

        if 'Tidal' in self.data.type:

            perf_velocity = self.data.perf_curves.index.values
            cp_curve = self.data.perf_curves["Coefficient of Power"].values
            ct_curve = self.data.perf_curves["Coefficient of Thrust"].values
            cut_in = self.data.cut_in
            cut_out = self.data.cut_out

            dev_type = "Tidal"
            lCS = [0, 0, self.data.hub_height]
            clen = (self.data.rotor_diam, self.data.turbine_interdist)
            wave_data_folder = None
            tidal_power_curve = cp_curve
            tidal_thrust_curve = ct_curve
            tidal_velocity_curve = perf_velocity
            tidal_cutinout = (cut_in, cut_out)
            tidal_bidirectional = self.data.bidirection
            tidal_data_folder = self.data.tidal_data_directory

        else:

            dev_type = "Wave"
            lCS = None
            clen = None
            wave_data_folder = self.data.wave_data_directory
            tidal_power_curve = None
            tidal_thrust_curve = None
            tidal_velocity_curve = None
            tidal_cutinout = None
            tidal_bidirectional = None
            tidal_data_folder = None

        # Set user_array_dict value key
        if self.data.user_array_option in [
                "User Defined Flexible", "User Defined Fixed"
        ]:

            if self.data.user_array_layout is None:

                errStr = ("A predefined array layout must be provided when "
                          "using the '{}' array layout option").format(
                              self.data.user_array_option)
                raise ValueError(errStr)

            numpy_layout = np.array(
                [point.coords[0][:2] for point in self.data.user_array_layout])

            user_array_dict = {'Value': numpy_layout}

        else:

            user_array_dict = {'Value': self.data.user_array_option.lower()}

        # Set user_array_dict option key
        if self.data.user_array_option == "User Defined Flexible":
            user_array_dict['Option'] = 3
        elif self.data.user_array_option == "User Defined Fixed":
            user_array_dict['Option'] = 2
        else:
            user_array_dict['Option'] = 1

        if 'Floating' in self.data.type:
            float_flag = True
        else:
            float_flag = False

        Machine = WP2_MachineData(
            dev_type,
            lCS,
            clen,
            yaw_angle,
            float_flag,
            install_depth,
            min_dist,
            op_threshold,
            user_array_dict,
            self.data.rated_array_power * 1e6,  # Watts
            self.data.rated_power_device * 1e6,  # Watts
            None,
            wave_data_folder,
            tidal_power_curve,
            tidal_thrust_curve,
            tidal_velocity_curve,
            tidal_cutinout,
            tidal_bidirectional,
            tidal_data_folder)

        iWP2input = WP2input(Machine, Site)

        if export_data:

            userdir = UserDataDirectory("dtocean_core", "DTOcean", "config")

            if userdir.isfile("files.ini"):
                configdir = userdir
            else:
                configdir = ObjDirectory("dtocean_core", "config")

            files_ini = ReadINI(configdir, "files.ini")
            files_config = files_ini.get_config()

            appdir_path = userdir.get_path("..")
            debug_folder = files_config["debug"]["path"]
            debug_path = os.path.join(appdir_path, debug_folder)
            debugdir = Directory(debug_path)
            debugdir.makedir()

            pkl_path = debugdir.get_path("hydrodynamics_inputs.pkl")
            with open(pkl_path, "wb") as pkl_file:
                pickle.dump(iWP2input, pkl_file)

        if debug_entry: return

        if not iWP2input.stopWP2run:
            main = WP2(iWP2input, pickup=True, debug=False)

            result = main.optimisationLoop()

            if result == -1:
                errStr = "Hydrodynamics module failed to execute successfully."
                raise RuntimeError(errStr)

        if export_data:

            pkl_path = debugdir.get_path("hydrodynamics_outputs.pkl")
            with open(pkl_path, "wb") as pkl_file:
                pickle.dump(result, pkl_file)

        AEP_per_device = {}
        pow_per_device = {}
        pmf_per_device = {}
        layout = {}
        q_factor_per_device = {}
        dev_ids = []

        # Layout
        for dev_id, coords in result.Array_layout.iteritems():
            dev_id = dev_id.lower()
            layout[dev_id] = np.array(coords)
            dev_ids.append(dev_id)

        self.data.device_position = layout
        self.data.n_bodies = int(result.Nbodies)

        # Total annual energy (convert to MWh)
        self.data.AEP_array = \
                       float(result.Annual_Energy_Production_Array) / 1e6

        # Array capacity factor
        ideal_energy = (365 * 24 * self.data.n_bodies *
                        self.data.rated_power_device)
        self.data.array_efficiency = self.data.AEP_array / ideal_energy

        # Annual energy per device (convert to MWh)
        for dev_id, AEP in zip(dev_ids, result.Annual_Energy_Production_perD):
            AEP_per_device[dev_id] = float(AEP) / 1e6  # SimpleDict

        self.data.AEP_per_device = AEP_per_device

        # Mean power per device (convert to MW)
        for dev_id, power in zip(dev_ids, result.power_prod_perD):
            pow_per_device[dev_id] = float(power) / 1e6  # SimpleDict

        self.data.pow_per_device = pow_per_device

        for dev_id, pow_per_state in zip(dev_ids, result.power_prod_perD_perS):

            # Power probability mass function (convert to MW)
            flat_prob = occurrence_matrix['p'].flatten("F")
            pow_list = pow_per_state / 1e6

            assert np.isclose(flat_prob.sum(), 1.)
            assert len(flat_prob) == len(pow_list)

            # Find uniques powers
            unique_powers = []

            for power in pow_list:

                if not np.isclose(power, unique_powers).any():
                    unique_powers.append(power)

            # Catch any matching powers and sum the probabilities
            powers = []
            probs = []

            match_index_check = []

            for power in unique_powers:

                matches = np.isclose(power, pow_list)
                assert len(matches) >= 1
                match_idx = np.where(matches)
                match_probs = flat_prob[match_idx]
                match_index_check.extend(match_idx[0].tolist())

                powers.append(power)
                probs.append(match_probs.sum())

                # Nullify the found indexes to ensure uniqueness
                pow_list[match_idx] = np.nan
                flat_prob[match_idx] = np.nan

            repeated_indexes = set([
                x for x in match_index_check if match_index_check.count(x) > 1
            ])

            assert len(repeated_indexes) == 0
            assert np.isclose(sum(probs), 1.)

            pmf_per_device[dev_id] = np.array(zip(powers, probs))

        # Power probability histograms
        dev_pow_hists = make_power_histograms(pmf_per_device,
                                              self.data.rated_power_device,
                                              self.data.pow_bins)

        self.data.pow_pmf_per_device = pmf_per_device
        self.data.pow_hist_per_device = dev_pow_hists

        # Resource modification
        self.data.q_factor_per_device = q_factor_per_device
        self.data.q_factor_array = result.q_factor_Array

        self.data.resource_reduction = float(result.Resource_Reduction)

        for dev_id, q_factor in zip(dev_ids, result.q_factor_Per_Device):
            q_factor_per_device[dev_id] = q_factor

        # Main Direction
        self.data.main_direction = vector_to_bearing(*result.main_direction)

        # Device type specific outputs
        if 'Wave' in self.data.type:

            # External forces
            fex_dict = result.Hydrodynamic_Parameters
            modes = np.array(fex_dict["mode_def"])
            freqs = np.array(fex_dict["wave_fr"])

            # Convert directions to bearings
            bearings = [radians_to_bearing(x) for x in fex_dict["wave_dir"]]
            dirs = np.array(bearings)

            fex_raw = np.zeros(
                [len(modes), len(freqs), len(dirs)], dtype=complex) * np.nan

            for i, mode_fex in enumerate(fex_dict["fex"]):
                if mode_fex:
                    fex_raw[i, :, :] = np.expand_dims(mode_fex, axis=1)

            fex_xgrid = {"values": fex_raw, "coords": [modes, freqs, dirs]}

            self.data.ext_forces = fex_xgrid

            ## Power Matrix in kW
            power_matrix = result.power_matrix_machine / 1000.
            power_matrix_dims = result.power_matrix_dims

            # Convert directions to bearings
            bearings = [
                radians_to_bearing(x) for x in power_matrix_dims["dirs"]
            ]

            occurrence_matrix_coords = [
                power_matrix_dims['te'], power_matrix_dims['hm0'], bearings
            ]

            matrix_xgrid = {
                "values": power_matrix,
                "coords": occurrence_matrix_coords
            }

            self.data.power_matrix = matrix_xgrid

        return
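
The per-device PMF construction above merges near-equal power values and sums
their probabilities; the core of that reduction can be isolated in a
standalone sketch with illustrative inputs:

import numpy as np

# Three states where two powers coincide; probabilities must sum to one
pow_list = np.array([1.0, 2.0, 1.0])
flat_prob = np.array([0.25, 0.5, 0.25])

pmf = {}

for power in pow_list:
    if np.isnan(power):
        continue  # already merged into an earlier entry
    matches = np.isclose(power, pow_list)
    pmf[float(power)] = flat_prob[matches].sum()
    pow_list[matches] = np.nan  # nullify so each state counts once

assert pmf == {1.0: 0.5, 2.0: 0.5}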
Example #15
def test_UserDataDirectory():
    
    test = UserDataDirectory("test", "test")
    path = test.get_path()
    
    assert isinstance(path, basestring)