def setUpClass(cls):
        token = environ.get('KB_AUTH_TOKEN', None)
        config_file = environ.get('KB_DEPLOYMENT_CONFIG', None)
        cls.cfg = {}
        config = _ConfigParser()
        config.read(config_file)

        for nameval in config.items('UIService'):
            cls.cfg[nameval[0]] = nameval[1]

        # Getting username from Auth profile for token
        authServiceUrl = cls.cfg['auth-service-url']
        auth_client = _KBaseAuth(authServiceUrl)
        user_id = auth_client.get_user(token)
        # WARNING: don't call any logging methods on the context object,
        # it'll result in a NoneType error
        cls.ctx = MethodContext(None)
        cls.ctx.update({
            'token': token,
            'user_id': user_id,
            'provenance': [{
                'service': 'UIService',
                'method': 'please_never_use_it_in_production',
                'method_params': []
            }],
            'authenticated': 1
        })

        cls.serviceImpl = UIService(cls.cfg)
        cls.scratch = cls.cfg['scratch']
        cls.callback_url = os.environ['SDK_CALLBACK_URL']
Example 2
    def read(self, fileobj, ignore_errors=False):
        """
        Reads the configuration from the file object.

        If ``ignore_errors`` is ``False`` and a section or option starts with
        a digit, an :exc:`IOError` exception is raised.
        If ``ignore_errors`` is ``True``, these sections or options are skipped.

        :arg fileobj: file object
        :arg ignore_errors: if ``True``, skip invalid section(s) and option(s)
            instead of raising an exception
        """
        parser = _ConfigParser()
        parser.read_file(fileobj)

        for section in parser.sections():
            if section[0].isdigit():
                if ignore_errors:
                    continue
                raise IOError("Section name (%s) cannot start with a digit" % section)

            options = {}

            for option in parser.options(section):
                if option[0].isdigit():
                    if ignore_errors:
                        continue
                    raise IOError("Option name (%s) cannot start with a digit" % option)

                options[option] = parser.get(section, option)

            self.__dict__[section] = _Section(options)
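A hypothetical usage sketch for the reader above; the Config container class and the attribute-style section access are assumptions inferred from this snippet, not a confirmed API:

import io

ini_text = "[server]\nhost = localhost\nport = 8080\n\n[1bad]\nx = 1\n"
cfg = Config()                                        # assumed class that defines the read() method above
cfg.read(io.StringIO(ini_text), ignore_errors=True)   # the section starting with a digit is skipped
print(cfg.server)                                     # a _Section built from {'host': 'localhost', 'port': '8080'}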
Example 3
def _read_inifile(
        file=_os.environ.get(  # @ReservedAssignment
            "KB_DEPLOYMENT_CONFIG", _os.environ["HOME"] + "/.kbase_config")):
    # Another bandaid to read in the ~/.kbase_config file if one is present
    authdata = None
    if _os.path.exists(file):
        try:
            config = _ConfigParser()
            config.read(file)
            # strip down whatever we read to only what is legit
            authdata = {
                x: config.get("authentication", x) if config.has_option(
                    "authentication", x) else None
                for x in (
                    "user_id",
                    "token",
                    "client_secret",
                    "keyfile",
                    "keyfile_passphrase",
                    "password",
                )
            }
        except Exception as e:
            print("Error while reading INI file %s: %s" % (file, e))
    return authdata
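The has_option() guard in the comprehension above prevents a NoOptionError for missing keys; the same effect is available through get()'s fallback argument, shown in this self-contained sketch:

from configparser import ConfigParser

config = ConfigParser()
config.read_string("[authentication]\nuser_id = alice\ntoken = abc123\n")
fields = ("user_id", "token", "client_secret", "keyfile", "keyfile_passphrase", "password")
# fallback=None replaces the explicit has_option() check used above
authdata = {x: config.get("authentication", x, fallback=None) for x in fields}
print(authdata["user_id"], authdata["password"])   # alice None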
Example 4
    def parse_args(self, *args, **kwargs):
        """Parse the arguments as usual, then add default processing."""
        if _debug: ConfigArgumentParser._debug("parse_args")

        # pass along to the parent class
        result_args = ArgumentParser.parse_args(self, *args, **kwargs)

        # read in the configuration file
        config = _ConfigParser()
        config.read(result_args.ini)
        if _debug: _log.debug("    - config: %r", config)

        # check for BACpypes section
        if not config.has_section('BACpypes'):
            raise RuntimeError("INI file with BACpypes section required")

        # convert the contents to an object
        ini_obj = type('ini', (object,), dict(config.items('BACpypes')))
        if _debug: _log.debug("    - ini_obj: %r", ini_obj)

        # add the object to the parsed arguments
        setattr(result_args, 'ini', ini_obj)

        # return what was parsed
        return result_args
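The INI-plus-argparse merge used above can be reproduced with the standard library alone; a rough standalone sketch in which the file name and option default are illustrative, while the BACpypes section name comes from the snippet:

from argparse import ArgumentParser
from configparser import ConfigParser

parser = ArgumentParser()
parser.add_argument('--ini', default='settings.ini')          # hypothetical default file name
args = parser.parse_args([])

config = ConfigParser()
if config.read(args.ini) and config.has_section('BACpypes'):  # read() returns the files it parsed
    ini_obj = type('ini', (object,), dict(config.items('BACpypes')))
    setattr(args, 'ini', ini_obj)                              # options become attributes, as above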
Example 5
    def from_file(cls, fname):
        if not os.path.exists(fname):
            raise FileNotFoundError(fname)

        config_parser = _ConfigParser()
        config_parser.read(fname)
        sections = config_parser._sections
        return cls(sections)
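Note that _sections is a private attribute of ConfigParser; an equivalent mapping can be built from the public API, for example:

from configparser import ConfigParser

config_parser = ConfigParser()
config_parser.read_string("[main]\nkey = value\n")
# public-API equivalent of config_parser._sections
sections = {name: dict(config_parser[name]) for name in config_parser.sections()}
print(sections)   # {'main': {'key': 'value'}}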
Example 6
def get_config(filepath=None):
    if filepath is None:
        filepath = _get_config_filepath()
    config = _ConfigParser()
    config.read(filepath)
    config_dict = {s: dict(config.items(s)) for s in config.sections()}

    return config_dict
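The section-to-dict comprehension above can be exercised without a file on disk by feeding the parser a string; a small self-contained sketch:

from configparser import ConfigParser

config = ConfigParser()
config.read_string("[db]\nhost = localhost\nport = 5432\n")
config_dict = {s: dict(config.items(s)) for s in config.sections()}
print(config_dict)   # {'db': {'host': 'localhost', 'port': '5432'}} -- values stay strings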
Example 7
def create_config_file(settings=None):
    filepath = _get_config_filepath()
    if path.isfile(filepath):
        return

    config = _ConfigParser()
    config['vexbot'] = {'kill_on_exit': False, 'profile': 'default'}

    config['vexbot_ports'] = _get_default_port_config()

    with open(filepath, 'w') as f:
        config.write(f)
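Values assigned through the mapping interface, such as False above, are converted to strings when the section is set; a round-trip sketch using an in-memory buffer instead of a real file:

import io
from configparser import ConfigParser

config = ConfigParser()
config['vexbot'] = {'kill_on_exit': False, 'profile': 'default'}   # False is stored as the string 'False'

buf = io.StringIO()
config.write(buf)                     # same write() call as above, just not to disk

check = ConfigParser()
check.read_string(buf.getvalue())
print(check.getboolean('vexbot', 'kill_on_exit'))   # False, parsed back by getboolean()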
Example 8
    def write(self, fileobj):
        """
        Writes the configuration to the file object.

        :arg fileobj: file object
        """
        parser = _ConfigParser()

        # Add sections
        for section_name in self.__dict__:
            parser.add_section(section_name)

        # Add options
        for section_name, option_name, value in self:
            parser.set(section_name, option_name, str(value))

        parser.write(fileobj)
Example 9
def _read_inifile(
    file=_os.environ.get("KB_DEPLOYMENT_CONFIG", _os.environ["HOME"] + "/.kbase_config")  # @ReservedAssignment
):
    # Another bandaid to read in the ~/.kbase_config file if one is present
    authdata = None
    if _os.path.exists(file):
        try:
            config = _ConfigParser()
            config.read(file)
            # strip down whatever we read to only what is legit
            authdata = {
                x: config.get("authentication", x) if config.has_option("authentication", x) else None
                for x in ("user_id", "token", "client_secret", "keyfile", "keyfile_passphrase", "password")
            }
        except Exception as e:
            print("Error while reading INI file {}: {}".format(file, e))
    return authdata
Example 10
def _read_inifile(file=_os.environ.get(  # @ReservedAssignment
                  'KB_DEPLOYMENT_CONFIG', _os.environ['HOME'] +
                  '/.kbase_config')):
    # Another bandaid to read in the ~/.kbase_config file if one is present
    authdata = None
    if _os.path.exists(file):
        try:
            config = _ConfigParser()
            config.read(file)
            # strip down whatever we read to only what is legit
            authdata = {x: config.get('authentication', x)
                        if config.has_option('authentication', x)
                        else None for x in ('user_id', 'token',
                                            'client_secret', 'keyfile',
                                            'keyfile_passphrase', 'password')}
        except Exception as e:
            print('Error while reading INI file {}: {}'.format(file, e))
    return authdata
Example 11
def _read_inifile(file=_os.environ.get(  # @ReservedAssignment
                  'KB_DEPLOYMENT_CONFIG', _os.environ['HOME'] +
                  '/.kbase_config')):
    # Another bandaid to read in the ~/.kbase_config file if one is present
    authdata = None
    if _os.path.exists(file):
        try:
            config = _ConfigParser()
            config.read(file)
            # strip down whatever we read to only what is legit
            authdata = {x: config.get('authentication', x)
                        if config.has_option('authentication', x)
                        else None for x in ('user_id', 'token',
                                            'client_secret', 'keyfile',
                                            'keyfile_passphrase', 'password')}
        except Exception as e:
            print('Error while reading INI file {}: {}'.format(file, e))
    return authdata
Example 12
    def _create_deploy_cfg(self, temp_dir, ws_temp_dir, mongo_host, mongo_db,
                            mongo_type_db, auth_url):
        cp = _ConfigParser()
        cp['Workspace'] = {
            'mongodb-host': mongo_host,
            'mongodb-database': mongo_db,
            'mongodb-type-database': mongo_type_db,
            'backend-type': 'GridFS',
            'auth-service-url': auth_url + '/api/legacy/KBase',
            'auth-service-url-allow-insecure': 'true',
            'auth2-service-url': auth_url + '/',  # TODO WS should not be necessary
            'temp-dir': str(ws_temp_dir),
            'ignore-handle-service': 'true',
            'auth2-ws-admin-read-only-roles': 'WS_READ_ADMIN',
            'auth2-ws-admin-full-roles': 'WS_FULL_ADMIN'
        }
        f = temp_dir / 'test.cfg'
        with open(f, 'w') as inifile:
            cp.write(inifile)
        return f
Example 13
    def setUpClass(cls):
        configfile = os.path.abspath(os.path.dirname(
            os.path.abspath(__file__)) + '/../test.cfg')
        print('Loading test config from ' + configfile)
        cfg = _ConfigParser()
        cfg.read(configfile)
        authurl = cfg.get(cls.CFG_SEC, cls.AUTHURL)
        cls.token1 = cfg.get(cls.CFG_SEC, cls.TOKEN1)
        cls.token2 = cfg.get(cls.CFG_SEC, cls.TOKEN2)
        if not authurl:
            raise ValueError('Missing {} from test config'.format(cls.AUTHURL))
        if not cls.token1:
            raise ValueError('Missing {} from test config'.format(cls.TOKEN1))
        if not cls.token2:
            raise ValueError('Missing {} from test config'.format(cls.TOKEN2))
        print('Authorization url: ' + authurl)
        cls.user1 = cls.get_user(authurl, cls.token1)
        cls.user2 = cls.get_user(authurl, cls.token2)
        if cls.user1 == cls.user2:
            raise ValueError('{} and {} users are the same: {}'.format(
                cls.TOKEN1, cls.TOKEN2, cls.user1))
        cls.kba = KBaseAuth(authurl)
Example 14
    def setUpClass(cls):
        configfile = os.path.abspath(
            os.path.dirname(os.path.abspath(__file__)) + '/../test.cfg')
        print('Loading test config from ' + configfile)
        cfg = _ConfigParser()
        cfg.read(configfile)
        authurl = cfg.get(cls.CFG_SEC, cls.AUTHURL)
        cls.token1 = cfg.get(cls.CFG_SEC, cls.TOKEN1)
        cls.token2 = cfg.get(cls.CFG_SEC, cls.TOKEN2)
        if not authurl:
            raise ValueError('Missing {} from test config'.format(cls.AUTHURL))
        if not cls.token1:
            raise ValueError('Missing {} from test config'.format(cls.TOKEN1))
        if not cls.token2:
            raise ValueError('Missing {} from test config'.format(cls.TOKEN2))
        print('Authorization url: ' + authurl)
        cls.user1 = cls.get_user(authurl, cls.token1)
        cls.user2 = cls.get_user(authurl, cls.token2)
        if cls.user1 == cls.user2:
            raise ValueError('{} and {} users are the same: {}'.format(
                cls.TOKEN1, cls.TOKEN2, cls.user1))
        cls.kba = KBaseAuth(authurl)
Example 15
    def expand_args(self, result_args):
        """Take the result of parsing the args and interpret them."""
        if _debug: ConfigArgumentParser._debug("expand_args %r", result_args)

        # read in the configuration file
        config = _ConfigParser()
        config.read(result_args.ini)
        if _debug: _log.debug("    - config: %r", config)

        # check for BACpypes section
        if not config.has_section('BACpypes'):
            raise RuntimeError("INI file with BACpypes section required")

        # convert the contents to an object
        ini_obj = Settings(dict(config.items('BACpypes')))
        if _debug: _log.debug("    - ini_obj: %r", ini_obj)

        # add the object to the parsed arguments
        setattr(result_args, 'ini', ini_obj)

        # continue with normal expansion
        ArgumentParser.expand_args(self, result_args)
Example 16
def csv_nist_import_data(pth, filename_header, filename_data,
                         output_cls_instance=None):
    """
    Import dataset(s) from NIST-style header (INI) and CSV data files

    Parameters
    ----------
    pth : str
        Path

    filename_header : str
        File name of header
        
    filename_data : str
        File name of data

    output_cls_instance : crikit.data.spectrum.Spectrum (or subclass)
        Spectrum class (or sub) object

    Returns
    -------
        Success : bool
            Success of import
        Data, Meta : list (ndarray, dict)
            If output_cls_instance is None and the import is successful,
            returns the data and associated metadata.

    """
    # Join path and filename in an os-independent way
    pfname_header = _os.path.normpath(_os.path.join(pth, filename_header))
    pfname_data = _os.path.normpath(_os.path.join(pth, filename_data))

    valid_import_locs = 0
    
    try:
        with open(pfname_header,'r') as _:
            pass
    except:
        print('Invalid header filename')
    else:
        valid_import_locs += 1
        
    try:
        with open(pfname_data,'r') as _:
            pass
    except:
        print('Invalid data filename')
    else:
        valid_import_locs += 1
        
    if valid_import_locs != 2:
        return False
    else:
        try:
            config = _ConfigParser()
            config.read(pfname_header)
            
            # Frequency calibration
            probe = config.getfloat('Frequency Calibration','probe(nm)')
            wl1 = config.getfloat('Frequency Calibration','wavelength1(nm)')
            wl2 = config.getfloat('Frequency Calibration','wavelength2(nm)')
            pixel1 = config.getint('Frequency Calibration','pixel1')
            pixel2 = config.getint('Frequency Calibration','pixel2')
            f_begin = config.getfloat('Frequency Calibration','freq index begin')
            f_size = config.getint('Frequency Calibration','freq index length')
            f_min = config.getfloat('Frequency Calibration','freq Min')
            f_max = config.getfloat('Frequency Calibration','freq Max')
            
            pix = _np.linspace(pixel1,pixel2,f_size)
            wl = _np.linspace(wl1,wl2,f_size)
            wn = 1e7*(1/wl - 1/probe)
            
            # Config is apparently backwards so flip order
            pix = _np.flipud(pix)
            wl = _np.flipud(wl)
            wn = _np.flipud(wn)
            
            # Frequency calibration stuff
            wl_center = wl.mean()
            wl_slope, wl_intercept = _np.polyfit(pix, wl,1)
            
                
            # Get data
            with open(pfname_data,'r') as csvfile:
                reader = _csv.reader(csvfile, delimiter='\t')
                data = []
                for count in reader:
                    data.append(count)
            data = _np.array(data).astype(float)
            
            if (data.shape[-1] == 3) | (data.shape[-1] == 2):  # Spectra
                print('Spectra')
                wn = data[:,0]
                temp = data[:,-1]
                if data.shape[-1] == 3:
                    wl = data[:,1]
                data = temp
                
                # Meta data
                attr = {}
                for each_section in config.sections():
                    #print('Section: {}'.format(each_section))
                    for (each_key, each_val) in config.items(each_section):
                        k = each_section + '.' + each_key
                        try:  # int
                            v = int(each_val)
                            #print('Integer')
                        except:
                            try:  # float
                                v = float(each_val)
                            except:  # string
                                v = str.strip(each_val,'"')
                        #print('{}.{}: {}'.format(each_section,each_key, v))
                        attr.update({k:v})
                    
                # Add in frequency calibration info
                attr['Frequency Calibration.Slope'] = wl_slope
                attr['Frequency Calibration.Intercept'] = wl_intercept
                attr['Frequency Calibration.CenterWavelength'] = wl_center
    
                # Convert meta keys to match those of HDF5 version
                # Note: will not overwrite, just add-to
                # Note: Subject to change
                
                output_cls_instance.data = data
                output_cls_instance.meta = attr
                return True
                
            else:
                data = data.reshape((data.shape[0],-1,f_size))
                # Spatial Info
                x_start = config.getfloat('X scan Parameters','X start (um)')
                x_steps = config.getint('X scan Parameters','X steps')
                x_step_size = config.getfloat('X scan Parameters','X step size (um)')
                x = _np.linspace(x_start, x_start + x_step_size * (x_steps-1), x_steps)
    
                try:  # A typo ('Paramters') exists in the LabView header info
                    y_start = config.getfloat('Y scan Paramters','Y start (um)')
                    y_steps = config.getint('Y scan Paramters','Y steps')
                    y_step_size = config.getfloat('Y scan Paramters','Y step size (um)')
                    y = _np.linspace(y_start, y_start + y_step_size * (y_steps-1), y_steps)
                except:  # In case typo is corrected in the future
                    y_start = config.getfloat('Y scan Parameters','Y start (um)')
                    y_steps = config.getint('Y scan Parameters','Y steps')
                    y_step_size = config.getfloat('Y scan Parameters','Y step size (um)')
                    y = _np.linspace(y_start, y_start + y_step_size * (y_steps-1), y_steps)
                
                # Meta data
                attr = {}
                for each_section in config.sections():
                    #print('Section: {}'.format(each_section))
                    for (each_key, each_val) in config.items(each_section):
                        k = each_section + '.' + each_key
                        try:  # int
                            v = int(each_val)
                            #print('Integer')
                        except:
                            try:  # float
                                v = float(each_val)
                            except:  # string
                                v = str.strip(each_val,'"')
                        #print('{}.{}: {}'.format(each_section,each_key, v))
                        attr.update({k:v})
                    
                # Add in frequency calibration info
                attr['Frequency Calibration.Slope'] = wl_slope
                attr['Frequency Calibration.Intercept'] = wl_intercept
                attr['Frequency Calibration.CenterWavelength'] = wl_center
    
                # Convert meta keys to match those of HDF5 version
                # Note: will not overwrite, just add-to
                # Note: Subject to change
                try:
                    ax1 = attr['Image data.1st axis']
    
                    if ax1 == 0:
                        attr['RasterScanParams.FastAxis'] = 'X'
                    elif ax1 == 1:
                        attr['RasterScanParams.FastAxis'] = 'Y'
                    elif ax1 == 2:
                        attr['RasterScanParams.FastAxis'] = 'Z'
                        
                    attr['RasterScanParams.FastAxisStart'] = x_start
                    attr['RasterScanParams.FastAxisStepSize'] = x_step_size
                    attr['RasterScanParams.FastAxisSteps'] = x_steps
                    attr['RasterScanParams.FastAxisStop'] = x[-1]
    
                    ax2 = attr['Image data.2nd axis']
                    
                    if ax2 == 0:
                        attr['RasterScanParams.SlowAxis'] = 'X'
                    elif ax2 == 1:
                        attr['RasterScanParams.SlowAxis'] = 'Y'
                    elif ax2 == 2:
                        attr['RasterScanParams.SlowAxis'] = 'Z'
                        
                    attr['RasterScanParams.SlowAxisStart'] = y_start
                    attr['RasterScanParams.SlowAxisStepSize'] = y_step_size
                    attr['RasterScanParams.SlowAxisSteps'] = y_steps
                    attr['RasterScanParams.SlowAxisStop'] = y[-1]
    
                    ax3 = attr['Image data.3rd axis']
    
                    if ax3 == 0:
                        attr['RasterScanParams.FixedAxis'] = 'X'
                    elif ax3 == 1:
                        attr['RasterScanParams.FixedAxis'] = 'Y'
                    elif ax3 == 2:
                        attr['RasterScanParams.FixedAxis'] = 'Z'
                        
                    # Figure out fixed positions later
                    
                except:
                    pass
                else:
                    output_cls_instance.data = data
                    output_cls_instance.meta = attr
                    return True

        except:
            print('Something failed in import')
Example 17
    def update_config(self):
        loglevel = self.get_log_level()
        logfile = self.get_log_file()

        self._api_log_level = -1
        self._msgs_since_config_update = 0
        self._time_at_config_update = time.time()

        # Retrieving the control API defined log level
        api_url = None
        if self._mlog_config_file and _os.path.isfile(self._mlog_config_file):
            cfg = _ConfigParser()
            cfg.read(self._mlog_config_file)
            cfgitems = self._get_config_items(cfg, _GLOBAL)
            cfgitems.update(self._get_config_items(cfg, self._subsystem))
            if MLOG_LOG_LEVEL in cfgitems:
                try:
                    self._config_log_level = int(cfgitems[MLOG_LOG_LEVEL])
                except:
                    _warnings.warn(
                        'Cannot parse log level {} from file {} to int'.format(
                            cfgitems[MLOG_LOG_LEVEL], self._mlog_config_file) +
                        '. Keeping current log level.')
            if MLOG_API_URL in cfgitems:
                api_url = cfgitems[MLOG_API_URL]
            if MLOG_LOG_FILE in cfgitems:
                self._config_log_file = cfgitems[MLOG_LOG_FILE]
        elif self._mlog_config_file:
            _warnings.warn('Cannot read config file ' + self._mlog_config_file)

        if (api_url):
            subsystem_api_url = api_url + "/" + self._subsystem
            try:
                data = _json.load(
                    urllib.request.urlopen(subsystem_api_url, timeout=5))
            except urllib.error.URLError as e:
                code_ = None
                if hasattr(e, 'code'):
                    code_ = ' ' + str(e.code)
                _warnings.warn('Could not connect to mlog api server at ' +
                               '{}:{} {}. Using default log level {}.'.format(
                                   subsystem_api_url, code_, str(e.reason),
                                   str(DEFAULT_LOG_LEVEL)))
            else:
                max_matching_level = -1
                for constraint_set in data['log_levels']:
                    level = constraint_set['level']
                    constraints = constraint_set['constraints']
                    if level <= max_matching_level:
                        continue

                    matches = 1
                    for constraint in constraints:
                        if constraint not in self._log_constraints:
                            matches = 0
                        elif (self._log_constraints[constraint] !=
                              constraints[constraint]):
                            matches = 0

                    if matches == 1:
                        max_matching_level = level

                self._api_log_level = max_matching_level
        if ((self.get_log_level() != loglevel
             or self.get_log_file() != logfile) and not self._init):
            self._callback()
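The two-pass merge above (global section first, then subsystem overrides) can be sketched with plain dict updates; the section names below are illustrative:

from configparser import ConfigParser

cfg = ConfigParser()
cfg.read_string("[global]\nmlog_log_level = 3\n[myservice]\nmlog_log_level = 6\n")

items = dict(cfg.items('global')) if cfg.has_section('global') else {}
items.update(dict(cfg.items('myservice')))   # subsystem values override the global ones
print(items['mlog_log_level'])               # '6'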
Example 18
    def __init__(self):
        self.conf = _ConfigParser()
        self.conf.optionxform = str  # preserve case
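By default ConfigParser lower-cases option names through optionxform; overriding it with str, as above, keeps the original spelling. A quick comparison:

from configparser import ConfigParser

default = ConfigParser()
default.read_string("[paths]\nDataDir = /tmp\n")
print(list(default['paths']))        # ['datadir'] -- option names are lower-cased

preserving = ConfigParser()
preserving.optionxform = str         # preserve case, as in the snippet above
preserving.read_string("[paths]\nDataDir = /tmp\n")
print(list(preserving['paths']))     # ['DataDir']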
Example 19
def csv_nist_import_data(pth,
                         filename_header,
                         filename_data,
                         output_cls_instance=None):
    """
    Import dataset(s) from NIST-style header (INI) and CSV data files

    Parameters
    ----------
    pth : str
        Path

    filename_header : str
        File name of header
        
    filename_data : str
        File name of data

    output_cls_instance : crikit.data.spectrum.Spectrum (or subclass)
        Spectrum class (or sub) object

    Returns
    -------
        Success : bool
            Success of import
        Data, Meta : list (ndarray, dict)
            If output_cls_instance is None and the import is successful,
            returns the data and associated metadata.

    """
    # Join path and filename in an os-independent way
    pfname_header = _os.path.normpath(_os.path.join(pth, filename_header))
    pfname_data = _os.path.normpath(_os.path.join(pth, filename_data))

    valid_import_locs = 0

    try:
        with open(pfname_header, 'r') as _:
            pass
    except:
        print('Invalid header filename')
    else:
        valid_import_locs += 1

    try:
        with open(pfname_data, 'r') as _:
            pass
    except:
        print('Invalid data filename')
    else:
        valid_import_locs += 1

    if valid_import_locs != 2:
        return False
    else:
        try:
            config = _ConfigParser()
            config.read(pfname_header)

            # Frequency calibration
            probe = config.getfloat('Frequency Calibration', 'probe(nm)')
            wl1 = config.getfloat('Frequency Calibration', 'wavelength1(nm)')
            wl2 = config.getfloat('Frequency Calibration', 'wavelength2(nm)')
            pixel1 = config.getint('Frequency Calibration', 'pixel1')
            pixel2 = config.getint('Frequency Calibration', 'pixel2')
            f_begin = config.getfloat('Frequency Calibration',
                                      'freq index begin')
            f_size = config.getint('Frequency Calibration',
                                   'freq index length')
            f_min = config.getfloat('Frequency Calibration', 'freq Min')
            f_max = config.getfloat('Frequency Calibration', 'freq Max')

            pix = _np.linspace(pixel1, pixel2, f_size)
            wl = _np.linspace(wl1, wl2, f_size)
            wn = 1e7 * (1 / wl - 1 / probe)

            # Config is apparently backwards so flip order
            pix = _np.flipud(pix)
            wl = _np.flipud(wl)
            wn = _np.flipud(wn)

            # Frequency calibration stuff
            wl_center = wl.mean()
            wl_slope, wl_intercept = _np.polyfit(pix, wl, 1)

            # Get data
            with open(pfname_data, 'r') as csvfile:
                reader = _csv.reader(csvfile, delimiter='\t')
                data = []
                for count in reader:
                    data.append(count)
            data = _np.array(data).astype(float)

            if (data.shape[-1] == 3) | (data.shape[-1] == 2):  # Spectra
                print('Spectra')
                wn = data[:, 0]
                temp = data[:, -1]
                if data.shape[-1] == 3:
                    wl = data[:, 1]
                data = temp

                # Meta data
                attr = {}
                for each_section in config.sections():
                    #print('Section: {}'.format(each_section))
                    for (each_key, each_val) in config.items(each_section):
                        k = each_section + '.' + each_key
                        try:  # int
                            v = int(each_val)
                            #print('Integer')
                        except:
                            try:  # float
                                v = float(each_val)
                            except:  # string
                                v = str.strip(each_val, '"')
                        #print('{}.{}: {}'.format(each_section,each_key, v))
                        attr.update({k: v})

                # Add in frequency calibration info
                attr['Frequency Calibration.Slope'] = wl_slope
                attr['Frequency Calibration.Intercept'] = wl_intercept
                attr['Frequency Calibration.CenterWavelength'] = wl_center

                # Convert meta keys to match those of HDF5 version
                # Note: will not overwrite, just add-to
                # Note: Subject to change

                output_cls_instance.data = data
                output_cls_instance.meta = attr
                return True

            else:
                data = data.reshape((data.shape[0], -1, f_size))
                # Spatial Info
                x_start = config.getfloat('X scan Parameters', 'X start (um)')
                x_steps = config.getint('X scan Parameters', 'X steps')
                x_step_size = config.getfloat('X scan Parameters',
                                              'X step size (um)')
                x = _np.linspace(x_start,
                                 x_start + x_step_size * (x_steps - 1),
                                 x_steps)

                try:  # A typo ('Paramters') exists in the LabView header info
                    y_start = config.getfloat('Y scan Paramters',
                                              'Y start (um)')
                    y_steps = config.getint('Y scan Paramters', 'Y steps')
                    y_step_size = config.getfloat('Y scan Paramters',
                                                  'Y step size (um)')
                    y = _np.linspace(y_start,
                                     y_start + y_step_size * (y_steps - 1),
                                     y_steps)
                except:  # In case typo is corrected in the future
                    y_start = config.getfloat('Y scan Parameters',
                                              'Y start (um)')
                    y_steps = config.getint('Y scan Parameters', 'Y steps')
                    y_step_size = config.getfloat('Y scan Parameters',
                                                  'Y step size (um)')
                    y = _np.linspace(y_start,
                                     y_start + y_step_size * (y_steps - 1),
                                     y_steps)

                # Meta data
                attr = {}
                for each_section in config.sections():
                    #print('Section: {}'.format(each_section))
                    for (each_key, each_val) in config.items(each_section):
                        k = each_section + '.' + each_key
                        try:  # int
                            v = int(each_val)
                            #print('Integer')
                        except:
                            try:  # float
                                v = float(each_val)
                            except:  # string
                                v = str.strip(each_val, '"')
                        #print('{}.{}: {}'.format(each_section,each_key, v))
                        attr.update({k: v})

                # Add in frequency calibration info
                attr['Frequency Calibration.Slope'] = wl_slope
                attr['Frequency Calibration.Intercept'] = wl_intercept
                attr['Frequency Calibration.CenterWavelength'] = wl_center

                # Convert meta keys to match those of HDF5 version
                # Note: will not overwrite, just add-to
                # Note: Subject to change
                try:
                    ax1 = attr['Image data.1st axis']

                    if ax1 == 0:
                        attr['RasterScanParams.FastAxis'] = 'X'
                    elif ax1 == 1:
                        attr['RasterScanParams.FastAxis'] = 'Y'
                    elif ax1 == 2:
                        attr['RasterScanParams.FastAxis'] = 'Z'

                    attr['RasterScanParams.FastAxisStart'] = x_start
                    attr['RasterScanParams.FastAxisStepSize'] = x_step_size
                    attr['RasterScanParams.FastAxisSteps'] = x_steps
                    attr['RasterScanParams.FastAxisStop'] = x[-1]

                    ax2 = attr['Image data.2nd axis']

                    if ax2 == 0:
                        attr['RasterScanParams.SlowAxis'] = 'X'
                    elif ax2 == 1:
                        attr['RasterScanParams.SlowAxis'] = 'Y'
                    elif ax2 == 2:
                        attr['RasterScanParams.SlowAxis'] = 'Z'

                    attr['RasterScanParams.SlowAxisStart'] = y_start
                    attr['RasterScanParams.SlowAxisStepSize'] = y_step_size
                    attr['RasterScanParams.SlowAxisSteps'] = y_steps
                    attr['RasterScanParams.SlowAxisStop'] = y[-1]

                    ax3 = attr['Image data.3rd axis']

                    if ax3 == 0:
                        attr['RasterScanParams.FixedAxis'] = 'X'
                    elif ax3 == 1:
                        attr['RasterScanParams.FixedAxis'] = 'Y'
                    elif ax3 == 2:
                        attr['RasterScanParams.FixedAxis'] = 'Z'

                    # Figure out fixed positions later

                except:
                    pass
                else:
                    output_cls_instance.data = data
                    output_cls_instance.meta = attr
                    return True

        except:
            print('Something failed in import')
Example 20
def get_auth(filename="credentials.ini", basic=False):
    """
    Set up NTLM authentication for the Microscopy Nexus using an account
    as specified in a file that lives in the package root named
    ``credentials.ini`` (or some other value provided as a parameter).
    Alternatively, the stored credentials can be overridden by supplying two
    environment variables: ``nexusLIMS_user`` and ``nexusLIMS_pass``. These
    variables will be queried first, and if not found, the method will
    attempt to use the credential file.

    Parameters
    ----------
    filename : str
        Name relative to this file (or absolute path) of file from which to
        read the parameters
    basic : bool
        If True, return only username and password rather than NTLM
        authentication (like what is used for CDCS access rather than for
        NIST network resources)

    Returns
    -------
    auth : ``requests_ntlm.HttpNtlmAuth`` or tuple
        NTLM authentication handler for ``requests``

    Notes
    -----
        The credentials file is expected to have a section named
        ``[nexus_credentials]`` and two values: ``username`` and
        ``password``. See the ``credentials.ini.example`` file included in
        the repository as an example.
    """
    try:
        username = _os.environ['nexusLIMS_user']
        passwd = _os.environ['nexusLIMS_pass']
        _logger.info("Authenticating using environment variables")
    except KeyError:
        # if absolute path was provided, use that, otherwise find filename in
        # this directory
        if _os.path.isabs(filename):
            pass
        else:
            filename = _os.path.join(_os.path.dirname(__file__), filename)

        # Raise error if the configuration file is not found
        if not _os.path.isfile(filename):
            raise AuthenticationError("No credentials were specified with "
                                      "environment variables, and credential "
                                      "file {} was not found".format(filename))

        config = _ConfigParser()
        config.read(filename)

        username = config.get("nexus_credentials", "username")
        passwd = config.get("nexus_credentials", "password")

    if basic:
        # return just username and password (for BasicAuthentication)
        return username, passwd

    domain = 'nist'
    path = domain + '\\' + username

    auth = _HttpNtlmAuth(path, passwd)

    return auth
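The lookup order above (environment variables first, credentials file second) can be condensed into a small standalone helper; this is only an illustrative sketch, not part of the original module:

import os
from configparser import ConfigParser

def _load_credentials(path='credentials.ini'):    # hypothetical helper for illustration
    try:
        return os.environ['nexusLIMS_user'], os.environ['nexusLIMS_pass']
    except KeyError:
        config = ConfigParser()
        if not config.read(path):                 # read() returns [] when the file is missing
            raise FileNotFoundError(path)
        return (config.get('nexus_credentials', 'username'),
                config.get('nexus_credentials', 'password'))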
Example 21
from selenium.webdriver.common.keys import Keys as _Keys
from selenium.webdriver.support.ui import WebDriverWait as _WebDriverWait
from selenium.webdriver.support import expected_conditions as _EC
from selenium.webdriver.common.by import By as _By
from selenium.webdriver.chrome.options import Options as _Options

#-----------------------------------------------------------------------------
# Constants & Configs
#-----------------------------------------------------------------------------
_CONFIG_FILENAME = 'config.ini'
_MONTHS = [
    '', 'January', 'February', 'March', 'April', 'May', 'June', 'July',
    'August', 'September', 'October', 'November', 'December'
]

_config = _ConfigParser(interpolation=_ExtendedInterpolation())
_config_result = _config.read(_CONFIG_FILENAME)

if len(_config_result) == 0:
    raise FileNotFoundError(_errno.ENOENT, _os.strerror(_errno.ENOENT),
                            _CONFIG_FILENAME)

# Base URLs
_FEED_URL_STEM = _config['base_urls']['FEED_URL_STEM']
_ARCHIVE_FEED_STEM = _config['base_urls']['ARCHIVE_FEED_STEM']
_ARCHIVE_DOWNLOAD_STEM = _config['base_urls']['ARCHIVE_DOWNLOAD_STEM']
_LOGIN_URL = _config['base_urls']['LOGIN_URL']

# Throttle times
_FILE_REQUEST_WAIT = _config.getfloat('throttle_times',
                                      'FILE_REQUEST_WAIT',
Example 22
# -*- coding: utf-8 -*-
"""doc string"""

import os as _os
from configparser import ConfigParser as _ConfigParser

config = _ConfigParser()
config.read(_os.path.join(
    _os.path.dirname(__file__),
    r'..\..\config.ini',
))


if __name__ == '__main__':
    print(config.get('account', 'username'))
Example 23
    def __init__(self,
                 feed_id,
                 username=None,
                 password=None,
                 login_cfg_path=None,
                 show_browser_ui=False,
                 webdriver_path=None):
        """
        A container for Broadcastify feed archive data, and an engine for
        retrieving archive entry information & downloading the corresponding
        mp3 files. Populates feed name, feed & archive URLs, and start & end
        dates on initialization.

        Init Parameters
        ---------------
        feed_id : str
            The unique feed identifier the container will represent, taken from
            https://www.broadcastify.com/listen/feed/[feed_id].
        username : str
            The username for a valid Broadcastify premium account.
        password : str
            The password for a valid Broadcastify premium account. Note that
            getting the property value will return only "True" (if set) or
            "False" (if not set) to maintain confidentiality.
        login_cfg_path : str
            An absolute path to a password configuration file. Allows the user
            to keep their login information outside the script using the archive
            for privacy reasons.
        show_browser_ui : bool
            If True, scraping done during initialization and build (which use
            the Selenium webdriver) will be done with the "headless" option set
            to False, resulting in a visible browser window being open in the UI
            during scraping. Otherwise, scraping will be done "invisibly".

            Note that no browser will be shown during download, since
            requests.Session() is used, not Selenium.
        webdriver_path : str
                Optional absolute path to WebDriver if it's not located in a
                directory in the PATH environment variable


        Other Attributes & Properties
        -----------------------------
        feed_url : str
            Full https URL for the feed's main "listen" page.
        archive_url : str
            Full https URL for the feed's archive page.
        entries : dictionary
            Container for archive entry information.
            uri : str
                [Populated at .build] The unique ID for an individual archive
                file page, which corresponds to a feed's transmissions over a
                ~30-minute period on a given date. Can be used to find the mp3
                file's individual download page. This page is password protected
                and limited to premium account holders.
            start_time : datetime
                [Populated at .build] Beginning time of the archive entry.
            end_time : datetime
                [Populated at .build] Ending time of the archive entry.
        earliest_entry : datetime
        latest_entry   : datetime
            The datetime of the earliest/latest archive entry currently in
            `entries`.
        start_date : datetime
        end_date   : datetime
            The datetime of the earliest/latest dates on the archive's calendar.
        throttle : _RequestThrottle
            (INTERNAL USE ONLY) Throttle http requests to the Broadcastify
            servers.
        """
        self.show_browser_ui = show_browser_ui
        if webdriver_path is None:
            self.webdriver_path = 'chromedriver'
        else:
            self.webdriver_path = webdriver_path

        self._feed_id = None

        self.feed_url = _FEED_URL_STEM + feed_id
        self.archive_url = _ARCHIVE_FEED_STEM + feed_id
        self.username = username
        self.password = password
        self.entries = []
        self.earliest_entry = None
        self.latest_entry = None
        self.start_date = None
        self.end_date = None
        self.throttle = _RequestThrottle()

        # If username or password was not passed...
        if (username is None
                or password is None) and login_cfg_path is not None:
            # ...try to get it from the pwd.ini file
            _config = _ConfigParser()
            config_result = _config.read(login_cfg_path)

            if len(config_result) != 0:
                # Replace only if argument was not passed
                if not (username):
                    self.username = _config['authentication_data']['username']
                if not (password):
                    self.password = _config['authentication_data']['password']

        self.feed_id = feed_id
        self._get_feed_name(feed_id)
Example 24
from configparser import ConfigParser as _ConfigParser
from importlib import resources

__version__ = "1.2.0"

_cfg = _ConfigParser()

with resources.path("reader", "config.cfg") as _path:
    _cfg.read(str(_path))

URL = _cfg.get("feed", "url")
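resources.path, used above, is deprecated in newer Python versions in favour of resources.files (available since 3.9); an equivalent sketch of the newer spelling, assuming the same reader package and config.cfg resource exist:

from configparser import ConfigParser as _ConfigParser
from importlib import resources

_cfg = _ConfigParser()
# files()/read_text() replaces the deprecated path() context manager
_cfg.read_string(resources.files("reader").joinpath("config.cfg").read_text())

URL = _cfg.get("feed", "url")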
Example 25
if 3 == python_version.major:
    if 2 <= python_version.minor:
        from configparser import ( # pylint: disable=F0401
            ConfigParser            as _ConfigParser,
        )
    else:
        from configparser import ( # pylint: disable=F0401
            SafeConfigParser        as _ConfigParser,
        )
else:
    from ConfigParser import ( # pylint: disable=F0401
        SafeConfigParser        as _ConfigParser,
    )

_path_to_config = _join_path( __path__[ 0 ], "version.cfg" )
_vinfo_CFG = _ConfigParser( )
if _path_to_config not in _vinfo_CFG.read( _path_to_config ):
    raise IOError(
        "Configuration file '{0}' expected but not found.".format(
            _path_to_config
        )
    )

_vinfo_release_type     = _vinfo_CFG.get( "control", "release_type" )
assert _vinfo_release_type in [ "bugfix", "candidate", "development" ]
_vinfo_numbers_DICT     = dict( _vinfo_CFG.items( "numbers" ) )
if   "bugfix" == _vinfo_release_type: # Stable Bugfix Release
    __version__ = \
    "{major}.{minor}.{bugfix}".format( **_vinfo_numbers_DICT )
elif "candidate" == _vinfo_release_type: # Release Candidate
    __version__ = \