Code Example #1
File: filesystem.py Project: stanislav-web/OpenDoor
    def readcfg(filename):
        """
        Read .cfg file
        :param str filename: input filename
        :raise FileSystemError
        :return: configparser.RawConfigParser
        """

        expression = r'^([/a-z].*?opendoor.*?)/'
        find_dir = re.search(expression, __file__, re.IGNORECASE)
        if find_dir is not None:
            os.chdir(find_dir.group())
        filepath = os.path.join(os.path.sep, os.getcwd(), filename)

        if not os.path.isfile(filepath):
            raise FileSystemError("{0} is not a file ".format(filepath))
        if not os.access(filepath, os.R_OK):
            raise FileSystemError("Configuration file {0} can not be read. Setup chmod 0644".format(filepath))

        try:

            config = RawConfigParser()
            config.read(filepath)
            return config

        except (ParsingError, NoOptionError) as error:
            raise FileSystemError(error)
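
Note: the explicit isfile/access checks above exist because RawConfigParser.read() silently skips files it cannot open rather than raising. A minimal sketch of that behaviour (hypothetical file names):

    from configparser import RawConfigParser

    parser = RawConfigParser()
    loaded = parser.read(['missing.cfg', 'also_missing.cfg'])  # no exception is raised
    assert loaded == []  # read() returns only the files it actually parsed
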
Code Example #2
    def load(self, csv):
        conf = RawConfigParser()
        # utf-8-sig per https://bugs.python.org/issue7185#msg94346
        with open(csv, encoding='utf-8-sig') as f:
            conf.read_file(f)

        for sec in conf.sections():
            if not self.conf.has_section(sec):
                self.conf.add_section(sec)
                self.crap.add_section(sec)

            for k, v in conf.items(sec):
                is_crap = False

                if '__' in v:
                    is_crap = True

                if not is_crap:
                    if self.conf.has_option(sec, k):
                        if self.conf.get(sec, k).lower() != v.lower():
                            print('Overwriting locale %s (%r -> %r)' % (k, self.conf.get(sec, k), v))

                    self.conf.set(sec, k, v)
                else:
                    if self.crap.has_option(sec, k):
                        print('Overwriting crap locale %s (%r -> %r)' % (k, self.crap.get(sec, k), v))

                    self.crap.set(sec, k, v)
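
Note: the 'utf-8-sig' codec matters here because a UTF-8 BOM would otherwise become part of the first section header and break parsing. A self-contained sketch of the failure mode (hypothetical data):

    from configparser import RawConfigParser, MissingSectionHeaderError

    data = b'\xef\xbb\xbf[main]\nkey = value\n'  # BOM followed by an ini section
    parser = RawConfigParser()
    try:
        parser.read_string(data.decode('utf-8'))  # BOM survives as U+FEFF
    except MissingSectionHeaderError:
        pass  # '\ufeff[main]' is not recognised as a section header
    parser.read_string(data.decode('utf-8-sig'))  # BOM stripped; parses fine
    assert parser.get('main', 'key') == 'value'
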
Code Example #3
def run():
	# load the config file
	global master_config
	master_config = RawConfigParser()
	master_config.read(config_directory + MASTER_CONFIG_FILENAME)

	# set the get-iplayer path
	global get_iplayer_path
	if master_config.has_option("General", "get-iplayer_path"):
		get_iplayer_path = master_config.get("General", "get-iplayer_path")

	# refresh the get-iplayer cache
	# print("Refreshing get-iplayer... (this may take some time)")
	# subprocess.check_output([get_iplayer_path, "--type=all", "--quiet"])

	# new BBC rules :-( now we get the programme info externally using a different
	# script and read that script's output directly into a hash
	# global bbc_programmes
	# bbc_programmes = load_bbc_programmes()

	# scan for feed config files and process each
	for root, directories, files in os.walk(config_directory + FEED_CONFIG_DIRECTORY):
		for filename in files:
			if filename == ".DS_Store":
				continue

			print("about to read config " + filename )
			load_feed(filename)
		print("Finished.")
		return # stop here, we have processed the feeds

	# if we have not returned at this point, then no config directory was found, this is a problem
	print("No config directory found")
Code Example #4
File: main.py Project: cantgitenough/djradicale
def _read_from_sections(user, collection_url, permission):
    regex = ConfigParser({'login': user, 'path': collection_url})
    for rights in (INITIAL_RIGHTS, settings.DJRADICALE_RIGHTS):
        for section, values in rights.items():
            if not regex.has_section(section):
                regex.add_section(section)
            for key, value in values.items():
                regex.set(
                    section, key,
                    value % {
                        'login': re.escape(user),
                        'path': re.escape(collection_url),
                    })
    log.LOGGER.debug("Rights type '%s'" % __name__)

    for section in regex.sections():
        re_user = regex.get(section, 'user')
        re_collection = regex.get(section, 'collection')
        log.LOGGER.debug(
            "Test if '%s:%s' matches against '%s:%s' from section '%s'" % (
                user, collection_url, re_user, re_collection, section))
        user_match = re.match(re_user, user)
        if user_match:
            re_collection = re_collection.format(*user_match.groups())
            if re.match(re_collection, collection_url):
                log.LOGGER.debug("Section '%s' matches" % section)
                if permission in regex.get(section, 'permission'):
                    return True
            else:
                log.LOGGER.debug("Section '%s' does not match" % section)
    return False
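
Note: passing a dict to the ConfigParser constructor, as above, seeds the DEFAULT section; with the default interpolation those values can be referenced as %(login)s or %(path)s inside option values. A minimal sketch (hypothetical section and pattern):

    from configparser import ConfigParser

    parser = ConfigParser({'login': 'alice', 'path': 'alice/calendar'})
    parser.add_section('owner-only')
    parser.set('owner-only', 'user', '^%(login)s$')
    print(parser.get('owner-only', 'user'))  # -> ^alice$
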
Code Example #5
    def run_generator(self, expect_error=False):
        '''Run sysv-generator.

        Fail if stderr contains any "Fail", unless expect_error is True.
        Return (stderr, filename -> ConfigParser) pair with output to stderr and
        parsed generated units.
        '''
        env = os.environ.copy()
        env['SYSTEMD_LOG_LEVEL'] = 'debug'
        env['SYSTEMD_SYSVINIT_PATH'] = self.init_d_dir
        env['SYSTEMD_SYSVRCND_PATH'] = self.rcnd_dir
        env['SYSTEMD_UNIT_PATH'] = self.unit_dir
        gen = subprocess.Popen(
            [sysv_generator, 'ignored', 'ignored', self.out_dir],
            stdout=subprocess.PIPE, stderr=subprocess.PIPE,
            universal_newlines=True, env=env)
        (out, err) = gen.communicate()
        if not expect_error:
            self.assertFalse('Fail' in err, err)
        self.assertEqual(gen.returncode, 0, err)

        results = {}
        for service in glob(self.out_dir + '/*.service'):
            if os.path.islink(service):
                continue
            cp = RawConfigParser()
            cp.optionxform = lambda o: o  # don't lower-case option names
            with open(service) as f:
                cp.read_file(f)  # read_file() replaces readfp(), removed in Python 3.12
            results[os.path.basename(service)] = cp

        return (err, results)
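
Note: overriding optionxform, as above, is needed because the parser lower-cases option names by default, which would mangle case-sensitive unit-file keys such as ExecStart. A short sketch:

    from configparser import RawConfigParser

    cp = RawConfigParser()
    cp.read_string('[Service]\nExecStart = /bin/true\n')
    print(list(cp['Service']))    # -> ['execstart'] (lower-cased by default)

    cp = RawConfigParser()
    cp.optionxform = lambda o: o  # keep option names exactly as written
    cp.read_string('[Service]\nExecStart = /bin/true\n')
    print(list(cp['Service']))    # -> ['ExecStart']
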
Code Example #6
File: __init__.py Project: theshillito/pseudomyth
def populate_config():
    config_path = os.path.expanduser('~/.pseudomyth')

    if os.path.exists(config_path):
        parser = RawConfigParser(dict_type=dict)
        parser.read(config_path)
        CONFIG.update(parser.defaults())
Code Example #7
File: test_webhook.py Project: zedian/webhook
def add_handler(filename, event, handler):
    config = RawConfigParser()
    config.read(filename)
    config['Handlers'][event] = handler

    with open(filename, 'w') as f:
        config.write(f)
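
Note: config['Handlers'][event] = handler uses the mapping protocol available since Python 3.2; it is equivalent to config.set('Handlers', event, handler) and requires the section to already exist. A usage sketch with hypothetical names:

    from configparser import RawConfigParser

    config = RawConfigParser()
    config.add_section('Handlers')
    config['Handlers']['push'] = '/usr/local/bin/on-push.sh'  # mapping style
    assert config.get('Handlers', 'push') == '/usr/local/bin/on-push.sh'
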
Code Example #8
File: daemon_test.py Project: mymoflx/observant
def test():
    config = CParser()
    config.read('./etc/observant/observant.cfg')

    pid = os.fork()
    if pid != 0:
        os.waitpid(pid, 0)
    else:
        dump = './.test.out'
        sockPath = './.test.sock'
        server = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        server.setblocking(0)
        with open(dump, 'w') as fd:
            fd.write("test")
            server.bind(sockPath)
            server.listen(1)
            transport = daemon.transportFromConfig(config)
            lock = str(config.get('daemon', 'lock'))
            sock = str(config.get('daemon', 'sock'))
            key = str(config.get('daemon', 'key'))
            client = StatsCore.attachOrCreateStatsDaemon(key, transport, pid=lock, sock=sock)
            client.postWatchPid('test', os.getpid())
            time.sleep(4)
            client.postLogMessageForKey('test', 'some random logmessage')
            time.sleep(4)
            client.postLogMessageForKey('test', 'some random logmessage')
            client.close()
            time.sleep(4)
        os.remove(dump)
        server.close()
        os.unlink(sockPath)
Code Example #9
File: sensor.py Project: ricco386/broadcaster
 def load_config(fp, reopen=False):
     config = RawConfigParser()
     if reopen:
         fp = open(fp.name)
     config.read_file(fp)  # readfp() was deprecated in Python 3.2 and removed in 3.12
     fp.close()
     return config
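
Note: read_file() accepts any iterable of lines, so a config can be parsed straight from memory, which is handy in tests. A minimal sketch:

    import io
    from configparser import RawConfigParser

    fp = io.StringIO('[daemon]\nsock = /tmp/test.sock\n')
    config = RawConfigParser()
    config.read_file(fp)
    assert config.get('daemon', 'sock') == '/tmp/test.sock'
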
Code Example #10
File: config.py Project: Perdu/poezio
 def __init__(self, file_name, default=None):
     RawConfigParser.__init__(self, None)
     # make the options case sensitive
     self.optionxform = str
     self.file_name = file_name
     self.read_file()
     self.default = default
Code Example #11
File: config.py Project: Perdu/poezio
 def set_and_save(self, option, value, section=DEFSECTION):
     """
     set the value in the configuration then save it
     to the file
     """
     # Special case for a 'toggle' value. We take the current value
      # and set the opposite. Warn if no current value exists
      # or it is not a bool.
     if value == "toggle":
         current = self.get(option, "", section)
         if isinstance(current, bool):
             value = str(not current)
         else:
             if current.lower() == "false":
                 value = "true"
             elif current.lower() == "true":
                 value = "false"
             else:
                 return (_('Could not toggle option: %s.'
                           ' Current value is %s.') %
                               (option, current or _("empty")),
                         'Warning')
     if self.has_section(section):
         RawConfigParser.set(self, section, option, value)
     else:
         self.add_section(section)
         RawConfigParser.set(self, section, option, value)
     if not self.write_in_file(section, option, value):
         return (_('Unable to write in the config file'), 'Error')
     return ("%s=%s" % (option, value), 'Info')
Code Example #12
File: config.py Project: kookerus/UnitsConverter
 def get_sections(self):
     """
         Returns a list of sections in the ini file
     """
     config = RawConfigParser()
     config.read(self.file_name)
     return config.sections()
Code Example #13
File: core.py Project: jacquemier/ctapipe
    def write(self, filename, impl=FITS, implementation=DATAIMPL):
        """
        write configuration entries to a file.

        Parameters:
        -----------
        filename: str
            Full path name:  Save all configuration entries
            to this given filename
        impl: str, optional
            "FITS" -> use FITS format
            "INI"  -> use windows-style ini format
        """
        
        if impl == self.FITS:
            self._write_fits(filename, implementation)

        # Write an .ini-format representation of the configuration state.
        elif impl == self.INI:
            config_parser = RawConfigParser()
            self._fill(config_parser)
            with open(filename, 'w') as config_file: 
                config_parser.write(config_file)
        else:
            print("Format:",impl,'not allowed',file=sys.stderr)
Code Example #14
File: state.py Project: spanezz/egt
    def load(self, statedir: str = None) -> None:
        if statedir is None:
            statedir = self.get_state_dir()

        statefile = os.path.join(statedir, "state.json")
        if os.path.exists(statefile):
            # Load state from JSON file
            with open(statefile, "rt") as fd:
                state = json.load(fd)
            self.projects = state["projects"]
            return

        # TODO: remove support for legacy format
        statefile = os.path.join(statedir, "state")
        if os.path.exists(statefile):
            # Load state from legacy .ini file
            from configparser import RawConfigParser
            cp = RawConfigParser()
            cp.read([statefile])
            for secname in cp.sections():
                if secname.startswith("proj "):
                    name = secname.split(None, 1)[1]
                    fname = cp.get(secname, "fname")
                    self.projects[name] = {"fname": fname}
            return
Code Example #15
File: runner.py Project: d9pouces/PolyArchiv
 def _iter_config_parsers(self, pattern):
     for path in self.config_directories:
         count = 0
         file_list = glob.glob(os.path.join(path, pattern))
         file_list.sort()
         for config_file in file_list:
             count += 1
             parser = RawConfigParser()
             try:
                 # Probe read permission first: parser.read() silently skips
                 # unreadable files instead of raising an error.
                 with open(config_file, "rb") as probe:
                     probe.read(1)
                 parser.read([config_file])
             except IOError as e:
                 if e.errno == errno.EACCES:
                     username = pwd.getpwuid(os.getuid())[0]
                     self.print_info(
                         "%s is ignored because user %s cannot read it"
                         % (config_file, username)
                     )
                     continue
                 raise
             except ConfigError:
                 raise ValueError(
                     "File '%s' is not a valid '.ini' file" % config_file
                 )
             self.print_info("File %s added to the configuration" % config_file)
             yield config_file, parser
         if count == 0:
             self.print_info("No %s file found in %s" % (pattern, path))
Code Example #16
File: iplayercast.py Project: oscarkey/iplayercast
def run():
	# print a warning about copyright
	print("WARNING: Do not use the script to produce public podcasts, it is for personal use only.")
	print("If you publically serve programmes you may be in violation of the BBC's copyright.")
	
	# load the config file
	global master_config
	master_config = RawConfigParser()
	master_config.read(config_directory + MASTER_CONFIG_FILENAME)
	
	# set the get-iplayer path
	global get_iplayer_path
	if master_config.has_option("General", "get-iplayer_path"):
		get_iplayer_path = master_config.get("General", "get-iplayer_path")
		
	# refresh the get-iplayer cache
	print("Refreshing get-iplayer... (this may take some time)")
	subprocess.check_output([get_iplayer_path, "--type=all", "--quiet"])
	
	# scan for feed config files and process each
	for root, directories, files in os.walk(config_directory + FEED_CONFIG_DIRECTORY):
		for filename in files:
			load_feed(filename)
		print("Finished.")
		return # stop here, we have processed the feeds
	
	# if we have not returned at this point, then no config directory was found, this is a problem
	print("No config directory found")
Code Example #17
File: acbs_parser.py Project: liushuyu/DraftBin
def parse_abbs_spec(spec_file_loc, pkg_name):
    try:
        fp = open(spec_file_loc + '/spec', 'rt')
        spec_cont = fp.read()
        fp.close()
    except OSError:
        print('[E] Failed to load spec file! Do you have read permission?')
        return False
    # Stupid but necessary laundry list of possible variables
    script = spec_cont + gen_laundry_list(['VER', 'REL', 'SUBDIR', 'SRCTBL', 'GITSRC',
                                           'GITCO', 'GITBRCH', 'SVNSRC', 'SVNCO', 'HGSRC', 'BZRSRC', 'BZRCO', 'DUMMYSRC'])
    try:
        # Better to be replaced by subprocess.Popen
        spec_out = subprocess.check_output(script, shell=True)
    except subprocess.CalledProcessError:
        print('[E] Malformed spec file found! Couldn\'t continue!')
        return False
    # Assume it's UTF-8 since we have no clue of the real world on how it
    # works ...
    spec_fp = io.StringIO('[wrap]\n' + spec_out.decode('utf-8'))
    config = RawConfigParser()
    config.read_file(spec_fp)
    config_dict = {}
    for i in config['wrap']:
        config_dict[i.upper()] = config['wrap'][i]
    config_dict['NAME'] = pkg_name
    res, err_msg = parser_validate(config_dict)
    if res is not True:
        print('[E] {}'.format(err_msg))
        return False
    return config_dict
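
Note: prepending '[wrap]\n' works because configparser requires every option to live under a section header; shell-style KEY=VALUE output has none, so a synthetic section is injected before parsing. A self-contained sketch:

    import io
    from configparser import RawConfigParser

    shell_vars = 'VER=1.2\nREL=0\n'           # sectionless key=value text
    config = RawConfigParser()
    config.read_file(io.StringIO('[wrap]\n' + shell_vars))
    assert config['wrap']['ver'] == '1.2'     # option names are lower-cased
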
Code Example #18
File: config.py Project: mathieui/poezio
 def __init__(self, file_name: Path, default=None) -> None:
     RawConfigParser.__init__(self, None)
     # make the options case sensitive
     self.optionxform = lambda param: str(param)
     self.file_name = file_name
     self.read_file()
     self.default = default
Code Example #19
    def load(self, csv):
        conf = RawConfigParser()
        with open(csv, 'rb') as f:
            input_bytes = f.read()
            decoded = input_bytes.decode(chardet.detect(input_bytes)['encoding'])
            decoded = '[__global__]\n' + decoded
            conf.read_string(decoded)

        for sec in conf.sections():
            if not self.conf.has_section(sec):
                self.conf.add_section(sec)
                self.crap.add_section(sec)

            for k, v in conf.items(sec):
                is_crap = False

                if '__' in v:
                    is_crap = True

                if not is_crap:
                    if self.conf.has_option(sec, k):
                        if self.conf.get(sec, k).lower() != v.lower():
                            print('Overwriting locale %s (%r -> %r)' % (k, self.conf.get(sec, k), v))

                    self.conf.set(sec, k, v)
                else:
                    if self.crap.has_option(sec, k):
                        print('Overwriting crap locale %s (%r -> %r)' % (k, self.crap.get(sec, k), v))

                    self.crap.set(sec, k, v)
Code Example #20
    def set_environ_vars(cls):

        if not os.path.exists(os.path.join(os.getenv("PINGUINO_DATA"), "paths.cfg")):
            logging.error("Missing: "+os.path.join(os.getenv("PINGUINO_DATA"), "paths.cfg"))
            sys.exit()

        config_paths = RawConfigParser()
        with open(os.path.join(os.getenv("PINGUINO_DATA"), "paths.cfg"), "r") as fp:
            config_paths.read_file(fp)  # read_file() replaces the removed readfp()

        #RB20141116 : get the “bitness” of the current OS
        bitness, linkage = platform.architecture()
        os.environ["PINGUINO_OS_ARCH"] = bitness

        if os.name == "posix": #GNU/Linux
            os.environ["PINGUINO_OS_NAME"] = "linux"

        #Mac could return posix :/
        elif os.name == "os2":  #Mac OS X
            os.environ["PINGUINO_OS_NAME"] = "macosx"

        elif os.name == "nt":  #Windows
            os.environ["PINGUINO_OS_NAME"] = "windows"

        #load path from paths.conf
        os.environ["PINGUINO_USER_PATH"] = os.path.expandvars(os.path.expanduser(config_paths.get("paths-%s"%os.getenv("PINGUINO_OS_NAME"), "user_path")))
        os.environ["PINGUINO_INSTALL_PATH"] = os.path.expandvars(os.path.expanduser(config_paths.get("paths-%s"%os.getenv("PINGUINO_OS_NAME"), "install_path")))
        os.environ["PINGUINO_USERLIBS_PATH"] = os.path.expandvars(os.path.join(os.getenv("PINGUINO_USER_PATH"), "library_manager"))
Code Example #21
def settingsFromFile(infile, defaults):
    """Given a path string :attr:`infile`, load settings and return them as
    dictionary.

    Args:
        infile (str): a path to a file
        defaults (dict): a dictionary containing fallback values
    """
    config = RawConfigParser()
    config.optionxform = lambda option: option
    try:
        with open(infile) as f:
            try:
                config.read_file(f)
            except MissingSectionHeaderError:
                config['General'] = defaults
    except OSError:
        config['General'] = defaults
    for key in defaults:
        if key not in config['General']:
            config['General'][key] = defaults[key]
    try:
        if int(config['General']['UpdateEvery']) <= 0:
            config['General']['UpdateEvery'] = defaults['UpdateEvery']
    except ValueError:
        # can't convert to integer
        config['General']['UpdateEvery'] = defaults['UpdateEvery']
    for value in ('SyncNewCourses', ):
        try:
            booleanvalue = config.getboolean('General', value)
            config['General'][value] = str(booleanvalue)
        except ValueError:
            # can't convert to boolean
            config['General'][value] = defaults[value]
    return dict(config['General'])
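
Note: getboolean(), used above for SyncNewCourses, accepts '1'/'yes'/'true'/'on' and '0'/'no'/'false'/'off' (case-insensitively) and raises ValueError for anything else, which is exactly what the fallback branch catches. A short sketch:

    from configparser import RawConfigParser

    config = RawConfigParser()
    config.read_string('[General]\nSyncNewCourses = yes\nBroken = maybe\n')
    assert config.getboolean('General', 'SyncNewCourses') is True
    try:
        config.getboolean('General', 'Broken')
    except ValueError:
        pass  # 'maybe' is not a recognised boolean string
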
Code Example #22
File: server.py Project: inpho/topic-explorer
def get_host_port(args):
    """
    Returns the hostname and port number
    """
    import topicexplorer.config
    config = topicexplorer.config.read(args.config)

    # automatic port assignment
    def test_port(port):
        try:
            host = args.host or config.get("www", "host")
            if host == '0.0.0.0':
                host = 'localhost'
            try:
                s = socket.create_connection((host, port), 2)
                s.close()
                raise IOError("Socket connectable on port {0}".format(port))
            except socket.error:
                pass
            return port
        except IOError:
            if not args.quiet:
                port = int_prompt(
                    "Conflict on port {0}. Enter new port:".format(port))
                return test_port(port)
            else:
                raise IOError(
                    "Conflict on port {0}. Try running with -p to manually set new port.".format(port))

    port = args.port or int(config.get('www', 'port').format(0))
    port = test_port(port)

    # prompt to save
    if (int(config.get("www", "port").format(0))) != port:
        if not args.quiet and bool_prompt(
            "Change default baseport to {0}?".format(port), default=True):
            config.set("www", "port", text(port))

            # create deep copy of configuration
            # see http://stackoverflow.com/a/24343297
            config_string = StringIO()
            config.write(config_string)

            # skip DEFAULT section
            config_string.seek(0)
            idx = config_string.getvalue().index("[main]")
            config_string.seek(idx)

            # read deep copy
            new_config = ConfigParser()
            new_config.read_file(config_string)

            # write deep copy without DEFAULT section
            # this preserves DEFAULT for rest of program
            with open(args.config, 'w') as configfh:
                new_config.write(configfh)

    # hostname assignment
    host = args.host or config.get('www', 'host')
    return host, port
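
Note: serialising to a StringIO and re-parsing, as above, is a simple way to deep-copy parser state, since ConfigParser objects implement no copy() of their own. A condensed sketch:

    import io
    from configparser import ConfigParser

    src = ConfigParser()
    src.read_string('[main]\nport = 8000\n')

    buf = io.StringIO()
    src.write(buf)
    buf.seek(0)

    dup = ConfigParser()
    dup.read_file(buf)
    dup.set('main', 'port', '9000')
    assert src.get('main', 'port') == '8000'  # the original is untouched
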
Code Example #23
File: main.py Project: ricco386/broadcaster
 def load_config(fp, reopen=False):
     config = RawConfigParser()
     if reopen:
         fp = open(fp.name)
     config.read_file(fp)  # readfp() is removed in Python 3.12
     fp.close()
     return config
Code Example #24
File: _config.py Project: aregee/Mailman
    def _getExtendedConfs(self, conf_filename, conf_data, confs=None):
        """Return a list of tuple (conf_name, parser, encoding_errors).

        :param conf_filename: The path and name of the conf file.
        :param conf_data: Unparsed config data.
        :param confs: A list of confs that extend filename.
        :return: A list of confs ordered from extender to extendee.
        :raises IOError: If filename cannot be read.

        This method parses the config data and checks for encoding errors.
        It checks parsed config data for the extends key in the meta section.
        It reads the unparsed config_data from the extended filename.
        It passes filename, data, and the working list to itself.
        """
        if confs is None:
            confs = []
        encoding_errors = self._verifyEncoding(conf_data)
        parser = RawConfigParser()
        parser.read_file(StringIO(conf_data), conf_filename)  # read_file() replaces the removed readfp()
        confs.append((conf_filename, parser, encoding_errors))
        if parser.has_option('meta', 'extends'):
            base_path = dirname(conf_filename)
            extends_name = parser.get('meta', 'extends')
            extends_filename = abspath('%s/%s' % (base_path, extends_name))
            extends_data = read_content(extends_filename)
            self._getExtendedConfs(extends_filename, extends_data, confs)
        return confs
Code Example #25
def main():
    logging.basicConfig(level=logging.INFO)
    args = docopt(__doc__, version='PeP et Al. emails v0.0.1')
    database = read_database(args['<database>'])

    config = RawConfigParser()
    successful_files = config.read(args['--config'])
    if not successful_files:
        raise IOError('Could not read config-file')

    backend = args['--backend'] or config.sections()[0]

    if backend == 'smtplib':
        from .backends import SMTPLibMailer
        mailer = SMTPLibMailer(**config['smtplib'])

    elif backend == 'mailgun':
        from .backends import MailgunMailer
        mailer = MailgunMailer(**config['mailgun'])

    else:
        raise ValueError('Unsupported backend: {}'.format(args['--backend']))

    template, metadata = parse_template(args['<template>'])

    for recipient in database.itertuples():
        markdown = template.render(recipient=recipient, metadata=metadata)
        html = gfm.markdown(markdown)
        mailer.send_mail(
            recipient,
            metadata,
            markdown,
            html=html,
            attachments=metadata.get('attachments')
        )
Code Example #26
def init(app, config_file):
    global metadata
    config = ConfigParser()
    config.read(config_file)

    try:
        filename = config.get('bibtex', 'path')
    except ConfigParserError:
        model_path = config.get('main', 'path')
        filename = os.path.join(model_path, 'library.bib')

    print("Loading Bibtex metadata from", filename)
    bib = parse_file(filename)

    metadata = dict()
    for entry in bib.entries:
        key = '/' + bib.entries[entry].fields.get('file', '').replace(':pdf', '')[1:]
        if 'C$\\backslash$:' in key:
            key = key.replace('C$\\backslash$:', '')
            key = key[1:]
            key = os.path.normpath(key)
        key = os.path.basename(key)
        try:
            citation = pybtex.format_from_file(
                filename, style='plain', output_backend='text', citations=[entry])[3:]
            metadata[key] = citation
        except PybtexError:
            metadata[key] = filename
Code Example #27
File: _config.py Project: aregee/Mailman
    def __init__(self, filename, file_object=None):
        """Load a configuration schema from the provided filename.

        :param filename: The name of the file to load from, or if
            `file_object` is given, to pretend to load from.
        :type filename: string
        :param file_object: If given, optional file-like object to read from
            instead of actually opening the named file.
        :type file_object: An object with a readline() method.
        :raise `UnicodeDecodeError`: if the string contains non-ascii
            characters.
        :raise `RedefinedSectionError`: if a SectionSchema name is redefined.
        :raise `InvalidSectionNameError`: if a SectionSchema name is
            ill-formed.
        """
        # XXX sinzui 2007-12-13:
        # RawConfigParser permits redefinition and non-ascii characters.
        # The raw schema data is examined before creating a config.
        self.filename = filename
        self.name = basename(filename)
        self._section_schemas = {}
        self._category_names = []
        if file_object is None:
            raw_schema = self._getRawSchema(filename)
        else:
            raw_schema = file_object
        parser = RawConfigParser()
        parser.read_file(raw_schema, filename)  # read_file() replaces the removed readfp()
        self._setSectionSchemasAndCategoryNames(parser)
Code Example #28
    def __init__(self, filenames_to_try=[]):

        # FUN FACT:  In Python 3.2, they spontaneously changed the behaviour of
        # RawConfigParser so that it no longer considers ';' a comment delimiter
        # for inline comments.
        #
        # Compare:
        #   "Configuration files may include comments, prefixed by specific
        #   characters (# and ;). Comments may appear on their own in an otherwise
        #   empty line, or may be entered in lines holding values or section names.
        #   In the latter case, they need to be preceded by a whitespace character
        #   to be recognized as a comment. (For backwards compatibility, only ;
        #   starts an inline comment, while # does not.)"
        #  -- https://docs.python.org/2/library/configparser.html
        # vs:
        #   "Comment prefixes are strings that indicate the start of a valid comment
        #   within a config file. comment_prefixes are used only on otherwise empty
        #   lines (optionally indented) whereas inline_comment_prefixes can be used
        #   after every valid value (e.g. section names, options and empty lines as
        #   well). By default inline comments are disabled and '#' and ';' are used
        #   as prefixes for whole line comments.
        #   Changed in version 3.2: In previous versions of configparser behaviour
        #   matched comment_prefixes=('#',';') and inline_comment_prefixes=(';',)."
        #  -- https://docs.python.org/3/library/configparser.html#customizing-parser-behaviour
        #
        # Grrr...
        if sys.version_info.major >= 3:
            self._cp = RawConfigParser(dict_type=OrderedMultiDict, inline_comment_prefixes=(';',))
        else:
            self._cp = RawConfigParser(dict_type=OrderedMultiDict)

        if isinstance(filenames_to_try, str):
            filenames_to_try = [filenames_to_try]
        self._filenames_to_try = filenames_to_try[:]
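
Note: the version split above can be checked directly: under Python 3's defaults an inline ';' comment becomes part of the value unless inline_comment_prefixes is passed. A sketch of the difference:

    from configparser import RawConfigParser

    text = '[s]\nkey = value ; trailing comment\n'

    default = RawConfigParser()
    default.read_string(text)
    print(default.get('s', 'key'))   # -> 'value ; trailing comment'

    py2_like = RawConfigParser(inline_comment_prefixes=(';',))
    py2_like.read_string(text)
    print(py2_like.get('s', 'key'))  # -> 'value'
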
Code Example #29
def read_configfile():
	global cfg, DEBUG,DOVECOT,GPGMAILENCRYPT,MAILDIRLOCK
	cfg=dict()
	_cfg = RawConfigParser()

	try:
		_cfg.read(CONFIGFILE)
	except:
		log("Could not read config file '%s'."%CONFIGFILE,"e",ln=lineno())
		return

	for sect in _cfg.sections():
		cfg[sect] = dict()

		for (name, value) in _cfg.items(sect):
			cfg[sect][name] = value

	if 'default' in cfg:

		if 'gpgmailencrypt' in cfg['default']:
			GPGMAILENCRYPT=cfg['default']['gpgmailencrypt']

	if 'mail' in cfg:

		if 'dovecot' in cfg['mail'] and cfg['mail']['dovecot']=="yes":
			DOVECOT=True

		if 'maildirlock' in cfg['mail']:
			MAILDIRLOCK=cfg['mail']['maildirlock']
Code Example #30
File: update.py Project: miroi/xcint
def parse_cmake_module(s_in):

    s_out = []
    is_rst_line = False
    for line in s_in.split('\n'):
        if is_rst_line:
            if len(line) > 0:
                if line[0] != '#':
                    is_rst_line = False
            else:
                is_rst_line = False
        if is_rst_line:
            s_out.append(line[2:])
        if '#.rst:' in line:
            is_rst_line = True

    autocmake_entry = '\n'.join(s_out).split('Example autocmake.cfg entry::')[1]
    autocmake_entry = autocmake_entry.replace('\n  ', '\n')

    buf = StringIO(autocmake_entry)
    config = RawConfigParser(dict_type=OrderedDict)
    config.read_file(buf)  # read_file() replaces the removed readfp()

    config_docopt = None
    config_define = None
    config_export = None
    for section in config.sections():
        if config.has_option(section, 'docopt'):
            config_docopt = config.get(section, 'docopt')
        if config.has_option(section, 'define'):
            config_define = config.get(section, 'define')
        if config.has_option(section, 'export'):
            config_export = config.get(section, 'export')

    return config_docopt, config_define, config_export
Code Example #31
def doConfig():
    shakehome = input(
        'Please specify the root folder where ShakeMap is installed: ')
    if not os.path.isdir(shakehome):
        print('%s is not a valid path.  Returning.' % shakehome)
        return
    user = input('Please specify K-NET user name: ')
    password = input('Please specify K-NET password: ')
    config = RawConfigParser()
    config.add_section('KNET')
    config.add_section('SHAKEMAP')
    config.set('KNET', 'user', user)
    config.set('KNET', 'password', password)
    config.set('SHAKEMAP', 'shakehome', shakehome)
    homedir = os.path.expanduser('~')
    configfolder = os.path.join(homedir, '.smtools')
    configfile = os.path.join(configfolder, 'config.ini')
    if not os.path.isdir(configfolder):
        os.makedirs(configfolder)
    with open(configfile, 'w') as f:  # config.write() needs a text-mode file in Python 3
        config.write(f)
Code Example #32
 def read(self, filename):
     self._filename = filename
     RawConfigParser.read(self, filename)
Code Example #33
File: base.py Project: cpwnd/odmf
 def to_config(self):
     """
     Returns a ConfigParser.RawConfigParser with the data of this description
     """
     config = RawConfigParser(allow_no_value=True)
     session = db.Session()
     inst = session.query(db.Datasource).get(self.instrument)
     if not inst:
         raise ValueError(
             'Error in import description: %s is not a valid instrument id')
     session.close()
     section = str(inst)
     config.add_section(section)
     config.set(section, 'instrument', self.instrument)
     config.set(section, 'skiplines', self.skiplines)
     # Replace space and tab by keywords
     config.set(section, 'delimiter', {
         ' ': 'SPACE',
         '\t': 'TAB'
     }.get(self.delimiter, self.delimiter))
     config.set(section, 'decimalpoint', self.decimalpoint)
     config.set(section, 'dateformat', self.dateformat)
     config.set(section, 'datecolumns', str(self.datecolumns).strip('(), '))
     config.set(section, 'project', self.project)
     config.set(section, 'timezone', self.timezone)
     config.set(section, 'nodata', self.nodata)
     config.set(section, 'worksheet', self.worksheet)
     if self.sample_mapping:
         config.set(section, 'sample_mapping', self.sample_mapping)
     if self.fileextension:
         config.set(section, 'fileextension', self.fileextension)
     for col in self.columns:
         section = col.name
         config.add_section(section)
         col.to_config(config, section)
     return config
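
Note: allow_no_value=True, used when constructing the parser above, lets an option appear as a bare key with no '=' and stores its value as None. A minimal sketch:

    from configparser import RawConfigParser

    config = RawConfigParser(allow_no_value=True)
    config.read_string('[instrument]\nskip_archive\nport = 7\n')
    assert config.get('instrument', 'skip_archive') is None
    assert config.get('instrument', 'port') == '7'
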
Code Example #34
 def __init__(self, logger):
     self.logger = logger
     self.config_path = os.path.expanduser('~') + '/.okta-aws'
     self._value = RawConfigParser()
     self._value.read(self.config_path)
Code Example #35
class OktaAuthConfig():
    """ Config helper class """
    def __init__(self, logger):
        self.logger = logger
        self.config_path = os.path.expanduser('~') + '/.okta-aws'
        self._value = RawConfigParser()
        self._value.read(self.config_path)

    def base_url_for(self, okta_profile):
        """ Gets base URL from config """
        if self._value.has_option(okta_profile, 'base-url'):
            base_url = self._value.get(okta_profile, 'base-url')
            self.logger.info("Authenticating to: %s" % base_url)
        else:
            base_url = self._value.get('default', 'base-url')
            self.logger.info("Using base-url from default profile %s" % base_url)
        return base_url

    def app_link_for(self, okta_profile):
        """ Gets app_link from config """
        app_link = None
        if self._value.has_option(okta_profile, 'app-link'):
            app_link = self._value.get(okta_profile, 'app-link')
        elif self._value.has_option('default', 'app-link'):
            app_link = self._value.get('default', 'app-link')
        self.logger.info("App Link set as: %s" % app_link)
        return app_link

    def username_for(self, okta_profile):
        """ Gets username from config """
        if self._value.has_option(okta_profile, 'username'):
            username = self._value.get(okta_profile, 'username')
            self.logger.info("Authenticating as: %s" % username)
        else:
            username = input('Enter username: ')
        return username

    def password_for(self, okta_profile):
        """ Gets password from config """
        if self._value.has_option(okta_profile, 'password'):
            password = self._value.get(okta_profile, 'password')
        else:
            password = getpass('Enter password: ')
        return password

    def factor_for(self, okta_profile):
        """ Gets MFA factor from config """
        if self._value.has_option(okta_profile, 'factor'):
            factor = self._value.get(okta_profile, 'factor')
            self.logger.debug("Setting MFA factor to %s" % factor)
            return factor
        return None

    def save_chosen_role_for_profile(self, okta_profile, role_arn):
        """ Gets role from config """
        if not self._value.has_section(okta_profile):
            self._value.add_section(okta_profile)

        base_url = self.base_url_for(okta_profile)
        self._value.set(okta_profile, 'base-url', base_url)
        self._value.set(okta_profile, 'role', role_arn)

        with open(self.config_path, 'w+') as configfile:
            self._value.write(configfile)

    def save_chosen_app_link_for_profile(self, okta_profile, app_link):
        """ Gets role from config """
        if not self._value.has_section(okta_profile):
            self._value.add_section(okta_profile)

        base_url = self.base_url_for(okta_profile)
        self._value.set(okta_profile, 'base-url', base_url)
        self._value.set(okta_profile, 'app-link', app_link)

        with open(self.config_path, 'w+') as configfile:
            self._value.write(configfile)
Code Example #36
File: configs.py Project: pombredanne/selenose
class DriverConfig(object):
    '''
    SELENIUM driver configuration.
    '''
    # Available environments
    envs = {
        ChromeEnv.key : ChromeEnv,
        FirefoxEnv.key: FirefoxEnv,
        IeEnv.key     : IeEnv,
        RemoteEnv.key : RemoteEnv,
    }
    
    def __init__(self, files):
        '''
        Initialize the configuration with a list of files.
        '''
        # Create a new parser
        self.parser = RawConfigParser()
        # Load default configuration
        self.builtins()
        # Load the files
        self.parser.read(files)
        # Store a server configuration
        self.server = ServerConfig(files)
    
    def builtins(self):
        '''
        Load default configurations.
        '''
        # For simple environment, only add a section with its driver value
        for env in (ChromeEnv, FirefoxEnv, IeEnv):
            # Get the name of the section
            section = self.get_section(env.key)
            # Add the section
            self.parser.add_section(section)
            # Add the driver value
            self.parser.set(section, 'webdriver', env.key)
        # For the remote driver, create an entry for each capabilities
        for capabilities in [ c.lower() for c in dir(DesiredCapabilities) if c == c.upper() ] :
            # Get the name of the section
            section = self.get_section('remote-%s' % capabilities)
            # Add the section
            self.parser.add_section(section)
            # Set the driver value
            self.parser.set(section, 'webdriver', 'remote')
            # Set the capabilities
            self.parser.set(section, 'desired_capabilities', capabilities)
    
    def get_section(self, name):
        '''
        Get the name of the section given the name of the environment.
        '''
        return 'selenium-driver:%s' % name
    
    def getenv(self, name):
        '''
        Get the environment from its name.
        '''
        # Get the name of the section
        section = self.get_section(name)
        # Get the option that lets us distinguish the right environment class
        option = 'webdriver'
        # Get the driver value
        driver = self.parser.get(section, option)
        # Deduce the environment class
        env = self.envs.get(driver)
        # If not found, this is an unexpected value
        if not env:
            # This is an error
            raise ValueError('invalid value for %s.%s: %s' % (section, option, driver))
        # Create a new instance for the environment
        return env(name, self.parser, section, self.server)

                
Code Example #37
 def get(self, section, option, *, raw=False, vars=None, fallback=_UNSET):
     val = RawConfigParser.get(self, section, option)
     return val.strip('"').strip("'")
Code Example #38
File: nirx.py Project: viguix/mne-python
    def __init__(self, fname, preload=False, verbose=None):
        from ...externals.pymatreader import read_mat
        from ...coreg import get_mni_fiducials  # avoid circular import prob
        logger.info('Loading %s' % fname)

        if fname.endswith('.hdr'):
            fname = op.dirname(op.abspath(fname))

        if not op.isdir(fname):
            raise RuntimeError('The path you specified does not exist.')

        # Check if required files exist and store names for later use
        files = dict()
        keys = ('hdr', 'inf', 'set', 'tpl', 'wl1', 'wl2', 'config.txt',
                'probeInfo.mat')
        for key in keys:
            files[key] = glob.glob('%s/*%s' % (fname, key))
            if len(files[key]) != 1:
                raise RuntimeError('Expect one %s file, got %d' % (
                    key,
                    len(files[key]),
                ))
            files[key] = files[key][0]
        if len(glob.glob('%s/*%s' % (fname, 'dat'))) != 1:
            warn("A single dat file was expected in the specified path, but "
                 "got %d. This may indicate that the file structure has been "
                 "modified since the measurement was saved." %
                 (len(glob.glob('%s/*%s' % (fname, 'dat')))))

        # Read number of rows/samples of wavelength data
        last_sample = -1
        with _open(files['wl1']) as fid:
            for line in fid:
                last_sample += 1

        # Read header file
        # The header file isn't compliant with the configparser. So all the
        # text between comments must be removed before passing to parser
        with _open(files['hdr']) as f:
            hdr_str = f.read()
        hdr_str = re.sub('#.*?#', '', hdr_str, flags=re.DOTALL)
        hdr = RawConfigParser()
        hdr.read_string(hdr_str)

        # Check that the file format version is supported
        if not any(item == hdr['GeneralInfo']['NIRStar']
                   for item in ["\"15.0\"", "\"15.2\""]):
            raise RuntimeError('MNE does not support this NIRStar version'
                               ' (%s)' % (hdr['GeneralInfo']['NIRStar'], ))
        if "NIRScout" not in hdr['GeneralInfo']['Device']:
            warn("Only import of data from NIRScout devices have been "
                 "thoroughly tested. You are using a %s device. " %
                 hdr['GeneralInfo']['Device'])

        # Parse required header fields

        # Extract frequencies of light used by machine
        fnirs_wavelengths = [
            int(s) for s in re.findall(r'(\d+)', hdr['ImagingParameters']
                                       ['Wavelengths'])
        ]

        # Extract source-detectors
        sources = np.asarray([
            int(s) for s in re.findall(r'(\d+)-\d+:\d+', hdr['DataStructure']
                                       ['S-D-Key'])
        ], int)
        detectors = np.asarray([
            int(s) for s in re.findall(r'\d+-(\d+):\d+', hdr['DataStructure']
                                       ['S-D-Key'])
        ], int)

        # Determine if short channels are present and on which detectors
        if 'shortbundles' in hdr['ImagingParameters']:
            short_det = [
                int(s) for s in re.findall(
                    r'(\d+)', hdr['ImagingParameters']['ShortDetIndex'])
            ]
            short_det = np.array(short_det, int)
        else:
            short_det = []

        # Extract sampling rate
        samplingrate = float(hdr['ImagingParameters']['SamplingRate'])

        # Read participant information file
        inf = ConfigParser(allow_no_value=True)
        inf.read(files['inf'])
        inf = inf._sections['Subject Demographics']

        # Store subject information from inf file in mne format
        # Note: NIRX also records "Study Type", "Experiment History",
        #       "Additional Notes", "Contact Information" and this information
        #       is currently discarded
        subject_info = {}
        names = inf['name'].split()
        if len(names) > 0:
            subject_info['first_name'] = \
                inf['name'].split()[0].replace("\"", "")
        if len(names) > 1:
            subject_info['last_name'] = \
                inf['name'].split()[-1].replace("\"", "")
        if len(names) > 2:
            subject_info['middle_name'] = \
                inf['name'].split()[-2].replace("\"", "")
        # subject_info['birthday'] = inf['age']  # TODO: not formatted properly
        subject_info['sex'] = inf['gender'].replace("\"", "")
        # Recode values
        if subject_info['sex'] in {'M', 'Male', '1'}:
            subject_info['sex'] = FIFF.FIFFV_SUBJ_SEX_MALE
        elif subject_info['sex'] in {'F', 'Female', '2'}:
            subject_info['sex'] = FIFF.FIFFV_SUBJ_SEX_FEMALE
        # NIRStar does not record an id, or handedness by default

        # Read information about probe/montage/optodes
        # A word on terminology used here:
        #   Sources produce light
        #   Detectors measure light
        #   Sources and detectors are both called optodes
        #   Each source - detector pair produces a channel
        #   Channels are defined as the midpoint between source and detector
        mat_data = read_mat(files['probeInfo.mat'], uint16_codec=None)
        requested_channels = mat_data['probeInfo']['probes']['index_c']
        src_locs = mat_data['probeInfo']['probes']['coords_s3'] / 100.
        det_locs = mat_data['probeInfo']['probes']['coords_d3'] / 100.
        ch_locs = mat_data['probeInfo']['probes']['coords_c3'] / 100.

        # These are all in MNI coordinates, so let's transform them to
        # the Neuromag head coordinate frame
        mri_head_t, _ = _get_trans('fsaverage', 'mri', 'head')
        src_locs = apply_trans(mri_head_t, src_locs)
        det_locs = apply_trans(mri_head_t, det_locs)
        ch_locs = apply_trans(mri_head_t, ch_locs)

        # Set up digitization
        dig = get_mni_fiducials('fsaverage', verbose=False)
        for fid in dig:
            fid['r'] = apply_trans(mri_head_t, fid['r'])
            fid['coord_frame'] = FIFF.FIFFV_COORD_HEAD
        for ii, ch_loc in enumerate(ch_locs, 1):
            dig.append(
                dict(
                    kind=FIFF.FIFFV_POINT_EEG,  # misnomer but probably okay
                    r=ch_loc,
                    ident=ii,
                    coord_frame=FIFF.FIFFV_COORD_HEAD,
                ))
        dig = _format_dig_points(dig)
        del mri_head_t

        # Determine requested channel indices
        # The wl1 and wl2 files include all possible source - detector pairs.
        # But most of these are not relevant. We want to extract only the
        # subset requested in the probe file
        req_ind = np.array([], int)
        for req_idx in range(requested_channels.shape[0]):
            sd_idx = np.where((sources == requested_channels[req_idx][0])
                              & (detectors == requested_channels[req_idx][1]))
            req_ind = np.concatenate((req_ind, sd_idx[0]))
        req_ind = req_ind.astype(int)

        # Generate meaningful channel names
        def prepend(list, str):
            str += '{0}'
            list = [str.format(i) for i in list]
            return (list)

        snames = prepend(sources[req_ind], 'S')
        dnames = prepend(detectors[req_ind], '_D')
        sdnames = [m + str(n) for m, n in zip(snames, dnames)]
        sd1 = [s + ' ' + str(fnirs_wavelengths[0]) for s in sdnames]
        sd2 = [s + ' ' + str(fnirs_wavelengths[1]) for s in sdnames]
        chnames = [val for pair in zip(sd1, sd2) for val in pair]

        # Create mne structure
        info = create_info(chnames,
                           samplingrate,
                           ch_types='fnirs_cw_amplitude')
        info.update(subject_info=subject_info, dig=dig)

        # Store channel, source, and detector locations
        # The channel location is stored in the first 3 entries of loc.
        # The source location is stored in the second 3 entries of loc.
        # The detector location is stored in the third 3 entries of loc.
        # NIRx NIRSite uses MNI coordinates.
        # Also encode the light frequency in the structure.
        for ch_idx2 in range(requested_channels.shape[0]):
            # Find source and store location
            src = int(requested_channels[ch_idx2, 0]) - 1
            info['chs'][ch_idx2 * 2]['loc'][3:6] = src_locs[src, :]
            info['chs'][ch_idx2 * 2 + 1]['loc'][3:6] = src_locs[src, :]
            # Find detector and store location
            det = int(requested_channels[ch_idx2, 1]) - 1
            info['chs'][ch_idx2 * 2]['loc'][6:9] = det_locs[det, :]
            info['chs'][ch_idx2 * 2 + 1]['loc'][6:9] = det_locs[det, :]
            # Store channel location
            # Channel locations for short channels are bodged,
            # for short channels use the source location.
            if det + 1 in short_det:
                info['chs'][ch_idx2 * 2]['loc'][:3] = src_locs[src, :]
                info['chs'][ch_idx2 * 2 + 1]['loc'][:3] = src_locs[src, :]
            else:
                info['chs'][ch_idx2 * 2]['loc'][:3] = ch_locs[ch_idx2, :]
                info['chs'][ch_idx2 * 2 + 1]['loc'][:3] = ch_locs[ch_idx2, :]
            info['chs'][ch_idx2 * 2]['loc'][9] = fnirs_wavelengths[0]
            info['chs'][ch_idx2 * 2 + 1]['loc'][9] = fnirs_wavelengths[1]

        # Extract the start/stop numbers for samples in the CSV. In theory the
        # sample bounds should just be 10 * the number of channels, but some
        # files have mixed \n and \n\r endings (!) so we can't rely on it, and
        # instead make a single pass over the entire file at the beginning so
        # that we know how to seek and read later.
        bounds = dict()
        for key in ('wl1', 'wl2'):
            offset = 0
            bounds[key] = [offset]
            with open(files[key], 'rb') as fid:
                for line in fid:
                    offset += len(line)
                    bounds[key].append(offset)
                assert offset == fid.tell()

        # Extras required for reading data
        raw_extras = {
            'sd_index': req_ind,
            'files': files,
            'bounds': bounds,
        }

        super(RawNIRX, self).__init__(info,
                                      preload,
                                      filenames=[fname],
                                      last_samps=[last_sample],
                                      raw_extras=[raw_extras],
                                      verbose=verbose)

        # Read triggers from event file
        if op.isfile(files['hdr'][:-3] + 'evt'):
            with _open(files['hdr'][:-3] + 'evt') as fid:
                t = [re.findall(r'(\d+)', line) for line in fid]
            onset = np.zeros(len(t), float)
            duration = np.zeros(len(t), float)
            description = [''] * len(t)
            for t_idx in range(len(t)):
                binary_value = ''.join(t[t_idx][1:])[::-1]
                trigger_frame = float(t[t_idx][0])
                onset[t_idx] = (trigger_frame) * (1.0 / samplingrate)
                duration[t_idx] = 1.0  # No duration info stored in files
                description[t_idx] = int(binary_value, 2) * 1.
            annot = Annotations(onset, duration, description)
            self.set_annotations(annot)
Code Example #39
def set_config():
    sys_para = sys.argv
    file_path = os.path.split(sys_para[0])[0]
    gui = False
    if platform.uname()[0] == 'Windows':  # GUI on by default on Windows
        gui = True
    if platform.uname()[0] == 'Linux':  # GUI off by default on Linux
        gui = False
    if '--gui' in sys.argv:  # force GUI mode
        gui = True
    if '--nogui' in sys.argv:  # --nogui overrides the Windows default above
        gui = False

    config_file = os.path.join(file_path, 's3_download_config.ini')
    # If no config file, read the default config
    if not os.path.exists(config_file):
        config_file += '.default'
        print("No customized config, use the default config")
    cfg = ConfigParser()
    print(f'Reading config file: {config_file}')

    try:
        global SrcBucket, S3Prefix, SrcFileIndex, SrcProfileName, DesDir, MaxRetry, MaxThread, MaxParallelFile, LoggingLevel
        cfg.read(config_file, encoding='utf-8-sig')
        SrcBucket = cfg.get('Basic', 'SrcBucket')
        S3Prefix = cfg.get('Basic', 'S3Prefix')
        SrcFileIndex = cfg.get('Basic', 'SrcFileIndex')
        SrcProfileName = cfg.get('Basic', 'SrcProfileName')
        DesDir = cfg.get('Basic', 'DesDir')
        Megabytes = 1024 * 1024
        ChunkSize = cfg.getint('Advanced', 'ChunkSize') * Megabytes
        MaxRetry = cfg.getint('Advanced', 'MaxRetry')
        MaxThread = cfg.getint('Advanced', 'MaxThread')
        MaxParallelFile = cfg.getint('Advanced', 'MaxParallelFile')
        LoggingLevel = cfg.get('Advanced', 'LoggingLevel')
    except Exception as e:
        print("ERR loading s3_download_config.ini", str(e))
        input('PRESS ENTER TO QUIT')
        sys.exit(0)

    if gui:
        # For GUI
        from tkinter import Tk, filedialog, END, StringVar, BooleanVar, messagebox
        from tkinter.ttk import Combobox, Label, Button, Entry, Spinbox, Checkbutton
        # get profile name list in ./aws/credentials
        pro_conf = RawConfigParser()
        pro_path = os.path.join(os.path.expanduser("~"), ".aws")
        cre_path = os.path.join(pro_path, "credentials")
        if os.path.exists(cre_path):
            pro_conf.read(cre_path)
            profile_list = pro_conf.sections()
        else:
            print(
                f"There is no aws_access_key in {cre_path}, please input for S3 Bucket: "
            )
            os.mkdir(pro_path)
            aws_access_key_id = input('aws_access_key_id: ')
            aws_secret_access_key = input('aws_secret_access_key: ')
            region = input('region: ')
            pro_conf.add_section('default')
            pro_conf['default']['aws_access_key_id'] = aws_access_key_id
            pro_conf['default'][
                'aws_secret_access_key'] = aws_secret_access_key
            pro_conf['default']['region'] = region
            profile_list = ['default']
            with open(cre_path, 'w') as f:
                print(f"Saving credentials to {cre_path}")
                pro_conf.write(f)

        # Click Select Folder
        def browse_folder():
            local_dir = filedialog.askdirectory(
                initialdir=os.path.dirname(__file__))
            url_txt.delete(0, END)
            url_txt.insert(0, local_dir)
            file_txt.delete(0, END)
            file_txt.insert(0, "*")
            # Finish browse_folder

        # Click List Buckets
        def ListBuckets(*args):
            SrcProfileName = SrcProfileName_txt.get()
            client = Session(profile_name=SrcProfileName).client('s3')
            bucket_list = []
            try:
                response = client.list_buckets()
                if 'Buckets' in response:
                    bucket_list = [b['Name'] for b in response['Buckets']]
            except Exception as e:
                messagebox.showerror(
                    'Error', f'Failed to list buckets.\n'
                    f'Please verify your aws_access_key of profile: [{SrcProfileName}]\n'
                    f'{str(e)}')
                bucket_list = ['CAN_NOT_GET_BUCKET_LIST']
            SrcBucket_txt['values'] = bucket_list
            SrcBucket_txt.current(0)
            # Finish ListBuckets

        # Click List Prefix
        def ListPrefix(*args):
            SrcProfileName = SrcProfileName_txt.get()
            client = Session(profile_name=SrcProfileName).client('s3')
            prefix_list = []
            this_bucket = SrcBucket_txt.get()
            max_get = 100
            try:
                response = client.list_objects_v2(
                    Bucket=this_bucket, Delimiter='/'
                )  # Only lists up to the first 1000 prefixes, for simplicity
                if 'CommonPrefixes' in response:
                    prefix_list = [
                        c['Prefix'] for c in response['CommonPrefixes']
                    ]
                if not prefix_list:
                    messagebox.showinfo(
                        'Message', f'There is no "/" Prefix in: {this_bucket}')
                if response['IsTruncated']:
                    messagebox.showinfo(
                        'Message',
                        f'More than {max_get} Prefix, cannot fully list here.')
            except Exception as e:
                messagebox.showinfo(
                    'Error',
                    f'Cannot get prefix list from bucket: {this_bucket}, {str(e)}'
                )
            S3Prefix_txt['values'] = prefix_list
            S3Prefix_txt.current(0)
            # Finish list prefix

        def browse_file(*args):
            SrcProfileName = SrcProfileName_txt.get()
            S3Prefix = S3Prefix_txt.get()
            client = Session(profile_name=SrcProfileName).client('s3')
            file_list = []
            this_bucket = SrcBucket_txt.get()
            max_get = 100
            try:
                response = client.list_objects_v2(
                    Bucket=this_bucket,
                    Prefix=str(PurePosixPath(S3Prefix)) + '/',
                    Delimiter='/'
                )  # Only lists up to the first 1000 files, for simplicity

                # For stripping the prefix from the returned keys
                if S3Prefix == '' or S3Prefix == '/':
                    # no Prefix is set on the destination bucket
                    dp_len = 0
                else:
                    # length of the destination bucket's "prefix/"
                    dp_len = len(str(PurePosixPath(S3Prefix))) + 1

                if 'Contents' in response:
                    file_list = [
                        c['Key'][dp_len:] for c in response['Contents']
                    ]  # strip the Prefix
                if not file_list:
                    messagebox.showinfo(
                        'Message',
                        f'There are no files in s3://{this_bucket}/{S3Prefix}')
                if response['IsTruncated']:
                    messagebox.showinfo(
                        'Message',
                        f'More than {max_get} files; the list here is truncated.')
            except Exception as e:
                messagebox.showinfo(
                    'Error',
                    f'Cannot get file list from bucket s3://{this_bucket}/{S3Prefix}, {str(e)}'
                )
            file_txt['values'] = file_list
            if file_list:  # guard: current(0) raises on an empty list
                file_txt.current(0)
            # Finish list files

        # Click START button
        def close():
            window.withdraw()
            ok = messagebox.askokcancel(
                'Start downloading job',
                f'DOWNLOAD FROM s3://{SrcBucket_txt.get()}/{S3Prefix_txt.get()}\n'
                f'TO LOCAL {url_txt.get()}\n'
                f'Click OK to START')
            if not ok:
                window.deiconify()
                return
            window.quit()
            return
            # Finish close()

        # Start GUI
        window = Tk()
        window.title(
            "LONGBOW - AMAZON S3 DOWNLOAD TOOL WITH BREAK-POINT RESUMING")
        window.geometry('705x350')
        window.configure(background='#ECECEC')
        window.protocol("WM_DELETE_WINDOW", sys.exit)

        Label(window, text="S3 Bucket").grid(column=0,
                                             row=1,
                                             sticky='w',
                                             padx=2,
                                             pady=2)
        SrcBucket_txt = Combobox(window, width=48)
        SrcBucket_txt.grid(column=1, row=1, sticky='w', padx=2, pady=2)
        SrcBucket_txt['values'] = SrcBucket
        SrcBucket_txt.current(0)
        Button(window, text="List Buckets", width=10, command=ListBuckets) \
            .grid(column=2, row=1, sticky='w', padx=2, pady=2)

        Label(window, text="S3 Prefix").grid(column=0,
                                             row=2,
                                             sticky='w',
                                             padx=2,
                                             pady=2)
        S3Prefix_txt = Combobox(window, width=48)
        S3Prefix_txt.grid(column=1, row=2, sticky='w', padx=2, pady=2)
        S3Prefix_txt['values'] = S3Prefix
        if S3Prefix != '':
            S3Prefix_txt.current(0)
        Button(window, text="List Prefix", width=10, command=ListPrefix) \
            .grid(column=2, row=2, sticky='w', padx=2, pady=2)

        Label(window, text="Filename or *").grid(column=0,
                                                 row=3,
                                                 sticky='w',
                                                 padx=2,
                                                 pady=2)
        file_txt = Combobox(window, width=48)
        file_txt.grid(column=1, row=3, sticky='w', padx=2, pady=2)
        file_txt['values'] = SrcFileIndex
        if SrcFileIndex != '':
            file_txt.current(0)
        Button(window, text="Select File", width=10, command=browse_file) \
            .grid(column=2, row=3, sticky='w', padx=2, pady=2)

        Label(window, text="AWS Profile").grid(column=0,
                                               row=4,
                                               sticky='w',
                                               padx=2,
                                               pady=2)
        SrcProfileName_txt = Combobox(window, width=15, state="readonly")
        SrcProfileName_txt['values'] = tuple(profile_list)
        SrcProfileName_txt.grid(column=1, row=4, sticky='w', padx=2, pady=2)
        if SrcProfileName in profile_list:
            position = profile_list.index(SrcProfileName)
            SrcProfileName_txt.current(position)
        else:
            SrcProfileName_txt.current(0)
        SrcProfileName = SrcProfileName_txt.get()
        SrcProfileName_txt.bind("<<ComboboxSelected>>", ListBuckets)

        Label(window, text="Folder").grid(column=0,
                                          row=5,
                                          sticky='w',
                                          padx=2,
                                          pady=2)
        url_txt = Entry(window, width=50)
        url_txt.grid(column=1, row=5, sticky='w', padx=2, pady=2)
        url_btn = Button(window,
                         text="Select Folder",
                         width=10,
                         command=browse_folder)
        url_btn.grid(column=2, row=5, sticky='w', padx=2, pady=2)
        url_txt.insert(0, DesDir)

        Label(window, text="MaxThread/File").grid(column=0,
                                                  row=6,
                                                  sticky='w',
                                                  padx=2,
                                                  pady=2)
        if MaxThread < 1 or MaxThread > 100:
            MaxThread = 5
        var_t = StringVar()
        var_t.set(str(MaxThread))
        MaxThread_txt = Spinbox(window,
                                from_=1,
                                to=100,
                                width=15,
                                textvariable=var_t)
        MaxThread_txt.grid(column=1, row=6, sticky='w', padx=2, pady=2)

        Label(window, text="MaxParallelFile").grid(column=0,
                                                   row=7,
                                                   sticky='w',
                                                   padx=2,
                                                   pady=2)
        if MaxParallelFile < 1 or MaxParallelFile > 100:
            MaxParallelFile = 5
        var_f = StringVar()
        var_f.set(str(MaxParallelFile))
        MaxParallelFile_txt = Spinbox(window,
                                      from_=1,
                                      to=100,
                                      width=15,
                                      textvariable=var_f)
        MaxParallelFile_txt.grid(column=1, row=7, sticky='w', padx=2, pady=2)

        save_config = BooleanVar()
        save_config.set(True)
        save_config_txt = Checkbutton(window,
                                      text="Save to s3_download_config.ini",
                                      var=save_config)
        save_config_txt.grid(column=1, row=9, padx=2, pady=2)

        Button(window, text="Start Download", width=15,
               command=close).grid(column=1, row=10, padx=5, pady=5)
        window.mainloop()

        DesDir = url_txt.get()
        SrcFileIndex = file_txt.get()
        SrcBucket = SrcBucket_txt.get()
        S3Prefix = S3Prefix_txt.get()
        SrcProfileName = SrcProfileName_txt.get()
        MaxThread = int(MaxThread_txt.get())
        MaxParallelFile = int(MaxParallelFile_txt.get())

        if save_config.get():  # a BooleanVar is always truthy; read it with .get()
            cfg['Basic']['SrcBucket'] = SrcBucket
            cfg['Basic']['S3Prefix'] = S3Prefix
            cfg['Basic']['SrcFileIndex'] = SrcFileIndex
            cfg['Basic']['SrcProfileName'] = SrcProfileName
            cfg['Basic']['DesDir'] = DesDir
            cfg['Advanced']['MaxThread'] = str(MaxThread)
            cfg['Advanced']['MaxParallelFile'] = str(MaxParallelFile)

            config_file = os.path.join(file_path, 's3_download_config.ini')
            with codecs.open(config_file, 'w', 'utf-8') as f:
                cfg.write(f)
                print(f"Save config to {config_file}")
        # GUI window finish

    if S3Prefix == '/':
        S3Prefix = ''
    # Finish set_config()
    return ChunkSize
Code example #40
def save_config_file(cfg: configparser.RawConfigParser, path: str):
    with open(path, 'w') as configfile:
        cfg.write(configfile)
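
A minimal usage sketch for the helper above; the section, option, and target path are hypothetical:

import configparser

cfg = configparser.RawConfigParser()
cfg.add_section('Basic')
cfg.set('Basic', 'SrcBucket', 'my-bucket')       # illustrative value
save_config_file(cfg, 's3_download_config.ini')  # persists via cfg.write()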
Code example #41
File: ipdl.py Project: DenizYunus/Waterfox
log(2, 'Generated C++ headers will be generated relative to "%s"', headersdir)
log(2, 'Generated C++ sources will be generated in "%s"', cppdir)

allmessages = {}
allmessageprognames = []
allprotocols = []


def normalizedFilename(f):
    if f == '-':
        return '<stdin>'
    return f


log(2, 'Reading sync message list')
parser = RawConfigParser()
with open(options.syncMsgList) as f:
    parser.read_file(f)  # read_file() replaces the deprecated readfp()
syncMsgList = parser.sections()

for section in syncMsgList:
    if not parser.get(section, "description"):
        print('Error: Sync message %s lacks a description' % section,
              file=sys.stderr)
        sys.exit(1)

# Read message metadata. Right now we only have 'segment_capacity'
# for the standard segment size used for serialization.
log(2, 'Reading message metadata...')
msgMetadataConfig = RawConfigParser()
with open(options.msgMetadata) as f:
    msgMetadataConfig.read_file(f)  # avoids the deprecated readfp()
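
For context, a hedged sketch of the INI shape both readers above expect: each section names a sync message (the name below is made up) and must carry a description option, or the build exits:

from configparser import RawConfigParser

demo = RawConfigParser()
demo.read_string("""
[PContent::Msg_Example]
description = reason this message must stay synchronous
""")
assert demo.get('PContent::Msg_Example', 'description')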
Code example #42
File: settings.py Project: Skchoudhary/stacknews
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.10/ref/settings/
"""

import os

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
from configparser import RawConfigParser

BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
CONFIG_PATH = "stacknews/config.ini"

# Read config file for setting up the project
CONFIG_FILE = os.path.join(BASE_DIR, CONFIG_PATH)
config = RawConfigParser()
config.read(CONFIG_FILE)

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.10/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '*$^wh$#w+c%1#zak(5jn3$&fs78ggis$f5w84yr-ms$j!wq8mp'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []

# Application definition
Code example #43
File: units.py Project: mdonnelly1/OpenMDAO
def import_library(libfilepointer):
    """
    Import a units library, replacing any existing definitions.

    Parameters
    ----------
    libfilepointer : file
        new library file to work with

    Returns
    -------
    ConfigParser
        newly updated units library for the module
    """
    global _UNIT_LIB
    global _UNIT_CACHE
    _UNIT_CACHE = {}
    _UNIT_LIB = ConfigParser()
    _UNIT_LIB.optionxform = _do_nothing

    # New in Python 3.2: read_file() replaces readfp().
    if sys.version_info >= (3, 2):
        _UNIT_LIB.read_file(libfilepointer)
    else:
        _UNIT_LIB.readfp(libfilepointer)

    required_base_types = ['length', 'mass', 'time', 'temperature', 'angle']
    _UNIT_LIB.base_names = list()
    # used by is_angle() and other base-type checks
    _UNIT_LIB.base_types = dict()
    _UNIT_LIB.unit_table = dict()
    _UNIT_LIB.prefixes = dict()
    _UNIT_LIB.help = list()

    for prefix, factor in _UNIT_LIB.items('prefixes'):
        factor, comma, comment = factor.partition(',')
        _UNIT_LIB.prefixes[prefix] = float(factor)

    base_list = [0] * len(_UNIT_LIB.items('base_units'))

    for i, (unit_type, name) in enumerate(_UNIT_LIB.items('base_units')):
        _UNIT_LIB.base_types[unit_type] = i
        powers = list(base_list)
        powers[i] = 1
        # print '%20s'%unit_type, powers
        # can't use add_unit because no base units exist yet
        _new_unit(name, 1, powers)
        _UNIT_LIB.base_names.append(name)

    # test for required base types
    missing = [utype for utype in required_base_types
               if utype not in _UNIT_LIB.base_types]
    if missing:
        raise ValueError("Not all required base types were present in the config file. missing: "
                         f"{missing}, at least {required_base_types} required.")

    _update_library(_UNIT_LIB)
    return _UNIT_LIB
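
A hedged sketch of a minimal library file that would satisfy import_library(): it supplies the three sections the code reads (prefixes, base_units, units) and all five required base types. The unit symbols are illustrative, and the call is left commented out because it relies on the module's private helpers (_new_unit, _update_library):

import io

MINIMAL_UNIT_LIB = """\
[prefixes]
k = 1000., kilo

[base_units]
length = m
mass = kg
time = s
temperature = K
angle = rad

[units]
"""
# _UNIT_LIB = import_library(io.StringIO(MINIMAL_UNIT_LIB))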
Code example #44
# use your own settings
# add the following if it doesn't exist in the file

# additionals
import os
import sys
from configparser import RawConfigParser
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# ----------------------------------------
# Sensitive settings into another location /etc/
# ----------------------------------------
config = RawConfigParser()
config.read(r'settings/mysettings.ini')
# SECURITY WARNING: keep the secret key used in production secret!

# ----------------------------------------------------------
# Elasticsearch
# ----------------------------------------------------------
ES_SOURCE = config.get('elasticsearch', 'ES_SOURCE')
ES_TARGET = config.get('elasticsearch', 'ES_TARGET')
ES_RECONNECT_CNT = float(config.get('elasticsearch', 'ES_RECONNECT_CNT'))
ES_REQUEST_TIMEOUT = float(config.get('elasticsearch', 'ES_REQUEST_TIMEOUT'))
ES_TARGET_INDEX_AUDIT = config.get('elasticsearch', 'ES_TARGET_INDEX_AUDIT')
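
A hedged sketch of the [elasticsearch] section that settings/mysettings.ini must provide for the reads above; every host and value is a placeholder:

from configparser import RawConfigParser

demo = RawConfigParser()
demo.read_string("""
[elasticsearch]
ES_SOURCE = http://localhost:9200
ES_TARGET = http://localhost:9201
ES_RECONNECT_CNT = 3
ES_REQUEST_TIMEOUT = 30
ES_TARGET_INDEX_AUDIT = audit-index
""")
assert float(demo.get('elasticsearch', 'ES_REQUEST_TIMEOUT')) == 30.0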
Code example #45
File: util.py Project: xtmgah/MutationMotif
def get_plot_configs(cfg_path=None):
    """returns a config object with plotting settings"""
    defaults = dict(xlabel_fontsize=14,
                    ylabel_fontsize=14,
                    xtick_fontsize=12,
                    ytick_fontsize=12,
                    xlabel_pad=0.01,
                    ylabel_pad=0.01)

    figwidths = {
        '1-way plot': 2.25,
        '2-way plot': 9,
        '3-way plot': 9,
        '4-way plot': 9,
        'summary plot': 9,
        'grid': 8
    }
    figsizes = {
        '1-way plot': (9, 3),
        '2-way plot': (9, 9),
        '3-way plot': (9, 9),
        '4-way plot': (9, 9),
        'summary plot': (9, 9),
        'grid': (8, 8)
    }

    config = RawConfigParser()
    for section in [
            '1-way plot', '2-way plot', '3-way plot', '4-way plot',
            'summary plot', 'grid'
    ]:
        config.add_section(section)
        config.set(section, 'figwidth', figwidths[section])
        config.set(section, 'figsize', figsizes[section])
        for arg, default in list(defaults.items()):
            config.set(section, arg, default)
    if cfg_path:
        # load the user provided config
        user_config = RawConfigParser(allow_no_value=True)
        try:
            user_config.read(cfg_path)
        except ParsingError as err:
            msg = 'Could not parse %s: %s' % (cfg_path, err)
            raise ParsingError(msg)

        # update the default settings
        for section in config.sections():
            for key, val in config.items(section):
                try:
                    new_val = user_config.get(section, key)
                    config.set(section, key, eval(new_val))
                except (NoSectionError, NoOptionError):
                    pass
    return config
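
A hedged usage sketch: user-supplied values are passed through eval() by the code above, so they must be Python literals; the file name and override values are made up:

# user.cfg (hypothetical):
#   [1-way plot]
#   figsize = (12, 4)
#   xlabel_fontsize = 16
#
# config = get_plot_configs('user.cfg')
# config.get('1-way plot', 'figsize')  # -> (12, 4), replacing the (9, 3) default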
Code example #46
File: glovar.py Project: wonkru-bot/scp-079-pm
date_reset: str = ""
flood_ban: int = 0
flood_limit: int = 0
flood_time: int = 0
host_id: int = 0
host_name: str = ""
per_page: int = 0
project_link: str = ""
project_name: str = ""
zh_cn: Union[str, bool] = ""

# [encrypt]
password: str = ""

try:
    config = RawConfigParser()
    config.read("config.ini")

    # [basic]
    bot_token = config["basic"].get("bot_token", bot_token)
    prefix = list(config["basic"].get("prefix", prefix_str))

    # [channels]
    critical_channel_id = int(config["channels"].get("critical_channel_id",
                                                     str(critical_channel_id)))
    debug_channel_id = int(config["channels"].get("debug_channel_id",
                                                  str(debug_channel_id)))
    exchange_channel_id = int(config["channels"].get("exchange_channel_id",
                                                     str(exchange_channel_id)))
    hide_channel_id = int(config["channels"].get("hide_channel_id",
                                                 str(hide_channel_id)))
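
For reference, a hedged sketch of the config.ini shape these reads expect; the token and channel IDs are fake:

from configparser import RawConfigParser

demo = RawConfigParser()
demo.read_string("""
[basic]
bot_token = 123456:ABC-DEF
prefix = /!

[channels]
critical_channel_id = -1001234567890
debug_channel_id = -1001234567891
exchange_channel_id = -1001234567892
hide_channel_id = -1001234567893
""")
assert list(demo['basic'].get('prefix')) == ['/', '!']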
Code example #47
File: pophttp.py Project: Andrew-Brock/pophttp
    def __init__(self, filename):
        config = RawConfigParser()
        config.read(filename)

        self.switches = []
        self.default_url = Config._get_val(config, 'settings', 'default_url',
                                           None)
        self.interface = Config._get_val(config, 'settings', 'interface',
                                         '0.0.0.0')
        self.ip_filter = Config._get_val(config, 'settings', 'ip_filter',
                                         '0.0.0.0/0').split('/')

        self.ip_filter[0] = struct.unpack('>L',
                                          socket.inet_aton(
                                              self.ip_filter[0]))[0]
        if len(self.ip_filter) == 1:
            self.ip_filter.append(32)
        elif len(self.ip_filter) == 2:
            self.ip_filter[1] = int(self.ip_filter[1])
        else:
            raise ConfigError('Bad IP address format specified for IP filter')

        if config.has_section('switches'):
            for cfg, url in config.items('switches'):
                parsed_cfg = dict(h=None, s=None, b=None, k=None, p=None)
                for param in cfg.lower().split(','):
                    if param in ('on', 'off'):
                        parsed_cfg['p'] = param == 'on'
                    elif param[-1] in parsed_cfg:
                        parsed_cfg[param[-1]] = int(param[:-1])
                    else:
                        raise ConfigError(
                            'Unknown parameter %s while parsing %s = %s' %
                            (param[-1], cfg, url))
                self.switches.append((parsed_cfg, url))

        # Special config for specific URLs
        url_openers = []
        for top_level_url in config.sections():
            # Skip sections that are not URLs (e.g. [settings], [switches])
            if not (top_level_url.startswith('http://')
                    or top_level_url.startswith('https://')):
                continue
            auth = Config._get_val(config, top_level_url, 'auth', None)
            if auth == 'basic':
                username = Config._get_val(config, top_level_url, 'username',
                                           None)
                password = Config._get_val(config, top_level_url, 'password',
                                           None)

                if username is None:
                    raise ConfigError(
                        "'username' parameter is required when using basic HTTP authentication"
                    )
                if password is None:
                    raise ConfigError(
                        "'password' parameter is required when using basic HTTP authentication"
                    )

                password_mgr = HTTPPasswordMgrWithDefaultRealm()
                password_mgr.add_password(None, top_level_url, username,
                                          password)
                url_openers.append(HTTPBasicAuthHandler(password_mgr))
        install_opener(build_opener(*url_openers))
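
A hedged sketch of a config file this class would accept: the [switches] key follows the h/s/b/k/on/off grammar parsed above, and every address and credential is a placeholder:

from configparser import RawConfigParser

demo = RawConfigParser()
demo.read_string("""
[settings]
default_url = http://192.0.2.10/api
ip_filter = 192.0.2.0/24

[switches]
1h,2s,on = http://192.0.2.10/lamp

[http://192.0.2.10]
auth = basic
username = user
password = pass
""")
assert dict(demo.items('switches')) == {'1h,2s,on': 'http://192.0.2.10/lamp'}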
Code example #48
File: units.py Project: mdonnelly1/OpenMDAO
    if comment:
        _UNIT_LIB.help.append((name, comment, unit))
    if isinstance(unit, str):
        unit = eval(unit, {'__builtins__': None, 'pi': pi},
                    _UNIT_LIB.unit_table)
    unit.set_name(name)
    if name in _UNIT_LIB.unit_table:
        if (_UNIT_LIB.unit_table[name]._factor != unit._factor or
                _UNIT_LIB.unit_table[name]._powers != unit._powers):
            raise KeyError(f"Unit '{name}' already defined with different factor or powers.")

    _UNIT_LIB.unit_table[name] = unit
    _UNIT_LIB.set('units', name, unit)


_UNIT_LIB = ConfigParser()


def _do_nothing(string):
    """
    Make the ConfigParser case sensitive.

    Defines an optionxform for the units configparser that
    does nothing, resulting in a case-sensitive parser.

    Parameters
    ----------
    string : str
        The string to be transformed for the ConfigParser

    Returns
Code example #49
File: config.py Project: alistair23/misc-files
#
# Copyright IBM, Corp. 2013
#
# Authors:
#  Anthony Liguori <*****@*****.**>
#
# This work is licensed under the terms of the GNU GPLv2 or later.
# See the COPYING file in the top-level directory.
#

from collections.abc import Iterable
from configparser import RawConfigParser
import email.utils
import os

ini = RawConfigParser()
config_filename = None


def setup(filename):
    global config_filename

    if not filename:
        dirs = os.getcwd().split('/')
        for i in range(0, len(dirs)):
            if i:
                path = '/'.join(dirs[0:-i])
            else:
                path = '/'.join(dirs)

            path += '/.patchesrc'
Code example #50
from django.core.management.base import BaseCommand
from mainapp.models import Product, ProductCategory
from authapp.models import ShopUser
from configparser import RawConfigParser
from django.conf import settings

import json, os

JSON_PATH = 'mainapp/json'

local_config_path = os.path.join(settings.BASE_DIR, 'config', 'local.conf')
config = RawConfigParser()
config.read(local_config_path)


def load_from_json(file_name):
    with open(os.path.join(JSON_PATH, file_name + '.json'),
              'r',
              encoding='UTF-8') as infile:
        return json.load(infile)


class Command(BaseCommand):
    def handle(self, *args, **options):
        categories = load_from_json('categories')

        ProductCategory.objects.all().delete()
        for category in categories:
            new_category = ProductCategory(**category)
            new_category.save()
Code example #51
class Cache(object):
    """Client Side cache
    """

    class RepositoryReplay(Error):
        """Cache is newer than repository, refusing to continue"""

    def __init__(self, repository, key, manifest):
        self.timestamp = None
        self.txn_active = False
        self.repository = repository
        self.key = key
        self.manifest = manifest
        self.path = os.path.join(get_cache_dir(), hexlify(repository.id).decode('ascii'))
        if not os.path.exists(self.path):
            self.create()
        self.open()
        if self.manifest.id != self.manifest_id:
            # If repository is older than the cache something fishy is going on
            if self.timestamp and self.timestamp > manifest.timestamp:
                raise self.RepositoryReplay()
            self.sync()
            self.commit()

    def __del__(self):
        self.close()

    def create(self):
        """Create a new empty cache at `path`
        """
        os.makedirs(self.path)
        with open(os.path.join(self.path, 'README'), 'w') as fd:
            fd.write('This is an Attic cache')
        config = RawConfigParser()
        config.add_section('cache')
        config.set('cache', 'version', '1')
        config.set('cache', 'repository', hexlify(self.repository.id).decode('ascii'))
        config.set('cache', 'manifest', '')
        with open(os.path.join(self.path, 'config'), 'w') as fd:
            config.write(fd)
        ChunkIndex.create(os.path.join(self.path, 'chunks').encode('utf-8'))
        with open(os.path.join(self.path, 'files'), 'w') as fd:
            pass  # empty file

    def open(self):
        if not os.path.isdir(self.path):
            raise Exception('%s Does not look like an Attic cache' % self.path)
        self.lock = UpgradableLock(os.path.join(self.path, 'config'), exclusive=True)
        self.rollback()
        self.config = RawConfigParser()
        self.config.read(os.path.join(self.path, 'config'))
        if self.config.getint('cache', 'version') != 1:
            raise Exception('%s Does not look like an Attic cache' % self.path)
        self.id = self.config.get('cache', 'repository')
        self.manifest_id = unhexlify(self.config.get('cache', 'manifest'))
        self.timestamp = self.config.get('cache', 'timestamp', fallback=None)
        self.chunks = ChunkIndex(os.path.join(self.path, 'chunks').encode('utf-8'))
        self.files = None

    def close(self):
        self.lock.release()

    def _read_files(self):
        self.files = {}
        self._newest_mtime = 0
        with open(os.path.join(self.path, 'files'), 'rb') as fd:
            u = msgpack.Unpacker(use_list=True)
            while True:
                data = fd.read(64 * 1024)
                if not data:
                    break
                u.feed(data)
                for hash, item in u:
                    item[0] += 1
                    self.files[hash] = item

    def begin_txn(self):
        # Initialize transaction snapshot
        txn_dir = os.path.join(self.path, 'txn.tmp')
        os.mkdir(txn_dir)
        shutil.copy(os.path.join(self.path, 'config'), txn_dir)
        shutil.copy(os.path.join(self.path, 'chunks'), txn_dir)
        shutil.copy(os.path.join(self.path, 'files'), txn_dir)
        os.rename(os.path.join(self.path, 'txn.tmp'),
                  os.path.join(self.path, 'txn.active'))
        self.txn_active = True

    def commit(self):
        """Commit transaction
        """
        if not self.txn_active:
            return
        if self.files is not None:
            with open(os.path.join(self.path, 'files'), 'wb') as fd:
                for item in self.files.items():
                    # Discard cached files with the newest mtime to avoid
                    # issues with filesystem snapshots and mtime precision
                    if item[1][0] < 10 and item[1][3] < self._newest_mtime:
                        msgpack.pack(item, fd)
        self.config.set('cache', 'manifest', hexlify(self.manifest.id).decode('ascii'))
        self.config.set('cache', 'timestamp', self.manifest.timestamp)
        with open(os.path.join(self.path, 'config'), 'w') as fd:
            self.config.write(fd)
        self.chunks.flush()
        os.rename(os.path.join(self.path, 'txn.active'),
                  os.path.join(self.path, 'txn.tmp'))
        shutil.rmtree(os.path.join(self.path, 'txn.tmp'))
        self.txn_active = False

    def rollback(self):
        """Roll back partial and aborted transactions
        """
        # Roll back active transaction
        txn_dir = os.path.join(self.path, 'txn.active')
        if os.path.exists(txn_dir):
            shutil.copy(os.path.join(txn_dir, 'config'), self.path)
            shutil.copy(os.path.join(txn_dir, 'chunks'), self.path)
            shutil.copy(os.path.join(txn_dir, 'files'), self.path)
            os.rename(txn_dir, os.path.join(self.path, 'txn.tmp'))
        # Remove partial transaction
        if os.path.exists(os.path.join(self.path, 'txn.tmp')):
            shutil.rmtree(os.path.join(self.path, 'txn.tmp'))
        self.txn_active = False

    def sync(self):
        """Initializes cache by fetching and reading all archive indicies
        """
        def add(id, size, csize):
            try:
                count, size, csize = self.chunks[id]
                self.chunks[id] = count + 1, size, csize
            except KeyError:
                self.chunks[id] = 1, size, csize
        self.begin_txn()
        print('Initializing cache...')
        self.chunks.clear()
        unpacker = msgpack.Unpacker()
        for name, info in self.manifest.archives.items():
            id = info[b'id']
            cdata = self.repository.get(id)
            data = self.key.decrypt(id, cdata)
            add(id, len(data), len(cdata))
            archive = msgpack.unpackb(data)
            decode_dict(archive, (b'name', b'hostname', b'username', b'time'))  # fixme: argv
            print('Analyzing archive:', archive[b'name'])
            for id, chunk in zip_longest(archive[b'items'], self.repository.get_many(archive[b'items'])):
                data = self.key.decrypt(id, chunk)
                add(id, len(data), len(chunk))
                unpacker.feed(data)
                for item in unpacker:
                    try:
                        for id, size, csize in item[b'chunks']:
                            add(id, size, csize)
                    except KeyError:
                        pass

    def add_chunk(self, id, data, stats):
        if not self.txn_active:
            self.begin_txn()
        if self.seen_chunk(id):
            return self.chunk_incref(id, stats)
        size = len(data)
        data = self.key.encrypt(data)
        csize = len(data)
        self.repository.put(id, data, wait=False)
        self.chunks[id] = (1, size, csize)
        stats.update(size, csize, True)
        return id, size, csize

    def seen_chunk(self, id):
        return self.chunks.get(id, (0, 0, 0))[0]

    def chunk_incref(self, id, stats):
        if not self.txn_active:
            self.begin_txn()
        count, size, csize = self.chunks[id]
        self.chunks[id] = (count + 1, size, csize)
        stats.update(size, csize, False)
        return id, size, csize

    def chunk_decref(self, id):
        if not self.txn_active:
            self.begin_txn()
        count, size, csize = self.chunks[id]
        if count == 1:
            del self.chunks[id]
            self.repository.delete(id, wait=False)
        else:
            self.chunks[id] = (count - 1, size, csize)

    def file_known_and_unchanged(self, path_hash, st):
        if self.files is None:
            self._read_files()
        entry = self.files.get(path_hash)
        if (entry and entry[3] == st_mtime_ns(st)
            and entry[2] == st.st_size and entry[1] == st.st_ino):
            # reset entry age
            self.files[path_hash][0] = 0
            return entry[4]
        else:
            return None

    def memorize_file(self, path_hash, st, ids):
        # Entry: Age, inode, size, mtime, chunk ids
        mtime_ns = st_mtime_ns(st)
        self.files[path_hash] = 0, st.st_ino, st.st_size, mtime_ns, ids
        self._newest_mtime = max(self._newest_mtime, mtime_ns)
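
For reference, create() above leaves a cache config of roughly this shape on disk (a sketch with a shortened ID, not captured output); open() later validates it via getint('cache', 'version'):

# [cache]
# version = 1
# repository = 3f5a...e2
# manifest =
# (a timestamp option is added later by commit() from the manifest)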
Code example #52
class MrxsFile(object):
    def __init__(self, filename):
        # Split filename
        dirname, ext = os.path.splitext(filename)
        if ext != ".mrxs":
            raise UnrecognizedFile

        # Parse slidedat
        self._slidedatfile = os.path.join(dirname, "Slidedat.ini")
        self._dat = RawConfigParser()
        self._dat.optionxform = str
        try:
            with open(self._slidedatfile, "rb") as fh:
                self._have_bom = fh.read(len(UTF8_BOM)) == UTF8_BOM
                if not self._have_bom:
                    fh.seek(0)
                # read_string() wants text, so decode the remaining bytes
                self._dat.read_string(fh.read().decode("utf-8"))
        except IOError:
            raise UnrecognizedFile

        # Get file paths
        self._indexfile = os.path.join(
            dirname, self._dat.get(MRXS_HIERARCHICAL, "INDEXFILE"))
        self._datafiles = [
            os.path.join(dirname, self._dat.get("DATAFILE", "FILE_%d" % i))
            for i in range(self._dat.getint("DATAFILE", "FILE_COUNT"))
        ]

        # Build levels
        self._make_levels()

    def _make_levels(self):
        self._levels = {}
        self._level_list = []
        layer_count = self._dat.getint(MRXS_HIERARCHICAL, "NONHIER_COUNT")
        for layer_id in range(layer_count):
            level_count = self._dat.getint(MRXS_HIERARCHICAL,
                                           "NONHIER_%d_COUNT" % layer_id)
            for level_id in range(level_count):
                level = MrxsNonHierLevel(self._dat, layer_id, level_id,
                                         len(self._level_list))
                self._levels[(level.layer_name, level.name)] = level
                self._level_list.append(level)

    @classmethod
    def _read_int32(cls, f):
        buf = f.read(4)
        if len(buf) != 4:
            raise IOError("Short read")
        return struct.unpack("<i", buf)[0]

    @classmethod
    def _assert_int32(cls, f, value):
        v = cls._read_int32(f)
        if v != value:
            raise ValueError("%d != %d" % (v, value))

    def _get_data_location(self, record):
        with open(self._indexfile, "rb") as fh:
            fh.seek(MRXS_NONHIER_ROOT_OFFSET)
            # seek to record
            table_base = self._read_int32(fh)
            fh.seek(table_base + record * 4)
            # seek to list head
            list_head = self._read_int32(fh)
            fh.seek(list_head)
            # seek to data page
            self._assert_int32(fh, 0)
            page = self._read_int32(fh)
            fh.seek(page)
            # check pagesize
            self._assert_int32(fh, 1)
            # read rest of prologue
            self._read_int32(fh)
            self._assert_int32(fh, 0)
            self._assert_int32(fh, 0)
            # read values
            position = self._read_int32(fh)
            size = self._read_int32(fh)
            fileno = self._read_int32(fh)
            return (self._datafiles[fileno], position, size)

    def _zero_record(self, record):
        path, offset, length = self._get_data_location(record)
        with open(path, "r+b") as fh:
            fh.seek(0, 2)
            do_truncate = fh.tell() == offset + length
            if DEBUG:
                if do_truncate:
                    print("Truncating", path, "to", offset)
                else:
                    print("Zeroing", path, "at", offset, "for", length)
            fh.seek(offset)
            buf = fh.read(len(JPEG_SOI))
            if buf != JPEG_SOI:
                raise IOError("Unexpected data in nonhier image")
            if do_truncate:
                fh.truncate(offset)
            else:
                fh.seek(offset)
                fh.write("\0" * length)

    def _delete_index_record(self, record):
        if DEBUG:
            print("Deleting record", record)
        with open(self._indexfile, "r+b") as fh:
            entries_to_move = len(self._level_list) - record - 1
            if entries_to_move == 0:
                return
            # get base of table
            fh.seek(MRXS_NONHIER_ROOT_OFFSET)
            table_base = self._read_int32(fh)
            # read tail of table
            fh.seek(table_base + (record + 1) * 4)
            buf = fh.read(entries_to_move * 4)
            if len(buf) != entries_to_move * 4:
                raise IOError("Short read")
            # overwrite the target record
            fh.seek(table_base + record * 4)
            fh.write(buf)

    def _hier_keys_for_level(self, level):
        ret = []
        for k, _ in self._dat.items(MRXS_HIERARCHICAL):
            if k == level.key_prefix or k.startswith(level.key_prefix + "_"):
                ret.append(k)
        return ret

    def _rename_section(self, old, new):
        if self._dat.has_section(old):
            if DEBUG:
                print("[%s] -> [%s]" % (old, new))
            self._dat.add_section(new)
            for k, v in self._dat.items(old):
                self._dat.set(new, k, v)
            self._dat.remove_section(old)
        elif DEBUG:
            print("[%s] does not exist" % old)

    def _delete_section(self, section):
        if DEBUG:
            print("Deleting [%s]" % section)
        self._dat.remove_section(section)

    def _set_key(self, section, key, value):
        if DEBUG:
            prev = self._dat.get(section, key)
            print("[%s] %s: %s -> %s" % (section, key, prev, value))
        self._dat.set(section, key, value)

    def _rename_key(self, section, old, new):
        if DEBUG:
            print("[%s] %s -> %s" % (section, old, new))
        v = self._dat.get(section, old)
        self._dat.remove_option(section, old)
        self._dat.set(section, new, v)

    def _delete_key(self, section, key):
        if DEBUG:
            print("Deleting [%s] %s" % (section, key))
        self._dat.remove_option(section, key)

    def _write(self):
        buf = io.StringIO()
        self._dat.write(buf)
        with open(self._slidedatfile, "wb") as fh:
            if self._have_bom:
                fh.write(UTF8_BOM)
            # encode the text buffer before writing to the binary file
            fh.write(buf.getvalue().replace("\n", "\r\n").encode("utf-8"))

    def delete_level(self, layer_name, level_name):
        level = self._levels[(layer_name, level_name)]
        record = level.record

        # Zero image data
        self._zero_record(record)

        # Delete pointer from nonhier table in index
        self._delete_index_record(record)

        # Remove slidedat keys
        for k in self._hier_keys_for_level(level):
            self._delete_key(MRXS_HIERARCHICAL, k)

        # Remove slidedat section
        self._delete_section(level.section)

        # Rename section and keys for subsequent levels in the layer
        prev_level = level
        for cur_level in self._level_list[record + 1:]:
            if cur_level.layer_id != prev_level.layer_id:
                break
            for k in self._hier_keys_for_level(cur_level):
                new_k = k.replace(cur_level.key_prefix, prev_level.key_prefix,
                                  1)
                self._rename_key(MRXS_HIERARCHICAL, k, new_k)
            self._set_key(MRXS_HIERARCHICAL, prev_level.section_key,
                          prev_level.section)
            self._rename_section(cur_level.section, prev_level.section)
            prev_level = cur_level

        # Update level count within layer
        count_k = "NONHIER_%d_COUNT" % level.layer_id
        count_v = self._dat.getint(MRXS_HIERARCHICAL, count_k)
        self._set_key(MRXS_HIERARCHICAL, count_k, count_v - 1)

        # Write slidedat
        self._write()

        # Refresh metadata
        self._make_levels()
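
A hedged usage sketch; the file name and level names are purely illustrative:

# mf = MrxsFile('slide.mrxs')
# mf.delete_level('Scan data layer', 'ScanDataLayer_SlideThumbnail')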
Code example #53
File: plugin.py Project: isabella232/poezio
 def read(self):
     """Read the config file"""
     RawConfigParser.read(self, str(self.file_name))
     if not self.has_section(self.module_name):
         self.add_section(self.module_name)
Code example #55
from pyquery import PyQuery
from base64 import b64encode
from libnacl.sign import Signer
from urllib.parse import quote  # Python 3 location of quote()
import os, io
from configparser import RawConfigParser
from iso8601 import parse_date
from datetime import timedelta
from dateutil import parser


PWD = os.path.dirname(os.path.realpath(__file__.rstrip('cd')))

with open(PWD + '/../etc/locust.cfg', 'r') as f:
    sample_config = f.read()
config = RawConfigParser(allow_no_value=True)
config.read_string(sample_config)  # sample_config is already text, not bytes

section = os.path.basename(__file__.rstrip('cd'))
PARAMS = {}

for option in config.options(section):
    PARAMS[option] = config.get(section, option)

AUCTIONS_NUMBER = int(PARAMS['auctions_number'])
BIDDERS = [r.strip() for r in PARAMS['bidders'].split() if r.strip()]
SIGNATURE_KEY = PARAMS['signature_key']
tender_id_base = PARAMS['tender_id_base']
positions = 4
auction_id_template = \
    tender_id_base * (32 - positions) + '{{0:0{}d}}'.format(positions)
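
A hedged sketch of the matching locust.cfg entry: the section name must equal this script's basename (per os.path.basename above), bidders is whitespace-split, and every value is a placeholder:

# [auction.py]
# auctions_number = 10
# bidders =
#     http://bidder-one.example
#     http://bidder-two.example
# signature_key = <base64-encoded signing key>
# tender_id_base = f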
Code example #56
File: configuration.py Project: teicors/mqttwarn
    def __init__(self, configuration_file=None, defaults=None):

        defaults = defaults or {}

        self.configuration_path = None

        RawConfigParser.__init__(self)
        if configuration_file is not None:
            f = codecs.open(configuration_file, 'r', encoding='utf-8')
            self.read_file(f)
            f.close()

            self.configuration_path = os.path.dirname(configuration_file)
        # set defaults
        self.hostname = 'localhost'
        self.port = 1883
        self.username = None
        self.password = None
        self.clientid = None
        self.lwt = None
        self.skipretained = False
        self.cleansession = False
        self.protocol = 3

        self.logformat = '%(asctime)-15s %(levelname)-8s [%(name)-26s] %(message)s'
        self.logfile = "stream://sys.stderr"
        self.loglevel = 'DEBUG'

        self.functions = None
        self.num_workers = 1

        self.ca_certs = None
        self.tls_version = None
        self.certfile = None
        self.keyfile = None
        self.tls_insecure = False
        self.tls = False

        self.__dict__.update(defaults)
        self.__dict__.update(self.config('defaults'))

        if not HAVE_TLS:
            logger.error("TLS parameters set but no TLS available (SSL)")
            sys.exit(2)

        if self.ca_certs is not None:
            self.tls = True

        if self.tls_version is not None:
            if self.tls_version == 'tlsv1_2':
                self.tls_version = ssl.PROTOCOL_TLSv1_2
            if self.tls_version == 'tlsv1_1':
                self.tls_version = ssl.PROTOCOL_TLSv1_1
            if self.tls_version == 'tlsv1':
                self.tls_version = ssl.PROTOCOL_TLSv1
            if self.tls_version == 'sslv3':
                self.tls_version = ssl.PROTOCOL_SSLv3

        self.loglevelnumber = self.level2number(self.loglevel)

        if self.functions is not None and self.functions.strip() != "":

            logger.info("Loading user-defined functions from %s" %
                        self.functions)

            # Load function file as given (backward-compatibility).
            if os.path.isfile(self.functions):
                functions_file = self.functions

            # Load function file as given if path is absolute.
            elif os.path.isabs(self.functions):
                functions_file = self.functions

            # Load function file relative to path of configuration file if path is relative.
            else:
                functions_file = os.path.join(self.configuration_path,
                                              self.functions)

            self.functions = load_functions(functions_file)
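
A hedged sketch of the [defaults] section this class folds into __dict__ via self.config('defaults'); only options mirroring the attribute defaults above are shown, with placeholder values:

# [defaults]
# hostname = broker.example.org
# port = 1883
# username = jane
# password = secret
# loglevel = INFO
# functions = funcs.py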
Code example #57
 def read_file(self):
     here = "config.read_file"
     config_read = RawConfigParser()
     config_read.read(self.__c_filename)
     self.scan_delay = float(config_read.getint('SetUp', 'scan_delay'))
     self.max_scans = int(config_read.getint('SetUp', 'max_scans'))
     self.log_directory = config_read.get('SetUp', 'log_directory')
     self.local_dir_www = config_read.get('SetUp', 'local_dir_www')
     self.sensor_config_filename = config_read.get(
         'SetUp', 'sensor_config_filename')
     self.ftp_creds_filename = config_read.get('SetUp',
                                               'ftp_creds_filename')
     self.delay_limit = float(config_read.get('SetUp', 'delay_limit'))
     self.delay_increment = float(
         config_read.get('SetUp', 'delay_increment'))
     self.ftp_log_max_count = float(
         config_read.get('SetUp', 'ftp_log_max_count'))
     self.heaterIP0 = config_read.get('SetUp', 'heaterIP0')
     self.heaterIP0_power_scale = float(
         config_read.get('SetUp', 'heaterIP0_power_scale'))
     self.heaterIP1 = config_read.get('SetUp', 'heaterIP1')
     self.heaterIP1_power_scale = float(
         config_read.get('SetUp', 'heaterIP1_power_scale'))
     self.heaterIP2 = config_read.get('SetUp', 'heaterIP2')
     self.heaterIP2_power_scale = float(
         config_read.get('SetUp', 'heaterIP2_power_scale'))
     self.heaterIP3 = config_read.get('SetUp', 'heaterIP3')
     self.heaterIP3_power_scale = float(
         config_read.get('SetUp', 'heaterIP3_power_scale'))
     self.sensor4readings = config_read.get(
         'SetUp', 'sensor4readings')
     self.change4log = float(config_read.get('SetUp', 'change4log'))
     self.control_hysteresis = float(
         config_read.get('SetUp', 'control_hysteresis'))
     self.default_target = float(config_read.get('SetUp', 'default_target'))
     self.default_target_full_power = float(
         config_read.get('SetUp', 'default_target_full_power'))
     self.max_target = float(config_read.get('SetUp', 'max_target'))
     self.min_target = float(config_read.get('SetUp', 'min_target'))
     self.precision = float(config_read.get('SetUp', 'precision'))
     self.target_integral = float(
         config_read.get('SetUp', 'target_integral'))
     self.one_heater_select = float(
         config_read.get('SetUp', 'one_heater_select'))
     self.percent_full_power = float(
         config_read.get('SetUp', 'percent_full_power'))
     self.watchdog_time = float(config_read.get('SetUp', 'watchdog_time'))
     self.ftp_timeout = float(config_read.get('SetUp', 'ftp_timeout'))
     return
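
A hedged, abbreviated sketch of the [SetUp] section this reader expects; only a few of the many options are shown and all values are placeholders:

# [SetUp]
# scan_delay = 10
# max_scans = 100
# log_directory = /var/log/heater
# heaterIP0 = 192.168.1.50
# heaterIP0_power_scale = 1.0
# default_target = 20.0
# watchdog_time = 300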
Code example #58
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""

import os
from configparser import RawConfigParser

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
TEMPLATE_DIR = os.path.join(BASE_DIR, 'templates')
STATIC_DIR = os.path.join(BASE_DIR, 'static')
MEDIA_DIR = os.path.join(BASE_DIR, 'media')

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
config = RawConfigParser()
config.read('/home/origen/origen/settings.ini')

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = config.get('secrets', 'SECRET_KEY')

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = ['origenoptimized.com', '64.227.90.55', 'localhost']

# Application definition

INSTALLED_APPS = [
    'django.contrib.admin', 'django.contrib.auth',
    'django.contrib.contenttypes', 'django.contrib.sessions',
Code example #59
    },
    "storage": {
        "type": "filesystem",
        "custom_handler": "",
        "filesystem_folder":
        os.path.expanduser("~/.config/radicale/collections"),
        "database_url": ""
    },
    "logging": {
        "config": "/etc/radicale/logging",
        "debug": "False",
        "full_environment": "False"
    }
}

# Create a ConfigParser and configure it
_CONFIG_PARSER = ConfigParser()

for section, values in INITIAL_CONFIG.items():
    _CONFIG_PARSER.add_section(section)
    for key, value in values.items():
        _CONFIG_PARSER.set(section, key, value)

_CONFIG_PARSER.read("/etc/radicale/config")
_CONFIG_PARSER.read(os.path.expanduser("~/.config/radicale/config"))
if "RADICALE_CONFIG" in os.environ:
    _CONFIG_PARSER.read(os.environ["RADICALE_CONFIG"])

# Wrap config module into ConfigParser instance
sys.modules[__name__] = _CONFIG_PARSER
Code example #60
File: config.py Project: purplesparkle/gridsync
 def save(self, settings_dict):
     config = RawConfigParser(allow_no_value=True)
     config.read(self.filename)
     for section, d in settings_dict.items():
         if not config.has_section(section):
             config.add_section(section)
         for option, value in d.items():
             config.set(section, option, value)
     with atomic_write(self.filename, mode="w", overwrite=True) as f:
         config.write(f)
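
A hedged usage sketch: atomic_write is assumed to come from the atomicwrites package, and the enclosing class and settings dict are illustrative:

# cfg = Config('settings.ini')         # hypothetical constructor
# cfg.save({'ui': {'theme': 'dark'}})  # one section, one option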