Exemplo n.º 1
0
def _read_from_sections(user, collection_url, permission):
    """Return True when *user* is granted *permission* on *collection_url*.

    Rights come from INITIAL_RIGHTS overlaid with
    settings.DJRADICALE_RIGHTS; each section defines 'user' and
    'collection' regular-expression options plus a 'permission' string
    whose characters are tested with ``in``.
    """
    # The dict supplies %(login)s / %(path)s interpolation defaults for
    # the rights templates loaded below (hence ConfigParser, not Raw).
    regex = ConfigParser({'login': user, 'path': collection_url})
    for rights in (INITIAL_RIGHTS, settings.DJRADICALE_RIGHTS):
        for section, values in rights.items():
            if not regex.has_section(section):
                regex.add_section(section)
            for key, value in values.items():
                regex.set(
                    section, key,
                    # Escape the literal user/path so they match verbatim
                    # when substituted into the regex templates.
                    value % {
                        'login': re.escape(user),
                        'path': re.escape(collection_url),
                    })
    log.LOGGER.debug("Rights type '%s'" % __name__)

    for section in regex.sections():
        re_user = regex.get(section, 'user')
        re_collection = regex.get(section, 'collection')
        log.LOGGER.debug(
            "Test if '%s:%s' matches against '%s:%s' from section '%s'" % (
                user, collection_url, re_user, re_collection, section))
        user_match = re.match(re_user, user)
        if user_match:
            # Capture groups of the user regex may be referenced by the
            # collection pattern via {0}, {1}, ...
            re_collection = re_collection.format(*user_match.groups())
            if re.match(re_collection, collection_url):
                log.LOGGER.debug("Section '%s' matches" % section)
                # Substring test: e.g. 'r' in 'rw'.
                if permission in regex.get(section, 'permission'):
                    return True
            else:
                log.LOGGER.debug("Section '%s' does not match" % section)
    return False
Exemplo n.º 2
0
    def __init__(self):
        """Class constructor.

        Reads from settings.ini the email and password used to log in,
        the group_id deciding which group's new members receive the
        message, and the message contents (msg), storing them in
        instance variables along with the webdriver.

        NOTE(review): translated from Korean. The code below only reads
        email, pw, group_id and group_name — msg is not read here;
        presumably it is loaded elsewhere. Confirm.
        """

        self.driver = webdriver.Firefox()
        self.driver.implicitly_wait(30)  # wait up to 30s for elements to appear
        self.base_url = "https://www.facebook.com/"
        self.verificationErrors = []
        self.accept_next_alert = True

        from configparser import RawConfigParser
        from os.path import join, abspath, dirname
        import codecs

        # settings.ini lives next to this module.
        settings_path = join(abspath(dirname(__file__)), 'settings.ini')

        cfg = RawConfigParser()
        # codecs.open forces UTF-8 decoding of the settings file.
        cfg.read_file(codecs.open(settings_path, "r", "utf8"))

        self.email = cfg.get('setting', "email")
        self.pw = cfg.get('setting', 'pw')
        self.group_id = cfg.get('setting', 'group_id')
        self.group_name = cfg.get('setting', 'group_name')
Exemplo n.º 3
0
def send_old_submits():
    """Best-effort upload of queued bug reports as GitHub issues.

    Every "submit-*" file in $PINGUINO_USER_PATH is an ini file with a
    [SUBMIT] section. Successfully posted reports are deleted; failures
    are kept so they can be retried on a later run.
    """
    user_path = os.getenv("PINGUINO_USER_PATH")
    submits = filter(lambda s: s.startswith("submit-"), os.listdir(user_path))
    for submit in submits:
        parser = RawConfigParser()
        filename = os.path.join(user_path, submit)
        # read_file() replaces the removed readfp(); the with-block also
        # closes the handle the old code leaked.
        with open(filename, "r") as fh:
            parser.read_file(fh)

        summary = parser.get("SUBMIT", "summary")
        details = parser.get("SUBMIT", "details")
        repo = parser.get("SUBMIT", "repo")
        environ = parser.get("SUBMIT", "environ")
        username = parser.get("SUBMIT", "username")
        password = parser.get("SUBMIT", "password")

        try:
            # BUG FIX: the old line ended in a bare `.form`, raising
            # AttributeError that the except swallowed, so no submit was
            # ever sent and no file ever removed.
            # NOTE(review): assumes `repo` is stored as "owner/name" —
            # confirm against the code that writes the submit files.
            url = 'https://api.github.com/repos/{}/issues'.format(repo)
            session = requests.Session()
            session.auth = (username, password)
            issue = {'title': summary,
                     'body': "{}\n\n{}".format(details, environ),
                     'labels': ['submitted-from-ide',
                                'bug',
                                'v{}'.format(os.environ["PINGUINO_VERSION"][:2]),
                                ],
                     }
            session.post(url, json.dumps(issue))
            os.remove(filename)
        except Exception:
            # Deliberate best-effort: keep the file and retry next time.
            pass
Exemplo n.º 4
0
def parse_cmake_module(s_in):
    """Extract the autocmake configuration from a CMake module's rst block.

    Collects the comment lines that follow a '#.rst:' marker (stripping
    the leading '# '), then parses the text after
    'Example autocmake.cfg entry::' as an ini fragment.

    :param s_in: full text of a .cmake module file.
    :return: tuple (docopt, define, export) of option values, each None
        when the parsed entry does not define it.
    :raises IndexError: when no 'Example autocmake.cfg entry::' marker
        is present in the rst block.
    """
    s_out = []
    is_rst_line = False
    for line in s_in.split('\n'):
        if is_rst_line:
            # The rst block ends at the first empty or non-comment line.
            if len(line) > 0:
                if line[0] != '#':
                    is_rst_line = False
            else:
                is_rst_line = False
        if is_rst_line:
            s_out.append(line[2:])  # drop the leading '# '
        if '#.rst:' in line:
            is_rst_line = True

    autocmake_entry = '\n'.join(s_out).split('Example autocmake.cfg entry::')[1]
    # Undo the two-space literal-block indentation of the ini fragment.
    autocmake_entry = autocmake_entry.replace('\n  ', '\n')

    buf = StringIO(autocmake_entry)
    config = RawConfigParser(dict_type=OrderedDict)
    # read_file() replaces readfp(), deprecated in 3.2 and removed in 3.12.
    config.read_file(buf)

    config_docopt = None
    config_define = None
    config_export = None
    for section in config.sections():
        if config.has_option(section, 'docopt'):
            config_docopt = config.get(section, 'docopt')
        if config.has_option(section, 'define'):
            config_define = config.get(section, 'define')
        if config.has_option(section, 'export'):
            config_export = config.get(section, 'export')

    return config_docopt, config_define, config_export
Exemplo n.º 5
0
    def _start_observing(self):
        """Attach to (or spawn) the StatsCore monitoring daemon configured in
        vigilant.cfg and register this process with it."""
        # ``StatsCore`` is the original observant daemon. It seems to have been
        # replaced more recently by ``vigilant.daemon.Daemon``
        # TODO: update for vigilant Daemon

        # Get vigilant to read its own config
        config = CParser()
        config.read(os.path.join(etc_location, 'vigilant.cfg'))

        # Extract the information we need from the config object
        lock = str(config.get('daemon', 'lock'))
        sock = str(config.get('daemon', 'sock'))
        transport_type = str(config.get('transport', 'type'))
        host = str(config.get('transport', 'host'))
        port = int(config.get('transport', 'port'))
        # Anything other than 'udp' falls back to the TCP transport.
        transport_means = UDPStatsTransport if transport_type == 'udp' else TCPStatsTransport
        transport = transport_means(host=host, port=port)

        # Start the daemon
        self.client = StatsCore.attachOrCreateStatsDaemon(transport, pid=lock, sock=sock)

        # Tell the daemon who we are
        self.client.postWatchPid('go-smart-launcher', os.getpid())

        # Give it a second to avoid confusion by posting before registered
        # TODO: tidy this up!
        time.sleep(1)
Exemplo n.º 6
0
    def set_environ_vars(cls):
        """Populate the PINGUINO_* environment variables from paths.cfg.

        Exits the process when paths.cfg is missing from $PINGUINO_DATA.
        """

        if not os.path.exists(os.path.join(os.getenv("PINGUINO_DATA"), "paths.cfg")):
            logging.error("Missing: "+os.path.join(os.getenv("PINGUINO_DATA"), "paths.cfg"))
            sys.exit()

        config_paths = RawConfigParser()
        # NOTE(review): readfp() is deprecated (removed in Python 3.12) and
        # the opened handle is never closed — consider read_file()/with.
        config_paths.readfp(open(os.path.join(os.getenv("PINGUINO_DATA"), "paths.cfg"), "r"))

        #RB20141116 : get the “bitness” of the current OS
        bitness, linkage = platform.architecture()
        os.environ["PINGUINO_OS_ARCH"] = bitness

        if os.name == "posix": #GNU/Linux
            os.environ["PINGUINO_OS_NAME"] = "linux"

        #Mac could return posix :/
        elif os.name == "os2":  #Mac OS X
            # NOTE(review): os.name is "posix" on macOS and "os2" only on
            # OS/2 builds, so this branch looks unreachable on Mac — confirm.
            os.environ["PINGUINO_OS_NAME"] = "macosx"

        elif os.name == "nt":  #Windows
            os.environ["PINGUINO_OS_NAME"] = "windows"

        #load path from paths.conf
        os.environ["PINGUINO_USER_PATH"] = os.path.expandvars(os.path.expanduser(config_paths.get("paths-%s"%os.getenv("PINGUINO_OS_NAME"), "user_path")))
        os.environ["PINGUINO_INSTALL_PATH"] = os.path.expandvars(os.path.expanduser(config_paths.get("paths-%s"%os.getenv("PINGUINO_OS_NAME"), "install_path")))
        os.environ["PINGUINO_USERLIBS_PATH"] = os.path.expandvars(os.path.join(os.getenv("PINGUINO_USER_PATH"), "library_manager"))
Exemplo n.º 7
0
	def __init__(self, config_path=None):
		"""Configure the bot from DEFAULT_CONFIG overlaid with *config_path*
		(defaults to 'regexbot.ini').

		BUG FIX: the original mixed Python 3-only read_dict() with
		Python 2-only `except Exception, ex` and print statements, which
		is a SyntaxError on Python 3; the handler is now Python 3 syntax.
		"""
		if config_path is None:
			config_path = 'regexbot.ini'
		config = RawConfigParser()
		config.read_dict(DEFAULT_CONFIG)
		config.read(config_path)

		self.rtm_token = config.get('regexbot', 'rtm_token')

		self.channel_flood_cooldown = timedelta(seconds=config.getint('regexbot', 'channel_flood_cooldown'))
		self.global_flood_cooldown = timedelta(seconds=config.getint('regexbot', 'global_flood_cooldown'))
		self.max_messages = config.getint('regexbot', 'max_messages')
		self.max_message_size = config.getint('regexbot', 'max_message_size')

		# Append the current git branch info when available.
		self.version = str(config.get('regexbot', 'version')) + '; %s'
		try:
			self.version = self.version % Popen(["git","branch","-v","--contains"], stdout=PIPE).communicate()[0].strip()
		except Exception:
			self.version = self.version % 'unknown'

		self._last_message_times = {}
		self._last_message = datetime.utcnow()
		self._message_buffer = {}

		# Compiled patterns of users/messages to ignore; a bad pattern is fatal.
		self.ignore_list = []
		if config.has_section('ignore'):
			for k, v in config.items('ignore'):
				try:
					self.ignore_list.append(regex.compile(str(v), regex.I))
				except Exception as ex:
					print("Error compiling regular expression in ignore list (%s):" % k)
					print("  %s" % v)
					print(ex)
					exit(1)
Exemplo n.º 8
0
class BCMConfig():
    """ Reads configuration information from bcm.ini.

        Args:
            username (str): bricklink username
            password (str): bricklink password
            wantedfilename (str): path of the wanted list
            pricefilename (str): path of the previously scrubbed price list
            reloadpricesfromweb (bool): if true, download and parse all of the price data again and save it to
                pricefilename.

            _parser (SafeConfigParser): parser that reads the config file
            _configfile (str): relative path of the config file
    """

    def __init__(self):
        """Read ../bcm.ini and expose its values as attributes."""
        self._configfile = '../bcm.ini'
        self._parser = RawConfigParser()
        self._parser.read(self._configfile)

        read_opt = self._parser.get
        self.username = read_opt('bricklink', 'username')
        self.password = read_opt('bricklink', 'password')
        self.wantedfilename = read_opt('filenames', 'wanted')
        self.pricefilename = read_opt('filenames', 'prices')
        self.reloadpricesfromweb = self._parser.getboolean('options', 'reloadpricesfromweb')
Exemplo n.º 9
0
def test():
    """Smoke-test the StatsCore daemon: fork, and in the child attach to
    the daemon over the configured transport, post log messages, then
    clean up the dump file and unix socket."""
    config = CParser()
    config.read('./etc/observant/observant.cfg')

    pid = os.fork()
    if pid != 0:
        # Parent: just wait for the child to finish.
        os.waitpid(pid, 0)
    else:
        dump = './.test.out'
        sockPath = './.test.sock'
        server = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
        server.setblocking(0)  # non-blocking listener
        with open(dump, 'w') as fd:
            fd.write("test")
            server.bind(sockPath)
            server.listen(1)
            transport = daemon.transportFromConfig(config)
            lock = str(config.get('daemon', 'lock'))
            sock = str(config.get('daemon', 'sock'))
            key = str(config.get('daemon', 'key'))
            client = StatsCore.attachOrCreateStatsDaemon(key, transport, pid=lock, sock=sock)
            client.postWatchPid('test', os.getpid())
            # Sleeps give the daemon time to process each message.
            time.sleep(4)
            client.postLogMessageForKey('test', 'some random logmessage')
            time.sleep(4)
            client.postLogMessageForKey('test', 'some random logmessage')
            client.close()
            time.sleep(4)
        os.remove(dump)
        server.close()
        os.unlink(sockPath)
Exemplo n.º 10
0
def init(app, config_file):
    """Load Bibtex metadata into the module-level *metadata* dict.

    Maps each bibliography entry's PDF basename to its plain-text
    citation (falling back to the .bib path when formatting fails).
    """
    global metadata
    config = ConfigParser()
    config.read(config_file)

    try:
        filename = config.get('bibtex', 'path')
    except ConfigParserError:
        # No explicit bibtex path: fall back to <main.path>/library.bib.
        model_path = config.get('main', 'path')
        filename = os.path.join(model_path, 'library.bib')

    print("Loading Bibtex metadata from", filename)
    bib = parse_file(filename)

    metadata = dict()
    for entry in bib.entries:
        # The 'file' field looks like ':<path>:pdf'; strip the markers.
        key = '/' + bib.entries[entry].fields.get('file', '').replace(':pdf', '')[1:]
        if 'C$\\backslash$:' in key:
            # Windows drive prefix escaped by the bibliography tool.
            key = key.replace('C$\\backslash$:', '')
            key = key[1:]
            key = os.path.normpath(key)
        key = os.path.basename(key)
        try:
            # [3:] presumably strips a leading '[1]'-style marker from the
            # rendered citation — confirm against pybtex output.
            citation = pybtex.format_from_file(
                filename, style='plain', output_backend='text', citations=[entry])[3:]
            metadata[key] = citation
        except PybtexError:
            metadata[key] = filename
Exemplo n.º 11
0
    def _parse_legacy_config_file(self):
        """
        Parse a legacy configuration file.

        Pulls template/style overrides, the logging level, key bindings
        and palette colors from LEGACY_CONFIG_FILE into this object.
        """
        parser = RawConfigParser()
        parser.read(LEGACY_CONFIG_FILE)

        updated_styles = self.styles.copy()
        for template_key in ('dm_template', 'header_template'):
            if parser.has_option('params', template_key):
                updated_styles[template_key] = parser.get('params', template_key)
        self.styles.update(updated_styles)

        if parser.has_option('params', 'logging_level'):
            self.logging_level = parser.getint('params', 'logging_level')

        for binding in self.key_bindings:
            if parser.has_option('keys', binding):
                self._set_key_binding(binding, parser.get('keys', binding))

        for label in (color[0] for color in PALETTE):
            if parser.has_option('colors', label):
                self._set_color(label, parser.get('colors', label))
Exemplo n.º 12
0
    def parse_and_append(self, filename):
        """Best-effort parse of a .desktop file; append it to self.apps
        when it is a media player handling one of self.mimetypes.

        Any parse error or missing section/option aborts silently — the
        bare except below is deliberate.
        """
        try:
            parser = RawConfigParser()
            parser.read([filename])
            # NOTE(review): `sect` is a module-level name defined outside
            # this view — presumably the "Desktop Entry" section; confirm.
            if not parser.has_section(sect):
                return

            app_categories = parser.get(sect, 'Categories')
            if not app_categories:
                return

            # Keep only applications in one of the player categories.
            if not any(category in self.PLAYER_CATEGORIES
                       for category in app_categories.split(';')):
                return

            # Find out if we need it by comparing mime types
            app_mime = parser.get(sect, 'MimeType')
            for needed_type in self.mimetypes:
                if app_mime.find(needed_type + '/') != -1:
                    app_name = parser.get(sect, 'Name')
                    app_cmd = parser.get(sect, 'Exec')
                    app_icon = parser.get(sect, 'Icon')
                    if not self.__has_sep:
                        self.add_separator()
                    self.apps.append(UserApplication(app_name, app_cmd, app_mime, app_icon))
                    return
        except:
            return
Exemplo n.º 13
0
def read_config_file(filename):
    """
    Reads a configuration file to modify the global settings.
    :param filename: cfg file pathname, read through os.path.normpath
    """
    global LOG_FORMAT, LOG_FILENAME, STATE_REGEX, ZIP_REGEX

    parser = RawConfigParser()
    parser.read(path.normpath(filename))

    # Sections should be "log" (format, output) and "validators"
    # (state, zip_code); anything else is ignored.
    for section in parser.sections():
        for option in parser.options(section):
            value = parser.get(section, option)
            if section == "log":
                if option == "format":
                    LOG_FORMAT = value
                elif option == "output":
                    LOG_FILENAME = value
            elif section == "validators":
                if option == "state":
                    STATE_REGEX = compile(value)
                elif option == "zip_code":
                    ZIP_REGEX = compile(value)
Exemplo n.º 14
0
Arquivo: config.py Projeto: kernt/ploy
 def read_config(self, config):
     """Read *config* (a path or file-like object) and, recursively, the
     files named by its [global] (or [global:global]) 'extends' option.

     :return: reversed list of (src, path, section, key, value) tuples;
         a (src, path, section, None, None) tuple marks each section.

     NOTE(review): Python 2 code — `unicode` does not exist on Python 3,
     and readfp() is deprecated there.
     """
     result = []
     stack = [config]
     while 1:
         config = stack.pop()
         src = None
         if isinstance(config, (str, unicode)):
             src = os.path.relpath(config)
         _config = RawConfigParser()
         # Preserve option-name case (default optionxform lower-cases).
         _config.optionxform = lambda s: s
         if getattr(config, 'read', None) is not None:
             # File-like object: parse directly.
             _config.readfp(config)
             path = self.path
         else:
             if not os.path.exists(config):
                 log.error("Config file '%s' doesn't exist.", config)
                 sys.exit(1)
             _config.read(config)
             path = os.path.dirname(config)
         # Appended in reverse so the final reversed(result) yields
         # original file order.
         for section in reversed(_config.sections()):
             for key, value in reversed(_config.items(section)):
                 result.append((src, path, section, key, value))
             result.append((src, path, section, None, None))
         if _config.has_option('global', 'extends'):
             extends = _config.get('global', 'extends').split()
         elif _config.has_option('global:global', 'extends'):
             extends = _config.get('global:global', 'extends').split()
         else:
             break
         # Prepend extended files so they are processed next.
         stack[0:0] = [
             os.path.abspath(os.path.join(path, x))
             for x in reversed(extends)]
     return reversed(result)
Exemplo n.º 15
0
    def parse_token_file(self, token_file):
        """Load OAuth credentials from *token_file* when present."""
        parser = RawConfigParser()
        parser.read(token_file)

        for option in ("oauth_token", "oauth_token_secret"):
            if parser.has_option(SECTION_TOKEN, option):
                setattr(self, option, parser.get(SECTION_TOKEN, option))
Exemplo n.º 16
0
    def version(self, root):
        """Retrieve the release version of the installed browser.

        Reads BuildID and SourceStamp from the [Build] section of the
        platform ini file located at root/self.platform_ini.
        """
        platform_info = RawConfigParser()

        # read_file() replaces readfp() (removed in Python 3.12); feeding
        # the text-mode handle directly also fixes the old
        # BytesIO(fp.read()) call, a TypeError on Python 3 (str vs bytes).
        with open(os.path.join(root, self.platform_ini), "r") as fp:
            platform_info.read_file(fp)
        return "BuildID %s; SourceStamp %s" % (
            platform_info.get("Build", "BuildID"),
            platform_info.get("Build", "SourceStamp"))
Exemplo n.º 17
0
    def _parse_legacy_token_file(self):
        """Load OAuth credentials from the legacy token file.

        Sets self.oauth_token / self.oauth_token_secret when the
        corresponding options exist in the token section.
        """
        conf = RawConfigParser()
        conf.read(LEGACY_TOKEN_FILE)

        if conf.has_option(SECTION_TOKEN, "oauth_token"):
            self.oauth_token = conf.get(SECTION_TOKEN, "oauth_token")

        # BUG FIX: this guard previously re-checked "oauth_token", so a
        # file with a token but no secret raised NoOptionError (and a
        # secret without a token was never loaded).
        if conf.has_option(SECTION_TOKEN, "oauth_token_secret"):
            self.oauth_token_secret = conf.get(SECTION_TOKEN, "oauth_token_secret")
Exemplo n.º 18
0
def settings_from_config(options):
    """Try to read config file and parse settings.

    Args:
        options: parsed NameSpace, with `config` and maybe `acl` values

    Returns:
        tuple of S3Config and PyPIConfig objects, or Nones when missing values
    """

    parser = RawConfigParser()

    config_file = options.config[0] if isinstance(options.config, list) else options.config

    try:
        parser.read(config_file)
    except Exception as error:
        # Unreadable file: report and fall through with an empty parser.
        print(error, file=sys.stderr)

    key = "pypicloud"  # config section key
    if key not in parser.sections():
        return None, None

    s3_conf = None
    pypi_conf = None

    # S3 settings require all three core options.
    if all(parser.has_option(key, opt) for opt in ("bucket", "access", "secret")):
        # Command-line ACL wins over the config file's.
        if getattr(options, "acl", None):
            acl = options.acl[0]
        elif parser.has_option(key, "acl"):
            acl = parser.get(key, "acl")
        else:
            acl = None

        s3_conf = S3Config(
            parser.get(key, "bucket"),
            parser.get(key, "access"),
            parser.get(key, "secret"),
            acl,
        )

    # PyPI settings likewise require a complete trio.
    if all(parser.has_option(key, opt) for opt in ("repository", "username", "password")):
        pypi_conf = PyPIConfig(
            parser.get(key, "repository"),
            parser.get(key, "username"),
            parser.get(key, "password"),
        )

    return s3_conf, pypi_conf
Exemplo n.º 19
0
 def from_ini_file(cls, cfg_file, no_default=False):
     """Build a definition list from an ini file.

     Each section becomes Definition(name, MODULE, eval(TEST)); the TEST
     option defaults to a predicate that accepts everything.
     """
     lst = cls(no_default)
     config = RawConfigParser()
     if config.read([cfg_file]) != []:
         for section in config.sections():
             try:
                 test = config.get(section, 'TEST')
             except NoOptionError:
                 # BUG FIX: the old default 'lambda(x): True' is Python 2
                 # tuple-parameter syntax and raises SyntaxError under
                 # eval() on Python 3.
                 test = 'lambda x: True'
             # SECURITY: eval() executes config-supplied code — only load
             # trusted ini files here.
             lst.append(Definition(section, config.get(section, 'MODULE'),
                                   eval(test)))
     return lst
Exemplo n.º 20
0
    def load_mc_config(self):
        """Load the configuration from minecraft-control.

        Reads host/port from [tcp-server] and password/method from
        [auth] into self.config.
        """
        # NOTE(review): `args` here vs `self.args` below — presumably the
        # same namespace; confirm which is intended.
        log.debug("Loading minecraft-control settings from {}".format(args.config))

        parser = RawConfigParser()
        with open(self.args.config, "r") as fh:
            # read_file() replaces readfp(), removed in Python 3.12.
            parser.read_file(fh)

        self.config["host"] = parser.get("tcp-server", "address")
        self.config["port"] = parser.get("tcp-server", "port")
        self.config["password"] = parser.get("auth", "password")
        self.config["method"] = parser.get("auth", "method")
Exemplo n.º 21
0
class FactorioLocale:
    """Accumulates Factorio locale strings from ini-style files.

    ``conf`` holds usable entries; ``crap`` holds entries still containing
    '__' placeholder markers, which merge() can fall back to.
    """

    def __init__(self):
        self.conf = RawConfigParser()
        self.crap = RawConfigParser()

    def get_name(self, section, name):
        """Return the stored name, or a '#section#name#' placeholder."""
        return self.conf.get(section, name) or '#%s#%s#' % (section, name)

    def load(self, csv):
        """Parse one locale file (encoding sniffed with chardet) into the
        conf/crap stores, reporting overwrites."""
        parser = RawConfigParser()
        with open(csv, 'rb') as fh:
            raw = fh.read()
        # Keys before the first section header get a synthetic section.
        text = '[__global__]\n' + raw.decode(chardet.detect(raw)['encoding'])
        parser.read_string(text)

        for sec in parser.sections():
            if not self.conf.has_section(sec):
                self.conf.add_section(sec)
                self.crap.add_section(sec)

            for k, v in parser.items(sec):
                if '__' in v:
                    # Placeholder-laden value: goes to the crap store.
                    if self.crap.has_option(sec, k):
                        print('Overwriting crap locale %s (%r -> %r)' % (k, self.crap.get(sec, k), v))
                    self.crap.set(sec, k, v)
                else:
                    if self.conf.has_option(sec, k):
                        if self.conf.get(sec, k).lower() != v.lower():
                            print('Overwriting locale %s (%r -> %r)' % (k, self.conf.get(sec, k), v))
                    self.conf.set(sec, k, v)

    def merge(self):
        """Backfill conf with crap entries for keys conf lacks."""
        for sec in self.crap.sections():
            for k, v in self.crap.items(sec):
                if not self.conf.has_option(sec, k):
                    print('Using crap locale %s (%r)' % (k, v))
                    self.conf.set(sec, k, v)

    def save(self, out):
        """Write the accumulated good locale to *out* in ini format."""
        with open(out, 'w') as f:
            self.conf.write(f)
Exemplo n.º 22
0
def read_pypirc(repository=DEFAULT_REPOSITORY, realm=DEFAULT_REALM):
    """Read the .pypirc file.

    Returns the configuration dict for the server matching *repository*
    — the new 'distutils'/index-servers format is tried first, then the
    legacy 'server-login' format; {} when nothing matches.
    """
    rc = get_pypirc_path()
    if os.path.exists(rc):
        config = RawConfigParser()
        config.read(rc)
        sections = config.sections()
        if "distutils" in sections:
            # let's get the list of servers
            index_servers = config.get("distutils", "index-servers")
            _servers = [server.strip() for server in index_servers.split("\n") if server.strip() != ""]
            if _servers == []:
                # nothing set, let's try to get the default pypi
                if "pypi" in sections:
                    _servers = ["pypi"]
                else:
                    # the file is not properly defined, returning
                    # an empty dict
                    return {}
            for server in _servers:
                current = {"server": server}
                current["username"] = config.get(server, "username")

                # optional params
                for key, default in (("repository", DEFAULT_REPOSITORY), ("realm", DEFAULT_REALM), ("password", None)):
                    if config.has_option(server, key):
                        current[key] = config.get(server, key)
                    else:
                        current[key] = default
                # Match either by section name or by repository URL.
                if current["server"] == repository or current["repository"] == repository:
                    return current
        elif "server-login" in sections:
            # old format
            server = "server-login"
            if config.has_option(server, "repository"):
                repository = config.get(server, "repository")
            else:
                repository = DEFAULT_REPOSITORY

            return {
                "username": config.get(server, "username"),
                "password": config.get(server, "password"),
                "repository": repository,
                "server": server,
                "realm": DEFAULT_REALM,
            }

    return {}
Exemplo n.º 23
0
def populate_config_dict(config_path):
    """Load the configuration file into the config_file dictionary

    A ConfigParser-style configuration file can have multiple sections, but
    we ignore the section distinction  and load the key/value pairs from all
    sections into a single key/value list.

    Returns None when the file cannot be parsed; empty values become
    None and other values are ~-expanded strings.
    """
    try:
        parser = RawConfigParser()
        parser.optionxform = lambda x: x  # keep option-name case
        parser.read(config_path)
        config_dict = {}
        for section in parser.sections():
            for option in parser.options(section):
                config_dict[option] = str(parser.get(section, option))
    except Exception as e:
        logger.warning("Could not load configuration file due to exception. "
                       "Only environment variable equivalents will be used.")
        return None

    for key in config_dict:
        value = config_dict[key]
        if value == '':
            config_dict[key] = None
        elif isinstance(value, str):
            config_dict[key] = os.path.expanduser(value)
    return config_dict
Exemplo n.º 24
0
def run():
	"""Build podcast feeds: read the master config, resolve the
	get-iplayer path, then process every feed config file in the feed
	config directory (returns after the first directory walked)."""
	# load the config file
	global master_config
	master_config = RawConfigParser()
	master_config.read(config_directory + MASTER_CONFIG_FILENAME)

	# set the get-iplayer path
	global get_iplayer_path
	if master_config.has_option("General", "get-iplayer_path"):
		get_iplayer_path = master_config.get("General", "get-iplayer_path")

	# refresh the get-iplayer cache
	# print("Refreshing get-iplayer... (this may take some time)")
	# subprocess.check_output([get_iplayer_path, "--type=all", "--quiet"])

	# new BBC rules :-( now we get the programm info externally using a different script
	# and read that scripts output directly into a hash
	# global bbc_programmes
	# bbc_programmes = load_bbc_programmes()

	# scan for feed config files and process each
	for root, directories, files in os.walk(config_directory + FEED_CONFIG_DIRECTORY):
		for filename in files:
			# skip macOS Finder metadata files
			if filename == ".DS_Store":
				continue

			print("about to read config " + filename )
			load_feed(filename)
		print("Finished.")
		return # stop here, we have processed the feeds

	# if we have not returned at this point, then no config directory was found, this is a problem
	print("No config directory found")
Exemplo n.º 25
0
def run():
	"""Refresh get-iplayer's programme cache, then process every feed
	config file in the feed config directory (returns after the first
	directory walked)."""
	# print a warning about copyright
	print("WARNING: Do not use the script to produce public podcasts, it is for personal use only.")
	print("If you publically serve programmes you may be in violation of the BBC's copyright.")

	# load the config file
	global master_config
	master_config = RawConfigParser()
	master_config.read(config_directory + MASTER_CONFIG_FILENAME)

	# set the get-iplayer path
	global get_iplayer_path
	if master_config.has_option("General", "get-iplayer_path"):
		get_iplayer_path = master_config.get("General", "get-iplayer_path")

	# refresh the get-iplayer cache
	print("Refreshing get-iplayer... (this may take some time)")
	subprocess.check_output([get_iplayer_path, "--type=all", "--quiet"])

	# scan for feed config files and process each
	for root, directories, files in os.walk(config_directory + FEED_CONFIG_DIRECTORY):
		for filename in files:
			load_feed(filename)
		print("Finished.")
		return # stop here, we have processed the feeds

	# if we have not returned at this point, then no config directory was found, this is a problem
	print("No config directory found")
Exemplo n.º 26
0
    def _getExtendedConfs(self, conf_filename, conf_data, confs=None):
        """Return a list of tuple (conf_name, parser, encoding_errors).

        :param conf_filename: The path and name of the conf file.
        :param conf_data: Unparsed config data.
        :param confs: A list of confs that extend filename.
        :return: A list of confs ordered from extender to extendee.
        :raises IOError: If filename cannot be read.

        This method parses the config data and checks for encoding errors.
        It checks parsed config data for the extends key in the meta section.
        It reads the unparsed config_data from the extended filename.
        It passes filename, data, and the working list to itself.
        """
        if confs is None:
            confs = []
        encoding_errors = self._verifyEncoding(conf_data)
        parser = RawConfigParser()
        # read_file() replaces readfp(), deprecated since Python 3.2 and
        # removed in 3.12; the second argument still names the source.
        parser.read_file(StringIO(conf_data), conf_filename)
        confs.append((conf_filename, parser, encoding_errors))
        if parser.has_option('meta', 'extends'):
            # Recurse into the extended file, relative to this one.
            base_path = dirname(conf_filename)
            extends_name = parser.get('meta', 'extends')
            extends_filename = abspath('%s/%s' % (base_path, extends_name))
            extends_data = read_content(extends_filename)
            self._getExtendedConfs(extends_filename, extends_data, confs)
        return confs
Exemplo n.º 27
0
Arquivo: state.py Projeto: spanezz/egt
    def load(self, statedir: str = None) -> None:
        """Load project state from *statedir* (default: self.get_state_dir()).

        Prefers the JSON state file; falls back to the legacy .ini format.
        """
        if statedir is None:
            statedir = self.get_state_dir()

        json_path = os.path.join(statedir, "state.json")
        if os.path.exists(json_path):
            # Load state from JSON file
            with open(json_path, "rt") as fh:
                data = json.load(fh)
            self.projects = data["projects"]
            return

        # TODO: remove support for legacy format
        legacy_path = os.path.join(statedir, "state")
        if not os.path.exists(legacy_path):
            return
        # Load state from legacy .ini file
        from configparser import RawConfigParser
        parser = RawConfigParser()
        parser.read([legacy_path])
        for section_name in parser.sections():
            if section_name.startswith("proj "):
                project_name = section_name.split(None, 1)[1]
                self.projects[project_name] = {"fname": parser.get(section_name, "fname")}
        return
Exemplo n.º 28
0
	def read_merged(self, filenames, encoding=None):
		"""Read several ini files and merge them into self.

		Plain values simply overwrite; values containing ';' are treated
		as ';'-separated lists and merged with the existing list (new
		values first, duplicates dropped, trailing ';' kept).

		NOTE(review): *encoding* is accepted but never used — confirm
		whether it should be passed to RawConfigParser.read().
		"""
		cfg = []
		for filename in filenames:
			_cfg = RawConfigParser()
			_cfg.read(filename)
			cfg.append(_cfg)

		for _cfg in cfg:
			for section in _cfg.sections():
				if not self.has_section(section):
					self.add_section(section)

				for option in _cfg.options(section):
					value = _cfg.get(section, option)
					if ";" in value:
						# Only merge as a list when the current value is
						# also a ';'-list; otherwise overwrite below.
						current = self.getdefault(section, option, "")
						if ";" in current:
							val = []
							for v in value.split(";"):
								if v and v not in val:
									val.append(v)

							for v in self.getlist(section, option):
								if v and v not in val:
									val.append(v)

							self.set(section, option, ";".join(val) + ";")
							continue
					self.set(section, option, value)
Exemplo n.º 29
0
class Options:
    """XDG-backed option store: reads <name>.cfg from every XDG config
    path and persists changes to the user's save path."""

    def __init__(self, name, defaults):
        # Lowest-priority paths first, so later reads override earlier ones.
        load_paths = list(reversed([os.path.join(directory, '%s.cfg' % name)
                                    for directory
                                    in load_config_paths(name)]))
        self.save_path = os.path.join(save_config_path(name), '%s.cfg' % name)

        self.config = RawConfigParser(defaults)
        self.paths = self.config.read(load_paths)
        # Fall back to the DEFAULT section when the named one is absent.
        self.section = name if self.config.has_section(name) else 'DEFAULT'

    def get(self, option):
        """Return the option's value, or None when it is not set."""
        try:
            return self.config.get(self.section, option)
        except NoOptionError:
            return None

    def set(self, option, value):
        """Set (or, for value None, remove) an option and write to disk."""
        if value is not None:
            self.config.set(self.section, option, value)
        else:
            self.config.remove_option(self.section, option)

        # HACK: temporarily hide the defaults (private _defaults attr) so
        # write() emits only explicitly-set options.
        defaults = self.config._defaults
        self.config._defaults = None
        with open(self.save_path, 'w') as save_file:
            self.config.write(save_file)
        self.config._defaults = defaults
Exemplo n.º 30
0
 def get(self, section, option, default=None):
     """Like RawConfigParser.get, but return *default* (when given)
     instead of raising NoSectionError."""
     try:
         value = RawConfigParser.get(self, section, option)
     except NoSectionError:
         if default is None:
             raise
         value = default
     return value
Exemplo n.º 31
0
import os
from configparser import RawConfigParser

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Environment-specific secrets live outside version control in
# conf/local.conf, read with RawConfigParser (no '%' interpolation).
local_config_path = os.path.join(BASE_DIR, 'conf', 'local.conf')
config_parser = RawConfigParser()
config_parser.read(local_config_path)

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): read() silently ignores a missing file, so an absent
# conf/local.conf surfaces here as NoSectionError — confirm intended.
SECRET_KEY = config_parser.get('main', 'SECRET_KEY')

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = config_parser.getboolean('main', 'DEBUG')

ALLOWED_HOSTS = []

SITE_ID = 1

# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
Exemplo n.º 32
0
from configparser import RawConfigParser


def keys_to_upper(conf_section):
    """Return a dict whose keys are the upper-cased keys of *conf_section*.

    Values are carried over unchanged; the input mapping is not modified.
    """
    return {key.upper(): value for key, value in conf_section.items()}


# Site-wide settings come from an absolute path on the deployment host.
config = RawConfigParser()
config.read("/CloudFS/vuohiportal/configs/settings.ini")

try:
    # SSL
    header = (config.get('SSL', 'SECURE_PROXY_SSL_HEADER_PROTOCOL'),
              config.get('SSL', 'SECURE_PROXY_SSL_HEADER_VALUE'))
    SECURE_PROXY_SSL_HEADER = header
    SECURE_SSL_REDIRECT = config.getboolean('SSL', 'SECURE_SSL_REDIRECT')
    SESSION_COOKIE_SECURE = config.getboolean('SSL', 'SESSION_COOKIE_SECURE')
    CSRF_COOKIE_SECURE = config.getboolean('SSL', 'CSRF_COOKIE_SECURE')
except ValueError:
    # ValueError covers malformed booleans from getboolean().
    # NOTE(review): a missing [SSL] section raises NoSectionError, which is
    # NOT caught here and would abort settings import — confirm intended.
    pass

CORS_ALLOW_ALL_ORIGINS = True

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
Exemplo n.º 33
0
from bs4 import BeautifulSoup
from dateutil.parser import parse

import glob
import gc
import json

import MySQLdb as my

# Settings.ini sits next to this script; build an absolute path to it so
# the script works regardless of the current working directory.
__location__ = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__)))
configFile = os.path.join(__location__, 'Settings.ini')

db_config = RawConfigParser()
db_config.read(configFile)

# MySQL connection parameters, all read from the [database] section.
db_name = db_config.get('database', 'DATABASE_NAME')
db_user = db_config.get('database', 'DATABASE_USER')
db_pwd = db_config.get('database', 'DATABASE_PASSWORD')
db_host = db_config.get('database', 'DATABASE_HOST')
db_port = db_config.get('database', 'DATABASE_PORT')

# Global debug flag checked elsewhere in the script.
isDEBUG = False


# Database SELECT
# For every sub-query, the iterable result is appended to a master list of results
def bulk_upload(query):
    success = False
    try:

        conn = mysql.connector.connect(user=db_user, password=db_pwd, host=db_host, port=db_port, database=db_name)
Exemplo n.º 34
0
                          type=str,
                          nargs='?',
                          help='Login password or read from config.ini.')
    arparser.add_argument(
        '--notify',
        type=str,
        default=None,
        nargs='?',
        help='Phone number to send a text message summary of results.')
    args = arparser.parse_args()

    parser = RawConfigParser()
    parser.read('config.ini')

    # Get email/password from config file
    email = os.getenv('email') or args.user or parser.get(args.config, 'email')
    password = os.getenv('password') or args.password or parser.get(
        args.config, 'password')
    phone = os.getenv('notify') or args.notify or parser.get(
        args.config, 'notify') if parser.has_option(args.config,
                                                    'notify') else None
    delay = 10

    if args.store == 'shoprite':
        grocery_coupons.shoprite(email, password, phone, delay, onStatus)
    elif args.store == 'stop_and_shop':
        grocery_coupons.stop_and_shop(email, password, phone, delay, onStatus)
    elif args.store == 'acme':
        grocery_coupons.acme(email, password, phone, delay, onStatus)
    elif args.store == 'help':
        print('Usage: client.py [shoprite | acme | stop_and_shop]')
Exemplo n.º 35
0
# 获取账号参数
try:
    configinfo = RawConfigParser()
    try:
        configinfo.read(pwd + "OpenCardConfig.ini", encoding="UTF-8")
    except Exception as e:
        with open(pwd + "OpenCardConfig.ini", "r", encoding="UTF-8") as config:
            getConfig = config.read().encode('utf-8').decode('utf-8-sig')
        with open(pwd + "OpenCardConfig.ini", "w", encoding="UTF-8") as config:
            config.write(getConfig)
        try:
            configinfo.read(pwd + "OpenCardConfig.ini", encoding="UTF-8")
        except:
            configinfo.read(pwd + "OpenCardConfig.ini", encoding="gbk")
    cookies = configinfo.get('main', 'JD_COOKIE')
    openCardBean = configinfo.getint('main', 'openCardBean')
    sleepNum = configinfo.getfloat('main', 'sleepNum')
    record = configinfo.getboolean('main', 'record')
    onlyRecord = configinfo.getboolean('main', 'onlyRecord')
    memory = configinfo.getboolean('main', 'memory')
    printlog = configinfo.getboolean('main', 'printlog')
    isRemoteSid = configinfo.getboolean('main', 'isRemoteSid')
    TG_BOT_TOKEN = configinfo.get('main', 'TG_BOT_TOKEN')
    TG_USER_ID = configinfo.get('main', 'TG_USER_ID')
    PUSH_PLUS_TOKEN = configinfo.get('main', 'PUSH_PLUS_TOKEN')
    TG_PROXY_IP = configinfo.get('main', 'TG_PROXY_IP')
    TG_PROXY_PORT = configinfo.get('main', 'TG_PROXY_PORT')
    TG_API_HOST = configinfo.get('main', 'TG_API_HOST')
    QYWX_AM = configinfo.get('main', 'QYWX_AM')
    Concurrent = configinfo.getboolean('main', 'Concurrent')
Exemplo n.º 36
0
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'aniholo.wsgi.application'

# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': config.get('database_login', 'db_name'),
        'USER': config.get('database_login', 'db_user'),
        'PASSWORD': config.get('database_login', 'db_pass'),
        'HOST': config.get('database_login', 'db_host'),
        'PORT': config.get('database_login', 'db_port'),
    }
}

# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME':
        'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
# Production secrets are read from an absolute path outside the repository.
config = RawConfigParser()
config.read('/home/production/settings.ini')

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Template and asset directories, all rooted at the project base.
TEMPLATES_DIR1 = os.path.join(BASE_DIR, 'templates/dashboard')
TEMPLATES_DIR2 = os.path.join(BASE_DIR, 'templates/Ercesscorp')
TEMPLATES_DIR3 = os.path.join(BASE_DIR, 'templates')
STATIC_DIR = os.path.join(BASE_DIR, 'templates/static')
MEDIA_DIR = os.path.join(BASE_DIR, 'media')
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = config.get('section', 'secret_key')

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False

ALLOWED_HOSTS = ['ercess.com', '127.0.0.1']

# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
Exemplo n.º 38
0
def main():
    """Entry point: read config, connect to Bitcoin Core over JSON-RPC, then
    either run a wallet rescan or start the Electrum Personal Server loop.
    """
    opts = parse_args()

    # A config file lacking a [master-public-keys] section is treated the
    # same as a missing file.
    try:
        config = RawConfigParser()
        config.read(opts.config_file)
        config.options("master-public-keys")
    except NoSectionError:
        print("ERROR: Non-existant configuration file {}".format(
            opts.config_file))
        return
    logger = logging.getLogger('ELECTRUMPERSONALSERVER')
    logger, logfilename = logger_config(logger, config)
    logger.info('Starting Electrum Personal Server ' +
                str(SERVER_VERSION_NUMBER))
    logger.info('Logging to ' + logfilename)
    logger.debug("Process ID (PID) = " + str(os.getpid()))
    # Prefer explicit rpc_user/rpc_password; fall back to Core's .cookie file
    # when those options are absent.
    rpc_u = None
    rpc_p = None
    cookie_path = None
    try:
        rpc_u = config.get("bitcoin-rpc", "rpc_user")
        rpc_p = config.get("bitcoin-rpc", "rpc_password")
        logger.debug("obtaining auth from rpc_user/pass")
    except NoOptionError:
        cookie_path = obtain_cookie_file_path(
            config.get("bitcoin-rpc", "datadir"))
        logger.debug("obtaining auth from .cookie")
    if rpc_u == None and cookie_path == None:
        return
    rpc = JsonRpc(host=config.get("bitcoin-rpc", "host"),
                  port=int(config.get("bitcoin-rpc", "port")),
                  user=rpc_u,
                  password=rpc_p,
                  cookie_path=cookie_path,
                  wallet_filename=config.get("bitcoin-rpc",
                                             "wallet_filename").strip(),
                  logger=logger)

    #TODO somewhere here loop until rpc works and fully sync'd, to allow
    # people to run this script without waiting for their node to fully
    # catch up sync'd when getblockchaininfo blocks == headers, or use
    # verificationprogress
    # Poll until the node answers; log only the first failure, retry each 5s.
    printed_error_msg = False
    while bestblockhash[0] == None:
        try:
            bestblockhash[0] = rpc.call("getbestblockhash", [])
        except JsonRpcError as e:
            if not printed_error_msg:
                logger.error("Error with bitcoin json-rpc: " + repr(e))
                printed_error_msg = True
            time.sleep(5)
    # Sanity-check that the node has wallet support compiled in.
    try:
        rpc.call("listunspent", [])
    except JsonRpcError as e:
        logger.error(repr(e))
        logger.error("Wallet related RPC call failed, possibly the " +
                     "bitcoin node was compiled with the disable wallet flag")
        return

    # Probe with a fixed multisig master-public-key pair to verify that the
    # node supports the descriptor RPCs this server depends on.
    test_keydata = (
        "2 tpubD6NzVbkrYhZ4YVMVzC7wZeRfz3bhqcHvV8M3UiULCfzFtLtp5nwvi6LnBQegrkx"
        +
        "YGPkSzXUEvcPEHcKdda8W1YShVBkhFBGkLxjSQ1Nx3cJ tpubD6NzVbkrYhZ4WjgNYq2nF"
        +
        "TbiSLW2SZAzs4g5JHLqwQ3AmR3tCWpqsZJJEoZuP5HAEBNxgYQhtWMezszoaeTCg6FWGQB"
        + "T74sszGaxaf64o5s")
    chain = rpc.call("getblockchaininfo", [])["chain"]
    try:
        gaplimit = 5
        deterministicwallet.parse_electrum_master_public_key(
            test_keydata, gaplimit, rpc, chain)
    except ValueError as e:
        logger.error(repr(e))
        logger.error(
            "Descriptor related RPC call failed. Bitcoin Core 0.20.0" +
            " or higher required. Exiting..")
        return
    if opts.rescan:
        rescan_script(logger, rpc, opts.rescan_date)
        return
    # Block here until any in-progress Core wallet rescan completes.
    while True:
        logger.debug("Checking whether rescan is in progress")
        walletinfo = rpc.call("getwalletinfo", [])
        if "scanning" in walletinfo and walletinfo["scanning"]:
            logger.debug("Waiting for Core wallet rescan to finish")
            time.sleep(300)
            continue
        break
    import_needed, relevant_spks_addrs, deterministic_wallets = \
        get_scriptpubkeys_to_monitor(rpc, config)
    if import_needed:
        if not relevant_spks_addrs and not deterministic_wallets:
            #import = true and no addresses means exit
            return
        deterministicwallet.import_addresses(
            rpc,
            relevant_spks_addrs,
            deterministic_wallets,
            change_param=-1,
            count=int(config.get("bitcoin-rpc", "initial_import_count")))
        logger.info(
            "Done.\nIf recovering a wallet which already has existing" +
            " transactions, then\nrun the rescan script. If you're confident" +
            " that the wallets are new\nand empty then there's no need to" +
            " rescan, just restart this script")
    else:
        # All addresses already imported: build history and serve Electrum.
        txmonitor = transactionmonitor.TransactionMonitor(
            rpc, deterministic_wallets, logger)
        if not txmonitor.build_address_history(relevant_spks_addrs):
            return
        try:
            run_electrum_server(rpc, txmonitor, config)
        except KeyboardInterrupt:
            logger.info('Received KeyboardInterrupt, quitting')
import os
from configparser import RawConfigParser

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
PROJECT_DIR = os.path.dirname(BASE_DIR)

# Environment-specific values live in conf/local.conf, kept out of VCS.
local_config_path = os.path.join(BASE_DIR, 'conf', 'local.conf')
config = RawConfigParser()
config.read(local_config_path)

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = config.get('main', 'SECRET_KEY')

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = config.getboolean('main', 'DEBUG')

ALLOWED_HOSTS = []

# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
Exemplo n.º 40
0
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'jaintapp.wsgi.application'

# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': config.get('db_pgsql', 'ENGINE'),
        'NAME': config.get('db_pgsql', 'NAME'),
        'HOST': config.get('db_pgsql', 'HOST'),
        'USER': config.get('db_pgsql', 'USER'),
        'PASSWORD': config.get('db_pgsql', 'PASSWORD'),
    }
}

# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME':
        'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
Exemplo n.º 41
0
def init(domain):
	"""Query the Censys certificate search API for subdomains of *domain*.

	Returns a set of subdomain labels on success, or an empty list when
	credentials are missing or a request fails (a list is also returned on
	HTTP 429 before the first page is parsed).
	"""
	C = []

	print(colored("[*]-Searching Censys...", "yellow"))

	# Credentials are read from the [Censys] section of a local config.ini.
	parser = RawConfigParser()
	parser.read("config.ini")
	API_URL = "https://www.censys.io/api/v1"
	UID = parser.get("Censys", "CENSYS_UID")
	SECRET = parser.get("Censys", "CENSYS_SECRET")

	if UID == "" or SECRET == "":
		print("  \__", colored("No Censys API credentials configured", "red"))
		return []

	else:
		payload = {"query": domain}

		try:
			res = requests.post(API_URL + "/search/certificates", json=payload, auth=(UID, SECRET))

			if res.status_code == 429:
				print("  \__", colored("Rate limit exceeded. See https://www.censys.io/account for rate limit details.", "red"))
				return C

			# Scrape certificate CNs and the page count out of the raw body.
			C = re.findall("CN=([\w\.\-\d]+)\." + domain, str(res.content))
			numberOfPages = re.findall("pages\":\s(\d+)?}", str(res.content))

			# Walk the remaining result pages, accumulating CNs.
			for page in range(2, int(numberOfPages[0]) + 1):
				payload = {"query": domain, "page": page}
				res = requests.post(API_URL + "/search/certificates", json=payload, auth=(UID, SECRET))
				tempC = re.findall("CN=([\w\.\-\d]+)\." + domain, str(res.content))
				C = C + tempC

			C = set(C)

			print("  \__ {0}: {1}".format(colored("Subdomains found", "cyan"), colored(len(C), "yellow")))
			return C

		except KeyError as errk:
			print("  \__", colored(errk, "red"))
			return []

		# BUGFIX: the specific request exceptions below are subclasses of
		# RequestException, so they must precede it — in the original
		# ordering these three handlers were unreachable.
		except requests.exceptions.HTTPError as errh:
			print("  \__", colored(errh, "red"))
			return []

		except requests.exceptions.ConnectionError as errc:
			print("  \__", colored(errc, "red"))
			return []

		except requests.exceptions.Timeout as errt:
			print("  \__", colored(errt, "red"))
			return []

		except requests.exceptions.RequestException as err:
			print("  \__", colored(err, "red"))
			return []

		except Exception:
			print("  \__", colored("Something went wrong!", "red"))
			return []
Exemplo n.º 42
0
allprotocols = []


def normalizedFilename(f):
    """Map the conventional '-' argument to a readable '<stdin>' label."""
    return '<stdin>' if f == '-' else f


log(2, 'Reading sync message list')
parser = RawConfigParser()
# BUGFIX: use a context manager so the config file handle is closed promptly
# (the original passed an unclosed open() result to read_file and leaked it).
with open(options.syncMsgList) as syncMsgFile:
    parser.read_file(syncMsgFile)
syncMsgList = parser.sections()

# Every sync message must carry a human-readable justification.
for section in syncMsgList:
    if not parser.get(section, "description"):
        print('Error: Sync message %s lacks a description' % section,
              file=sys.stderr)
        sys.exit(1)

# Read message metadata. Right now we only have 'segment_capacity'
# for the standard segment size used for serialization.
log(2, 'Reading message metadata...')
msgMetadataConfig = RawConfigParser()
with open(options.msgMetadata) as msgMetadataFile:
    msgMetadataConfig.read_file(msgMetadataFile)

# Map message name -> declared segment capacity (messages without the
# option simply use the default).
segmentCapacityDict = {}
for msgName in msgMetadataConfig.sections():
    if msgMetadataConfig.has_option(msgName, 'segment_capacity'):
        capacity = msgMetadataConfig.get(msgName, 'segment_capacity')
        segmentCapacityDict[msgName] = capacity
Exemplo n.º 43
0
 def getfloat(self, section, option):
     """Return the option converted to float, or None when it is absent.

     A value that cannot be parsed as a float still raises ValueError.
     """
     try:
         raw = RawConfigParser.get(self, section, option)
     except NoOptionError:
         return None
     return float(raw)
Exemplo n.º 44
0
class MrxsFile(object):
    """Accessor for a MIRAX (.mrxs) slide: parses Slidedat.ini, locates the
    index/data files, and supports deleting non-hierarchical levels in place.
    """

    def __init__(self, filename):
        """Parse *filename*'s Slidedat.ini and enumerate its levels.

        Raises UnrecognizedFile when the extension is not '.mrxs' or the
        slidedat file cannot be opened.
        """
        # Split filename
        dirname, ext = os.path.splitext(filename)
        if ext != '.mrxs':
            raise UnrecognizedFile

        # Parse slidedat; optionxform=str keeps option names case-sensitive.
        self._slidedatfile = os.path.join(dirname, 'Slidedat.ini')
        self._dat = RawConfigParser()
        self._dat.optionxform = str
        try:
            with open(self._slidedatfile, 'r', encoding="utf-8-sig") as fh:
                # Remember whether the file started with a UTF-8 BOM so we
                # can reproduce it when writing the slidedat back out.
                self._have_bom = (fh.read(len(UTF8_BOM)) == UTF8_BOM)
                if not self._have_bom:
                    fh.seek(0)
                self._dat.read_file(fh)
        except IOError:
            raise UnrecognizedFile

        # Resolve the index file and all data files relative to the slide dir.
        self._indexfile = os.path.join(
            dirname, self._dat.get(MRXS_HIERARCHICAL, 'INDEXFILE'))
        self._datafiles = [
            os.path.join(dirname, self._dat.get('DATAFILE', 'FILE_%d' % i))
            for i in range(self._dat.getint('DATAFILE', 'FILE_COUNT'))
        ]

        # Build levels
        self._make_levels()

    def _make_levels(self):
        """Rebuild the (layer_name, level_name) -> level lookup tables."""
        self._levels = {}
        self._level_list = []
        layer_count = self._dat.getint(MRXS_HIERARCHICAL, 'NONHIER_COUNT')
        for layer_id in range(layer_count):
            level_count = self._dat.getint(MRXS_HIERARCHICAL,
                                           'NONHIER_%d_COUNT' % layer_id)
            for level_id in range(level_count):
                level = MrxsNonHierLevel(self._dat, layer_id, level_id,
                                         len(self._level_list))
                self._levels[(level.layer_name, level.name)] = level
                self._level_list.append(level)

    @classmethod
    def _read_int32(cls, f):
        """Read one little-endian signed 32-bit integer from *f*."""
        buf = f.read(4)
        if len(buf) != 4:
            raise IOError('Short read')
        return struct.unpack('<i', buf)[0]

    @classmethod
    def _assert_int32(cls, f, value):
        """Read an int32 and raise ValueError unless it equals *value*."""
        v = cls._read_int32(f)
        if v != value:
            raise ValueError('%d != %d' % (v, value))

    def _get_data_location(self, record):
        """Return (datafile_path, offset, size) for an index *record*."""
        with open(self._indexfile, 'rb') as fh:
            fh.seek(MRXS_NONHIER_ROOT_OFFSET)
            # seek to record
            table_base = self._read_int32(fh)
            fh.seek(table_base + record * 4)
            # seek to list head
            list_head = self._read_int32(fh)
            fh.seek(list_head)
            # seek to data page
            self._assert_int32(fh, 0)
            page = self._read_int32(fh)
            fh.seek(page)
            # check pagesize
            self._assert_int32(fh, 1)
            # read rest of prologue
            self._read_int32(fh)
            self._assert_int32(fh, 0)
            self._assert_int32(fh, 0)
            # read values
            position = self._read_int32(fh)
            size = self._read_int32(fh)
            fileno = self._read_int32(fh)
            return (self._datafiles[fileno], position, size)

    def _zero_record(self, record):
        """Blank out a record's image data, truncating when it ends the file."""
        path, offset, length = self._get_data_location(record)
        with open(path, 'r+b') as fh:
            fh.seek(0, 2)
            do_truncate = (fh.tell() == offset + length)
            if DEBUG:
                if do_truncate:
                    print('Truncating', path, 'to', offset)
                else:
                    print('Zeroing', path, 'at', offset, 'for', length)
            fh.seek(offset)
            buf = fh.read(len(JPEG_SOI))
            if buf != JPEG_SOI:
                raise IOError('Unexpected data in nonhier image')
            if do_truncate:
                fh.truncate(offset)
            else:
                fh.seek(offset)
                # BUGFIX: the file is open in binary mode, so the zero fill
                # must be bytes — writing a str raised TypeError on Python 3.
                fh.write(b'\0' * length)

    def _delete_index_record(self, record):
        """Remove *record*'s pointer from the nonhier table in the index."""
        if DEBUG:
            print('Deleting record', record)
        with open(self._indexfile, 'r+b') as fh:
            entries_to_move = len(self._level_list) - record - 1
            if entries_to_move == 0:
                return
            # get base of table
            fh.seek(MRXS_NONHIER_ROOT_OFFSET)
            table_base = self._read_int32(fh)
            # read tail of table
            fh.seek(table_base + (record + 1) * 4)
            buf = fh.read(entries_to_move * 4)
            if len(buf) != entries_to_move * 4:
                raise IOError('Short read')
            # overwrite the target record
            fh.seek(table_base + record * 4)
            fh.write(buf)

    def _hier_keys_for_level(self, level):
        """Return every hierarchical slidedat key belonging to *level*."""
        ret = []
        for k, _ in self._dat.items(MRXS_HIERARCHICAL):
            if k == level.key_prefix or k.startswith(level.key_prefix + '_'):
                ret.append(k)
        return ret

    def _rename_section(self, old, new):
        """Copy section *old* to *new* and drop *old* (no-op when missing)."""
        if self._dat.has_section(old):
            if DEBUG:
                print('[%s] -> [%s]' % (old, new))
            self._dat.add_section(new)
            for k, v in self._dat.items(old):
                self._dat.set(new, k, v)
            self._dat.remove_section(old)
        elif DEBUG:
            print('[%s] does not exist' % old)

    def _delete_section(self, section):
        """Drop an entire slidedat section."""
        if DEBUG:
            print('Deleting [%s]' % section)
        self._dat.remove_section(section)

    def _set_key(self, section, key, value):
        """Overwrite one slidedat key with *value* (a string)."""
        if DEBUG:
            prev = self._dat.get(section, key)
            print('[%s] %s: %s -> %s' % (section, key, prev, value))
        self._dat.set(section, key, value)

    def _rename_key(self, section, old, new):
        """Move a value from key *old* to key *new* within *section*."""
        if DEBUG:
            print('[%s] %s -> %s' % (section, old, new))
        v = self._dat.get(section, old)
        self._dat.remove_option(section, old)
        self._dat.set(section, new, v)

    def _delete_key(self, section, key):
        """Remove one slidedat key."""
        if DEBUG:
            print('Deleting [%s] %s' % (section, key))
        self._dat.remove_option(section, key)

    def _write(self):
        """Serialize the slidedat with CRLF line endings (and BOM if seen)."""
        buf = StringIO()
        self._dat.write(buf)
        with open(self._slidedatfile, 'wb') as fh:
            if self._have_bom:
                fh.write(UTF8_BOM.encode())
            fh.write(buf.getvalue().replace('\n', '\r\n').encode())

    def delete_level(self, layer_name, level_name):
        """Delete one non-hierarchical level and compact all metadata."""
        level = self._levels[(layer_name, level_name)]
        record = level.record

        # Zero image data
        self._zero_record(record)

        # Delete pointer from nonhier table in index
        self._delete_index_record(record)

        # Remove slidedat keys
        for k in self._hier_keys_for_level(level):
            self._delete_key(MRXS_HIERARCHICAL, k)

        # Remove slidedat section
        self._delete_section(level.section)

        # Rename section and keys for subsequent levels in the layer
        prev_level = level
        for cur_level in self._level_list[record + 1:]:
            if cur_level.layer_id != prev_level.layer_id:
                break
            for k in self._hier_keys_for_level(cur_level):
                new_k = k.replace(cur_level.key_prefix, prev_level.key_prefix,
                                  1)
                self._rename_key(MRXS_HIERARCHICAL, k, new_k)
            self._set_key(MRXS_HIERARCHICAL, prev_level.section_key,
                          prev_level.section)
            self._rename_section(cur_level.section, prev_level.section)
            prev_level = cur_level

        # Update level count within layer
        count_k = 'NONHIER_%d_COUNT' % level.layer_id
        count_v = self._dat.getint(MRXS_HIERARCHICAL, count_k)
        # BUGFIX: configparser.set() only accepts string values on Python 3,
        # so the decremented count must be stringified before storing.
        self._set_key(MRXS_HIERARCHICAL, count_k, str(count_v - 1))

        # Write slidedat
        self._write()

        # Refresh metadata
        self._make_levels()
import os
from configparser import RawConfigParser

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
JSON_PATH = os.path.join(BASE_DIR, 'json')
CONF_PATH = os.path.join(BASE_DIR, 'conf')

CONFIG = RawConfigParser()
CONFIG.read(os.path.join(CONF_PATH, 'local.conf'), encoding='utf-8')

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = CONFIG.get('main', 'SECRET_KEY')

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = CONFIG.getboolean('main', 'DEBUG')

ALLOWED_HOSTS = []

# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
Exemplo n.º 46
0
      '2、若一直连不上javbus,请在ini中更新防屏蔽网址\n'
      '3、找不到AV信息,请在javbus上确认,再修改本地视频文件名,如:\n'
      '   1)多余的字母数字:[JAV] [Uncensored] HEYZO 2171 [1080p].mp4 => HEYZO 2171.mp4\n'
      '                  112314-742-carib-1080p.mp4 => 112314-742.mp4\n'
      '                  Heyzo_HD_0733_full.mp4 => Heyzo_0733.mp4\n'
      '   2)多余的横杠:sr-131.mp4 => sr131.mp4\n'
      '   3)干扰车牌的分集:Heyzo_0733_01.mp4 => Heyzo_0733啊.mp4\n'
      '                     Heyzo_0733_02.mp4 => Heyzo_0733吧.mp4\n')
# 读取配置文件,这个ini文件用来给用户设置
print('\n正在读取ini中的设置...', end='')
try:
    config_settings = RawConfigParser()
    config_settings.read('【点我设置整理规则】.ini', encoding='utf-8-sig')
    ####################################################################################################################
    # 是否 收集nfo
    bool_nfo = True if config_settings.get("收集nfo", "是否收集nfo?") == '是' else False
    # 是否 跳过已存在nfo的文件夹,不整理已有nfo的文件夹
    bool_skip = True if config_settings.get("收集nfo", "是否跳过已存在nfo的文件夹?") == '是' else False
    # 自定义 nfo中title的格式
    custom_nfo_title = config_settings.get("收集nfo", "nfo中title的格式")
    # 是否 去除 标题 末尾可能存在的演员姓名
    bool_strip_actors = True if config_settings.get("收集nfo", "是否去除标题末尾可能存在的演员姓名?") == '是' else False
    # 自定义 将系列、片商等元素作为特征,因为emby不会直接在影片介绍页面上显示片商,也不会读取系列set
    custom_genres = config_settings.get("收集nfo", "额外将以下元素添加到特征中")
    # 是否 将特征保存到风格中
    bool_genre = True if config_settings.get("收集nfo", "是否将特征保存到genre?") == '是' else False
    # 是否 将 片商 作为特征
    bool_tag = True if config_settings.get("收集nfo", "是否将特征保存到tag?") == '是' else False
    ####################################################################################################################
    # 是否 重命名 视频
    bool_rename_mp4 = True if config_settings.get("重命名影片", "是否重命名影片?") == '是' else False
Exemplo n.º 47
0
import qiskit
from qiskit import IBMQ, ClassicalRegister, QuantumRegister, QuantumCircuit, execute, Aer
from qiskit.backends.ibmq import least_busy
from configparser import RawConfigParser

# Setup the API key for the real quantum computer.
# NOTE(review): the key comes from the [IBM] section of a local config.ini.
parser = RawConfigParser()
parser.read('config.ini')
IBMQ.enable_account(parser.get('IBM', 'key'))

# Setup 3 qubits.
q = QuantumRegister(3)
c = ClassicalRegister(3)
qc = QuantumCircuit(q, c)

# Place the qubits into superposition so the qubits no longer hold a distinct value, but instead are both 0 and 1 at the same time (50% 0, 50% 1).
qc.h(q)
# Measuring collapses each qubit into a classical 0/1 bit in register c.
qc.measure(q, c)


# Using the qubits and their random value, form a response.
def answer(result):
    for key in result.keys():
        state = key
    print("The Quantum 8-ball says:")
    if state == '000':
        print('It is certain.')
    elif state == '001':
        print('Without a doubt.')
    elif state == '010':
        print('Yes - deinitely.')
Exemplo n.º 48
0
"""

import os
from configparser import RawConfigParser

# Secrets and flags are read from a system-wide settings file.
config = RawConfigParser()
config.read('/etc/bg_settings/settings.ini')

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = config.get('secrets', 'SECRET_KEY')

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = config.getboolean('django', 'DEBUG')

# '*' accepts any Host header; fine behind a trusted proxy, risky otherwise.
ALLOWED_HOSTS = ['*']

# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
Exemplo n.º 49
0
    else:
        available.sync()


if __name__ == '__main__':
    calendars = []
    busy_statuses = []
    available_statuses = []

    config = RawConfigParser()
    config.read('local.cfg')
    for section in config.sections():
        if section.startswith('caldav'):
            calendars.append(
                CalDavCalendar(
                    DAVClient(config.get(section, 'url'),
                              username=config.get(section, 'username'),
                              password=config.get(section, 'password')),
                    SimpleCalEventsIntervals(dateutil.tz.tzlocal())))
        elif section.startswith('ics'):
            calendars.append(
                IcsCalendar(config.get(section, 'url'),
                            SimpleCalEventsIntervals(dateutil.tz.tzlocal())))
        elif section.startswith('slack'):
            slack = IgnoreDisconnectApi(
                SimpleSlackApi(SlackClient(config.get(section, 'token'))))
            busy_statuses.append(
                SlackStatus(slack, config.get(section, 'busy_text'),
                            config.get(section, 'busy_emoji')))
            available_statuses.append(
                SlackStatus(slack, config.get(section, 'available_text'),
Exemplo n.º 50
0
class APIVersionWriter(TemplateFileWriter):
    """ Provide useful method to write Java files.

    Renders the session, SDK-info, model, fetcher and Maven build files
    for one API version from the templates shipped in the
    ``monolithe.generators.lang.java`` package.
    """
    def __init__(self, monolithe_config, api_info):
        """ Initializes a _JavaSDKAPIVersionFileWriter

            Args:
                monolithe_config: global generator configuration object.
                api_info (dict): API description; must provide the keys
                    "version", "root" and "prefix".
        """
        super(APIVersionWriter, self).__init__(package="monolithe.generators.lang.java")

        self.api_version = api_info["version"]
        self._api_version_string = SDKUtils.get_string_version(self.api_version)
        self.api_root = api_info["root"]
        self.api_prefix = api_info["prefix"]

        self.monolithe_config = monolithe_config
        self._output = self.monolithe_config.get_option("output", "transformer")
        self._name = self.monolithe_config.get_option("name", "transformer")
        self._class_prefix = ""
        self._product_accronym = self.monolithe_config.get_option("product_accronym")
        self._product_name = self.monolithe_config.get_option("product_name")
        self._url = self.monolithe_config.get_option("url", "transformer")

        # The Java package is the reversed hostname of the configured URL
        # plus the SDK name and version, e.g. "com.example.sdk.v5_0".
        self._package_prefix = self._get_package_prefix(self._url)
        self._package_name = self._package_prefix + '.' + self._name + '.' + SDKUtils.get_string_version(self.api_version)
        self._package_subdir = self._package_name.replace('.', '/')

        self._base_output_directory = "%s/java" % (self._output)
        self.output_directory = "%s/src/main/java/%s" % (self._base_output_directory, self._package_subdir)
        self.override_folder = os.path.normpath("%s/__overrides" % self._base_output_directory)
        self.fetchers_path = "/fetchers/"

        # Per-entity attribute defaults and Java type overrides. The ini
        # option keys are attribute names, so option case must be preserved
        # (see _read_ini_file).
        self.attrs_defaults = self._read_ini_file(
            "%s/java/__attributes_defaults/attrs_defaults.ini" % self._output)
        self.attrs_types = self._read_ini_file(
            "%s/java/__attributes_defaults/attrs_types.ini" % self._output)

        library_info = self._read_ini_file(
            "%s/java/__attributes_defaults/library.ini" % self._output)
        self.library_version = library_info.get(self.api_version, "libraryVersion")

        with open("%s/java/__code_header" % self._output, "r") as f:
            self.header_content = f.read()

    @staticmethod
    def _read_ini_file(path):
        """ Read an ini file, keeping option names case-sensitive. """
        parser = RawConfigParser()
        parser.optionxform = str  # must be set before read() to take effect
        parser.read(path)
        return parser

    def perform(self, specifications):
        """ Generate every output file for the given specifications.

            Args:
                specifications (dict): rest_name -> specification mapping.
        """
        self._set_enum_list_local_type(specifications)
        self._write_info()
        self._write_session()
        self._write_build_file()

        # Models and fetchers are independent of each other, so they are
        # generated concurrently via the task manager.
        task_manager = TaskManager()
        for rest_name, specification in specifications.items():
            task_manager.start_task(method=self._write_model, specification=specification, specification_set=specifications)
            task_manager.start_task(method=self._write_fetcher, specification=specification, specification_set=specifications)
        task_manager.wait_until_exit()

    def _write_session(self):
        """ Write SDK session file

            Args:
                version (str): the version of the server

        """
        base_name = "%sSession" % self._product_accronym
        filename = "%s%s.java" % (self._class_prefix, base_name)
        override_content = self._extract_override_content(base_name)

        self.write(destination=self.output_directory,
                   filename=filename,
                   template_name="session.java.tpl",
                   version=self.api_version,
                   product_accronym=self._product_accronym,
                   class_prefix=self._class_prefix,
                   root_api=self.api_root,
                   name=self._name,
                   api_prefix=self.api_prefix,
                   override_content=override_content,
                   header=self.header_content,
                   version_string=self._api_version_string,
                   package_name=self._package_name)

    def _write_info(self):
        """ Write API Info file
        """
        self.write(destination=self.output_directory,
                   filename="SdkInfo.java",
                   template_name="sdkinfo.java.tpl",
                   version=self.api_version,
                   product_accronym=self._product_accronym,
                   class_prefix=self._class_prefix,
                   root_api=self.api_root,
                   api_prefix=self.api_prefix,
                   product_name=self._product_name,
                   name=self._name,
                   header=self.header_content,
                   version_string=self._api_version_string,
                   package_name=self._package_name)

    def _write_model(self, specification, specification_set):
        """ Write autogenerate specification file

            Returns:
                (filename, entity_name) tuple for the generated file.
        """
        filename = "%s%s.java" % (self._class_prefix, specification.entity_name)

        override_content = self._extract_override_content(specification.entity_name)
        # The API root entity extends RestRootObject; everything else RestObject.
        superclass_name = "RestRootObject" if specification.rest_name == self.api_root else "RestObject"

        # Optional per-attribute default values from attrs_defaults.ini.
        defaults = {}
        section = specification.entity_name
        if self.attrs_defaults.has_section(section):
            defaults = {attribute: self.attrs_defaults.get(section, attribute)
                        for attribute in self.attrs_defaults.options(section)}

        self.write(destination=self.output_directory,
                   filename=filename,
                   template_name="model.java.tpl",
                   specification=specification,
                   specification_set=specification_set,
                   version=self.api_version,
                   name=self._name,
                   class_prefix=self._class_prefix,
                   product_accronym=self._product_accronym,
                   override_content=override_content,
                   superclass_name=superclass_name,
                   header=self.header_content,
                   version_string=self._api_version_string,
                   package_name=self._package_name,
                   attribute_defaults=defaults)

        return (filename, specification.entity_name)

    def _write_fetcher(self, specification, specification_set):
        """ Write fetcher

            Returns:
                (filename, entity_name_plural) tuple for the generated file.
        """
        destination = "%s%s" % (self.output_directory, self.fetchers_path)
        base_name = "%sFetcher" % specification.entity_name_plural
        filename = "%s%s.java" % (self._class_prefix, base_name)
        override_content = self._extract_override_content(base_name)

        self.write(destination=destination,
                   filename=filename,
                   template_name="fetcher.java.tpl",
                   specification=specification,
                   specification_set=specification_set,
                   class_prefix=self._class_prefix,
                   product_accronym=self._product_accronym,
                   override_content=override_content,
                   header=self.header_content,
                   name=self._name,
                   version_string=self._api_version_string,
                   package_name=self._package_name)

        return (filename, specification.entity_name_plural)

    def _write_build_file(self):
        """ Write Maven build file (pom.xml)

        """
        self.write(destination=self._base_output_directory,
                   filename="pom.xml",
                   template_name="pom.xml.tpl",
                   version=self.api_version,
                   product_accronym=self._product_accronym,
                   class_prefix=self._class_prefix,
                   root_api=self.api_root,
                   api_prefix=self.api_prefix,
                   product_name=self._product_name,
                   name=self._name,
                   header=self.header_content,
                   version_string=self._api_version_string,
                   package_prefix=self._package_prefix,
                   library_version=self.library_version)

    def _extract_override_content(self, name):
        """ Return the override Java source for *name*, or None.

            A version-specific override file takes precedence over the
            generic one.
        """
        # find override file
        specific_override_path = "%s/%s_%s%s.override.java" % (self.override_folder, self.api_version, self._class_prefix, name.title())
        generic_override_path = "%s/%s%s.override.java" % (self.override_folder, self._class_prefix, name.title())
        final_path = specific_override_path if os.path.exists(specific_override_path) else generic_override_path

        # Read override from file. A context manager ensures the handle is
        # closed deterministically (previous code leaked it).
        override_content = None
        if os.path.isfile(final_path):
            with open(final_path) as override_file:
                override_content = override_file.read()

        return override_content

    def _get_package_prefix(self, url):
        """ Derive the Java package prefix from *url*'s hostname parts,
            reversed (e.g. "api.example.com" -> "com.example.api").
        """
        return '.'.join(reversed(self._get_hostname_parts(url)))

    def _get_hostname_parts(self, url):
        """ Split the URL's hostname into its dot-separated parts,
            dropping any "www" component.
        """
        # urlparse only extracts a hostname when a scheme is present.
        if url.find("http://") != 0:
            url = "http://" + url

        hostname = urlparse(url).hostname
        return [part for part in hostname.split('.') if part != "www"]

    def _set_enum_list_local_type(self, specifications):
        """ This method is needed until get_type_name() is enhanced to include specification subtype and local_name
        """
        for rest_name, specification in specifications.items():
            for attribute in specification.attributes:
                if attribute.type == "enum":
                    # Enum types are named after the capitalized attribute name.
                    enum_type = attribute.local_name[0:1].upper() + attribute.local_name[1:]
                    attribute.local_type = enum_type
                elif attribute.type == "object":
                    # Plain objects map to Object unless attrs_types.ini overrides.
                    attr_type = "Object"
                    if self.attrs_types.has_option(specification.entity_name, attribute.local_name):
                        type_override = self.attrs_types.get(specification.entity_name, attribute.local_name)
                        if type_override:
                            attr_type = type_override
                    attribute.local_type = attr_type
                elif attribute.type == "list":
                    if attribute.subtype == "enum":
                        enum_subtype = attribute.local_name[0:1].upper() + attribute.local_name[1:]
                        attribute.local_type = "java.util.List<" + enum_subtype + ">"
                    elif attribute.subtype == "object":
                        # Untyped object lists fall back to raw JSON nodes.
                        attr_subtype = "com.fasterxml.jackson.databind.JsonNode"
                        if self.attrs_types.has_option(specification.entity_name, attribute.local_name):
                            subtype = self.attrs_types.get(specification.entity_name, attribute.local_name)
                            if subtype:
                                attr_subtype = subtype
                        attribute.local_type = "java.util.List<" + attr_subtype + ">"
                    elif attribute.subtype == "entity":
                        attribute.local_type = "java.util.List<com.fasterxml.jackson.databind.JsonNode>"
                    else:
                        attribute.local_type = "java.util.List<String>"
Exemplo n.º 51
0
from .base import *

# On Travis CI the secret key is injected via the environment; everywhere
# else it is read from the project's secrets.ini file.
if 'TRAVIS' in os.environ:
    SECRET_KEY = os.environ['SECRET_KEY']
else:
    from configparser import RawConfigParser

    config = RawConfigParser()
    secrets_path = os.path.join(BASE_DIR, 'ecom/config/secrets.ini')
    try:
        config.read(secrets_path)
        SECRET_KEY = config.get('django', 'SECRET_KEY')
    except Exception as e:
        # Best-effort: report the problem and continue; Django will fail
        # later with a missing SECRET_KEY if this path is hit.
        print(f'Error reading secrets.ini: {e}')

DEBUG = True

ALLOWED_HOSTS = []


Exemplo n.º 52
0
PROJECT_NAME = 'jet_demo'

# Prefer the system-wide config under /usr/local/etc in production; fall
# back to the in-repo conf/ directory for development.
_conf_filename = '{0}.conf'.format(PROJECT_NAME)
production_config = os.path.join('/usr/local/etc', PROJECT_NAME,
                                 _conf_filename)
development_config = os.path.join(BASE_DIR, 'conf', _conf_filename)

if os.path.exists(production_config):
    config_path = production_config
else:
    config_path = development_config
config.read(config_path)

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.6/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = config.get('common', 'secret_key')

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = config.getboolean('common', 'debug')

ALLOWED_HOSTS = ['*']

LOGIN_REDIRECT_URL = 'home'
LOGOUT_REDIRECT_URL = 'home'
# Application definition

INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
Exemplo n.º 53
0
# Fail fast with a clear message if the agent configuration is missing.
if not os.path.exists(conffile):
    raise Exception("config file " + conffile + " not found")


# read config and launch
config.read(conffile)


# Two MQTT clients: `client` handles incoming messages via the callbacks
# below; `client2` gets no callbacks here -- presumably it is used for
# publishing elsewhere in this script (confirm against later code).
client = mqtt.Client()
client2 = mqtt.Client()

client.on_connect = on_connect
client.on_message = on_message


# Broker credentials and address come from the [agents] section.
username = config.get("agents","username")
password = config.get("agents","password")
mqttbroker = config.get("agents","mqttbroker")

# Port 1883 (plain MQTT), 5-second keepalive.
client.username_pw_set(username,password)
client.connect(mqttbroker, 1883, 5)

client2.username_pw_set(username,password)
client2.connect(mqttbroker, 1883, 5)


# Blocking call that processes network traffic, dispatches callbacks and
# handles reconnecting.
# Other loop*() functions are available that give a threaded interface and a
# manual interface.
Exemplo n.º 54
0
import Adafruit_DHT
import time
import datetime
from rethinkdb import r
from configparser import RawConfigParser

# Read RethinkDB connection settings and open a global (repl) connection.
config = RawConfigParser()
config.read('appconfig.ini')

rdb_host = config.get('rethinkdb', 'host')
rdb_port = config.get('rethinkdb', 'port')
r.connect(rdb_host, rdb_port).repl()

sensor = Adafruit_DHT.DHT11

# Sample the DHT11 on GPIO pin 21 once per second and store each reading.
while True:
    log_time = datetime.datetime.now().strftime("%d-%m-%Y %H:%M:%S")
    humidity, temperature = Adafruit_DHT.read_retry(sensor, 21)

    print(f"temp: {temperature} hum: {humidity} at {log_time}")

    reading = {
        'room_id': 'bedroom_1',
        'temp': temperature,
        'hum': humidity,
        'date': log_time,
    }
    r.db('rpi').table(config.get('rethinkdb', 'temp_hum_table')).insert(reading).run()

    time.sleep(1)
Exemplo n.º 55
0
class Config(object):
    """
    Manages Bonsai configuration environments.

    Configuration information is pulled from different locations. This class
    helps keep it organized. Configuration information comes from environment
    variables, the user `~./.bonsai` file, a local `./.bonsai` file, the
    `./.brains` file, command line arguments, and finally, parameters
    overridden in code.

    An optional `profile` key can be used to switch between different
    profiles stored in the `~/.bonsai` configuration file. The users active
    profile is selected if none is specified.

    Attributes:
        accesskey:     Users access key from the web.
                        (Example: 00000000-1111-2222-3333-000000000001)
        username:      Users login name.
        url:           URL of the server to connect to.
                        (Example: "https://api.bons.ai")
        brain:         Name of the BRAIN to use.
        predict:       True is predicting against a BRAIN, False for training.
        brain_version: Version number of the brain to use for prediction.
        proxy:         Server name and port number of proxy to connect through.
                        (Example: "localhost:9000")

    Example Usage:
        import sys, bonsai_ai
        config = bonsai_ai.Config(sys.argv)
        print(config)
        if config.predict:
            ...

    """
    def __init__(self, argv=sys.argv, profile=None):
        """
        Construct Config object with program arguments.
        Pass in sys.argv for command-line arguments and an
        optional profile name to select a specific profile.

        Arguments:
            argv:    A list of argument strings.
            profile: The name of a profile to select. (optional)

        Note:
            The default for ``argv`` is ``sys.argv`` as captured when this
            module was imported; pass an explicit list to override.
        """
        self.accesskey = None
        self.username = None
        self.url = None
        self.use_color = True

        self.brain = None

        self.predict = False
        self.brain_version = 0
        self._proxy = None
        self._retry_timeout_seconds = 300
        self._pong_interval_seconds = 15.0
        self._network_timeout_seconds = 60

        self.verbose = False
        self.record_file = None
        self.record_enabled = False
        self.file_paths = set()
        self._config_parser = RawConfigParser(allow_no_value=True)
        self._read_config()
        self.profile = profile

        # Sources are parsed lowest-precedence first; each later step may
        # overwrite values set by an earlier one.
        self._parse_env()
        self._parse_config(_DEFAULT)
        self._parse_config(profile)
        self._parse_brains()
        self._parse_args(argv)

        # parse args works differently in 2.7
        if sys.version_info >= (3, 0):
            self._parse_legacy(argv)

    def __repr__(self):
        """ Prints out a JSON formatted string of the Config state. """
        return '{{'\
            '\"profile\": \"{self.profile!r}\", ' \
            '\"accesskey\": \"{self.accesskey!r}\", ' \
            '\"username\": \"{self.username!r}\", ' \
            '\"brain\": \"{self.brain!r}\", ' \
            '\"url\": \"{self.url!r}\", ' \
            '\"use_color\": \"{self.use_color!r}\", ' \
            '\"predict\": \"{self.predict!r}\", ' \
            '\"brain_version\": \"{self.brain_version!r}\", ' \
            '\"proxy\": \"{self.proxy!r}\", ' \
            '\"retry_timeout\": \"{self.retry_timeout!r}\", ' \
            '\"pong_interval\": \"{self.pong_interval!r}\", ' \
            '\"network_timeout\": \"{self.network_timeout!r}\" ' \
            '}}'.format(self=self)

    @property
    def proxy(self):
        # shell-local environment vars get top precedence, falling back to
        # OS-specific registry/configuration values
        if self._proxy is not None:
            return self._proxy
        proxy_dict = getproxies()
        proxy = proxy_dict.get(_ALL_PROXY, None)
        http_proxy = proxy_dict.get(_HTTP_PROXY, None)
        if http_proxy is not None:
            proxy = http_proxy

        # Prefer an HTTPS-specific proxy when the target URL is https.
        if self.url is not None:
            uri = urlparse(self.url)
            if uri.scheme == 'https':
                https_proxy = proxy_dict.get(_HTTPS_PROXY, None)
                if https_proxy is not None:
                    proxy = https_proxy

        return proxy

    @proxy.setter
    def proxy(self, proxy):
        uri = urlparse(proxy)
        # Accessing .port validates the port component: urlparse raises
        # ValueError for a malformed port, rejecting bad proxy strings.
        uri.port
        self._proxy = proxy

    @property
    def record_format(self):
        """ The log record format, as inferred from the extension of
        the log filename"""
        _, fmt = splitext(self.record_file)
        return fmt

    @property
    def retry_timeout(self):
        # Seconds to keep retrying a connection; 0 disables, -1 is forever.
        return self._retry_timeout_seconds

    @retry_timeout.setter
    def retry_timeout(self, value):
        value = int(value)
        if value < -1:
            raise ValueError(
                'Retry timeout must be a positive integer, 0, or -1.')
        self._retry_timeout_seconds = value

    @property
    def pong_interval(self):
        return self._pong_interval_seconds

    @pong_interval.setter
    def pong_interval(self, value):
        value = float(value)
        if value == 0 or (value >= 1 and value < 240):
            self._pong_interval_seconds = value
        else:
            raise ValueError(
                'Pong interval must be equal to 0 (No pongs) or '
                'greater than 1 second and less than 240 seconds.')

    @property
    def network_timeout(self):
        return self._network_timeout_seconds

    @network_timeout.setter
    def network_timeout(self, value):
        value = int(value)
        if value < 1:
            raise ValueError(
                'Network timeout must be a positive integer.')
        self._network_timeout_seconds = value

    def _parse_env(self):
        ''' parse out environment variables used in hosted containers '''
        self.brain = environ.get(_BONSAI_TRAIN_BRAIN, None)
        headless = environ.get(_BONSAI_HEADLESS, None)
        # NOTE(review): self.headless is only assigned when the env var is
        # the exact string 'True'; readers must use getattr/hasattr since
        # the attribute may not exist at all.
        if headless == 'True':
            self.headless = True

    def _parse_config(self, profile):
        ''' parse both the '~/.bonsai' and './.bonsai' config files. '''

        # read the values
        def assign_key(key):
            if self._config_parser.has_option(section, key):
                if key.lower() == _USE_COLOR.lower():
                    self.__dict__[key] = self._config_parser.getboolean(section, key)
                else:
                    self.__dict__[key] = self._config_parser.get(section, key)

        # get the profile: with no explicit profile, follow the pointer
        # stored under the DEFAULT section's 'profile' option.
        section = _DEFAULT
        if profile is None:
            if self._config_parser.has_option(_DEFAULT, _PROFILE):
                section = self._config_parser.get(_DEFAULT, _PROFILE)
                self.profile = section
        else:
            section = profile

        assign_key(_ACCESSKEY)
        assign_key(_USERNAME)
        assign_key(_URL)
        assign_key(_PROXY)
        assign_key(_USE_COLOR)

        # if url is none set it to default bonsai api url
        if self.url is None:
            self.url = _DEFAULT_URL

    def _parse_brains(self):
        ''' parse the './.brains' config file
            Example:
                {"brains": [{"default": true, "name": "test"}]}
        '''
        data = {}
        try:
            with open(_DOT_BRAINS) as file:
                data = json.load(file)

                # parse file now: first brain marked default wins
                for brain in data['brains']:
                    if brain['default'] is True:
                        self.brain = brain['name']
                        return

        # except FileNotFoundError: python3
        except IOError as e:
            return

    def _parse_legacy(self, argv):
        ''' print support for legacy CLI arguments '''
        # allow_abbrev only exists on Python 3's argparse
        if sys.version_info >= (3, 0):
            optional = ArgumentParser(
                description="",
                allow_abbrev=False,
                add_help=False)
        else:
            optional = ArgumentParser(
                description="",
                add_help=False)

        optional.add_argument(
            '--legacy',
            action='store_true',
            help='Legacy command line options')
        optional.add_argument('--train-brain', help=_TRAIN_BRAIN_HELP)
        optional.add_argument('--predict-brain', help=_PREDICT_BRAIN_HELP)
        optional.add_argument('--predict-version', help=_PREDICT_VERSION_HELP)
        optional.add_argument('--recording-file', help=_RECORDING_FILE_HELP)
        args, remainder = optional.parse_known_args(argv)

        if args.train_brain is not None:
            self.brain = args.train_brain
            self.predict = False

        if args.predict_version is not None:
            self.predict = True
            if args.predict_version == "latest":
                self.brain_version = 0
            else:
                self.brain_version = int(args.predict_version)

    def _parse_args(self, argv):
        ''' parser command line arguments '''
        if sys.version_info >= (3, 0):
            parser = ArgumentParser(allow_abbrev=False)
        else:
            parser = ArgumentParser()

        parser.add_argument(
            '--accesskey', '--access-key', help=_ACCESS_KEY_HELP)
        parser.add_argument('--username', help=_USERNAME_HELP)
        parser.add_argument('--url', help=_URL_HELP)
        parser.add_argument('--proxy', help=_PROXY_HELP)
        parser.add_argument('--brain', help=_BRAIN_HELP)
        parser.add_argument(
            '--predict',
            help=_PREDICT_HELP,
            nargs='?',
            const='latest',
            default=None)
        parser.add_argument('--verbose', action='store_true',
                            help=_VERBOSE_HELP)
        parser.add_argument('--performance', action='store_true',
                            help=_PERFORMANCE_HELP)
        parser.add_argument('--log', nargs='+', help=_LOG_HELP)
        parser.add_argument('--record', nargs=1, default=None,
                            help=_RECORD_HELP)
        parser.add_argument('--retry-timeout', type=int,
                            help=_RETRY_TIMEOUT_HELP)
        parser.add_argument('--pong-interval', type=float,
                            help=_PONG_INTERVAL_HELP)
        parser.add_argument('--network-timeout', type=int,
                            help=_NETWORK_TIMEOUT_HELP)

        # Unrecognized args are ignored (remainder unused) so application
        # specific flags can coexist on the same command line.
        args, remainder = parser.parse_known_args(argv[1:])

        if args.accesskey is not None:
            self.accesskey = args.accesskey

        if args.username is not None:
            self.username = args.username

        if args.url is not None:
            self.url = args.url

        if args.proxy is not None:
            self.proxy = args.proxy

        if args.brain is not None:
            self.brain = args.brain

        if args.verbose:
            self.verbose = args.verbose
            log.set_enable_all(args.verbose)

        if args.performance:
            # logging::log().set_enabled(true);
            # logging::log().set_enable_all_perf(true);
            pass

        if args.log is not None:
            for domain in args.log:
                log.set_enabled(domain)

        if args.record:
            self.record_file = args.record[0]
            self.record_enabled = True

        if args.retry_timeout is not None:
            self.retry_timeout = args.retry_timeout

        if args.pong_interval is not None:
            self.pong_interval = args.pong_interval

        if args.network_timeout is not None:
            self.network_timeout = args.network_timeout

        brain_version = None
        if args.predict is not None:
            if args.predict == "latest":
                brain_version = 0
            else:
                brain_version = args.predict
            self.predict = True

        # update brain_version after all args have been processed
        if brain_version is not None:
            brain_version = int(brain_version)
            if brain_version < 0:
                raise ValueError(
                    'BRAIN version number must be'
                    'positive integer or "latest".')
            self.brain_version = brain_version

    def _config_files(self):
        """ Paths searched for config: home directory first, then CWD. """
        return [join(expanduser('~'), _DOT_BONSAI), join('.', _DOT_BONSAI)]

    def _read_config(self):
        # verify that at least one of the config files exists
        # as RawConfigParser ignores missing files
        found = False
        config_files = self._config_files()
        for path in config_files:
            if os.access(path, os.R_OK):
                found = True
                break
        if not found:
            # Write empty .bonsai to disk if no file is found
            self._write_dot_bonsai()

        self._config_parser.read(config_files)
        for path in config_files:
            if os.path.exists(path):
                self.file_paths.add(path)

    def _set_profile(self, section):
        # Create section if it does not exist
        if not self._config_parser.has_section(section) and section != _DEFAULT:
            self._config_parser.add_section(section)

        # Set profile in class and config
        self.profile = section
        if section == _DEFAULT:
            self._config_parser.set(_DEFAULT, _PROFILE, 'DEFAULT')
        else:
            self._config_parser.set(_DEFAULT, _PROFILE, str(section))

    def _write_dot_bonsai(self):
        """ Writes to .bonsai in users home directory """
        config_path = join(expanduser('~'), _DOT_BONSAI)
        with open(config_path, 'w') as f:
            self._config_parser.write(f)

    def _websocket_url(self):
        """ Converts api url to websocket url """
        api_url = self.url
        parsed_api_url = urlparse(api_url)

        # http -> ws, https -> wss; any other scheme is unsupported.
        if parsed_api_url.scheme == 'http':
            parsed_ws_url = parsed_api_url._replace(scheme='ws')
        elif parsed_api_url.scheme == 'https':
            parsed_ws_url = parsed_api_url._replace(scheme='wss')
        else:
            return None
        ws_url = urlunparse(parsed_ws_url)
        return ws_url

    def _has_section(self, section):
        """Checks the configuration to see if section exists."""
        if section == _DEFAULT:
            return True
        return self._config_parser.has_section(section)

    def _section_list(self):
        """ Returns a list of sections in config """
        return self._config_parser.sections()

    def _section_items(self, section):
        """ Returns a dictionary of items in a section """
        return self._config_parser.items(section)

    def _defaults(self):
        """ Returns an ordered dict of items in the DEFAULT section """
        return self._config_parser.defaults()

    def _update(self, **kwargs):
        """
        Updates the configuration with the Key/value pairs in kwargs and
        writes to the .bonsai file in the users home directory.
        """
        if not kwargs:
            return
        for key, value in kwargs.items():
            if key.lower() == _PROFILE.lower():
                self._set_profile(value)
            else:
                try:
                    self._config_parser.set(self.profile, key, str(value))
                except NoSectionError as e:
                    # Create and set default profile if it does not exist in .bonsai
                    self._set_profile(self.profile)
                    self._config_parser.set(self.profile, key, str(value))
        self._write_dot_bonsai()
        self._parse_config(self.profile)
Exemplo n.º 56
0
def read_yarrharr_conf(files, namespace):
    """
    Read the given configuration files, mutating the given dictionary to
    contain Django settings.

    Defaults are loaded first (from DEFAULT_CONF), then overridden by
    each file in *files*; later files win.

    :raises UnreadableConfError:
        if any of the given files are not read
    """
    conf = RawConfigParser()
    conf.read_file(StringIO(DEFAULT_CONF), "<defaults>")
    # RawConfigParser.read() silently skips unreadable files and returns
    # only the ones it parsed, so diff the sets to fail loudly instead.
    files_read = conf.read(files)
    files_unread = set(files) - set(files_read)
    if files_unread:
        raise UnreadableConfError(files_unread)

    namespace["DEBUG"] = conf.getboolean("yarrharr", "debug")

    namespace["DATABASES"] = {
        "default": {
            "ENGINE": conf.get("db", "engine"),
            "NAME": conf.get("db", "name"),
            "USER": conf.get("db", "user"),
            "PASSWORD": conf.get("db", "password"),
            "HOST": conf.get("db", "host"),
            "PORT": conf.get("db", "port"),
        },
    }
    namespace["ATOMIC_REQUESTS"] = True
    namespace["DEFAULT_AUTO_FIELD"] = "django.db.models.AutoField"

    external_url = urlparse(conf.get("yarrharr", "external_url"))
    if external_url.path != "":
        # Ensure that the URL doesn't contain a path, as some day we will
        # probably want to add the ability to add a prefix to the path.
        msg = "external_url must not include path: remove {!r}".format(
            external_url.path)
        raise ValueError(msg)
    # Only the configured external host may address this Django instance.
    namespace["ALLOWED_HOSTS"] = [external_url.hostname]

    # The proxied config is an enumeration to ensure it can be extended to
    # support the Forwarded header (RFC 7239) in the future. We require explicit
    # configuration rather than auto-detecting these headers because the
    # frontend proxy *must* be configured to strip whatever header is in use,
    # lest clients be able to forge it.
    proxied = conf.get("yarrharr", "proxied")
    if proxied not in {"no", "x-forwarded"}:
        msg = "proxied must be 'no' or 'x-forwarded', not {!r}".format(proxied)
        raise ValueError(msg)
    namespace["USE_X_FORWARDED_HOST"] = proxied == "x-forwarded"

    # Config for the Twisted production server.
    namespace["SERVER_ENDPOINT"] = conf.get("yarrharr", "server_endpoint")

    namespace["ROOT_URLCONF"] = "yarrharr.urls"
    namespace["LOGIN_URL"] = "login"
    namespace["LOGIN_REDIRECT_URL"] = "home"
    namespace["LOGOUT_URL"] = "logout"

    namespace["LANGUAGE_CODE"] = "en-us"
    namespace["USE_I18N"] = True
    namespace["USE_TZ"] = True
    namespace["TIME_ZONE"] = "UTC"

    namespace["STATIC_ROOT"] = conf.get("yarrharr", "static_root")
    namespace["STATIC_URL"] = conf.get("yarrharr", "static_url")
    namespace["STATICFILES_FINDERS"] = (
        "django.contrib.staticfiles.finders.AppDirectoriesFinder", )

    # Template context processors. This list is missing most of the processors
    # in the default list as Yarrharr's templates don't use them.
    context_processors = [
        "django.contrib.auth.context_processors.auth",
        "yarrharr.context_processors.csp",
    ]
    if namespace["DEBUG"]:
        # When in debug mode, display SQL queries for requests coming from the
        # loopback interface.
        context_processors.append("django.template.context_processors.debug")
        namespace["INTERNAL_IPS"] = ["127.0.0.1"]

    namespace["TEMPLATES"] = [{
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [],
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": context_processors
        },
    }]

    namespace["SECRET_KEY"] = conf.get("secrets", "secret_key")
    namespace["X_FRAME_OPTIONS"] = "DENY"

    namespace["MIDDLEWARE"] = (
        "django.middleware.common.CommonMiddleware",
        "django.contrib.sessions.middleware.SessionMiddleware",
        "django.middleware.csrf.CsrfViewMiddleware",
        "django.contrib.auth.middleware.AuthenticationMiddleware",
        "django.middleware.clickjacking.XFrameOptionsMiddleware",
    )

    namespace[
        "SESSION_ENGINE"] = "django.contrib.sessions.backends.signed_cookies"
    namespace["SESSION_COOKIE_HTTPONLY"] = True
    # Mark cookies secure only when the site is actually served over TLS.
    namespace["SESSION_COOKIE_SECURE"] = external_url.scheme == "https"
    namespace["CSRF_COOKIE_SECURE"] = external_url.scheme == "https"
    # scheme + netloc of the external URL, path/query/fragment stripped.
    namespace["CSRF_TRUSTED_ORIGINS"] = [
        urlunparse(external_url[0:2] + ("", "", "", ""))
    ]

    namespace["WSGI_APPLICATION"] = "yarrharr.wsgi.application"

    namespace["INSTALLED_APPS"] = (
        "django.contrib.auth",
        "django.contrib.contenttypes",
        "django.contrib.sessions",
        "django.contrib.staticfiles",
        "yarrharr",
    )

    if "runserver" not in sys.argv:
        # Disable Django's logging configuration stuff (except when running under
        # the dev server).
        namespace["LOGGING_CONFIG"] = None
        namespace["YARRHARR_SCRIPT_NONCE"] = True
    else:
        # Under the dev server send the same headers as the real Twisted server.
        # See yarrharr.application.Root.
        namespace["SECURE_REFERRER_POLICY"] = "same-origin"
        namespace["SECURE_CROSS_ORIGIN_OPENER_POLICY"] = "same-origin"
        namespace["YARRHARR_SCRIPT_NONCE"] = False

    return conf
Exemplo n.º 57
0
    def upload(self, subdir, filepath):
        """Upload the package at *filepath* to the distribution server.

        Depending on ``self.use_package_upload``, the file is either
        POSTed over HTTP to cgdo.ru, or pushed via FTP (winscp on
        Windows, curl elsewhere). Credentials are read from ~/.passwd.
        In test mode the FTP command is printed instead of uploading.
        """
        if self.use_package_upload == 'http':
            import requests

            try:
                from configparser import RawConfigParser
            except ImportError:
                # Python 2 fallback.
                from ConfigParser import RawConfigParser

            config = RawConfigParser()
            config.read(os.path.expanduser("~/.passwd"))

            data = {
                "password": config.get('cgdo.ru', 'upload_password'),
                "subdir": subdir,
            }

            proxies = {}
            if self.use_proxy:
                proxies = {
                    "http": self.use_proxy,
                    "https": self.use_proxy,
                }

            sys.stdout.write("Uploading package '%s' to '%s'...\n" %
                             (filepath, subdir))
            # Context manager ensures the package file is closed even if
            # the POST raises.
            with open(filepath, "rb") as package_file:
                files = {
                    "file": package_file,
                }
                requests.post("http://cgdo.ru/upload",
                              files=files,
                              data=data,
                              proxies=proxies)

        elif self.use_package_upload == 'ftp':
            try:
                from configparser import ConfigParser
            except ImportError:
                # Python 2 fallback.
                from ConfigParser import ConfigParser

            config = ConfigParser()
            config.read(os.path.expanduser("~/.passwd"))

            # Nightlies are grouped by date; NOTE this overrides the
            # *subdir* argument for the FTP path.
            now = datetime.datetime.now()
            subdir = now.strftime("%Y%m%d")

            cmd = None

            if sys.platform == 'win32':
                # Drive winscp with a generated batch script.
                ftpScriptFilepath = os.path.join(
                    tempfile.gettempdir(), "blender_for_vray_upload.txt")

                with open(ftpScriptFilepath, 'w') as f:
                    f.write('option batch abort\n')
                    f.write('option confirm off\n')
                    f.write(
                        'open ftp://%s:%s@%s -rawsettings ProxyMethod=%s ProxyHost=%s ProxyPort=%s\n'
                        % (
                            config.get('nightlies.ftp', 'user'),
                            config.get('nightlies.ftp', 'pass'),
                            config.get('nightlies.ftp', 'host'),
                            config.get('nightlies.ftp', 'proxy_type'),
                            config.get('nightlies.ftp', 'proxy_host'),
                            config.get('nightlies.ftp', 'proxy_port'),
                        ))
                    f.write('option transfer binary\n')
                    f.write('put %s /%s/\n' % (filepath, subdir))
                    f.write('exit\n')
                    f.write('\n')

                cmd = ['winscp']
                cmd.append('/passive')
                cmd.append('/script="%s"' % ftpScriptFilepath)

                if not self.mode_test:
                    os.system(' '.join(cmd))

            else:
                # Non-Windows: upload with curl, optionally via proxy.
                cmd = ['curl']
                cmd.append('--no-epsv')
                if self.use_proxy:
                    cmd.append('--proxy')
                    cmd.append(self.use_proxy)
                cmd.append('--user')
                cmd.append('%s:%s' % (
                    config.get('nightlies.ftp', 'user'),
                    config.get('nightlies.ftp', 'pass'),
                ))
                cmd.append('--upload-file')
                cmd.append(filepath)
                cmd.append('ftp://%s/%s/' % (
                    config.get('nightlies.ftp', 'host'),
                    subdir,
                ))

                if not self.mode_test:
                    subprocess.call(cmd)

            if self.mode_test:
                print(' '.join(cmd))
Exemplo n.º 58
0
    def __init__(self,
                 simulation=True,
                 hardware=False,
                 server='',
                 diminution=1,
                 factor_sim=40,
                 config='',
                 interactive=True,
                 webserver='',
                 joystick=''):
        """Initialise the LED-table controller.

        :param simulation: launch the on-screen simulator
        :param hardware: also drive the physical LED hardware
        :param server: remote table address as 'ip:port' (or just 'ip')
        :param diminution: brightness scaling factor (presumably in (0, 1]
            — TODO confirm against Arbalink)
        :param factor_sim: pixel-size multiplier for the simulator window
        :param config: path to a JSON hardware-config file; '' falls back
            to the value in config/default.cfg
        :param interactive: NOTE(review): unused in this constructor —
            confirm whether callers rely on it elsewhere
        :param webserver: NOTE(review): the parameter is unused here; the
            'webserver' entry of the JSON config is used instead
        :param joystick: path to a JSON joystick-mapping file; '' falls
            back to the value in config/default.cfg
        :raises IOError: if a config/joystick file is missing or unreadable
        :raises ValueError: on invalid JSON or a malformed server address
        """
        # Resolve missing file paths from the packaged default.cfg.
        if config == '' or joystick == '':
            cfg_path = path.join(path.dirname(__file__), '..', 'config',
                                 'default.cfg')
            cfg_parser = RawConfigParser()
            cfg_parser.read(cfg_path)
            if config == '':
                config = cfg_parser.get('DEFAULT', 'config')
            if joystick == '':
                joystick = cfg_parser.get('DEFAULT', 'joystick')

        # Bare filenames are looked up inside the package's config/ dir.
        if not path.isfile(config):
            config = path.join(path.dirname(__file__), '..', 'config', config)
        if not path.isfile(config):
            raise IOError("Config file '{}' not found".format(config))

        if not path.isfile(joystick):
            joystick = path.join(path.dirname(__file__), '..', 'config',
                                 joystick)
        if not path.isfile(joystick):
            raise IOError(
                "Joystick mapping file '{}' not found".format(joystick))

        # Both files are JSON; `load` here is presumably json.load —
        # TODO confirm against the file's imports.
        try:
            with open(config, 'r') as f:
                self.config = load(f)
        except ValueError as e:
            raise ValueError(
                "Your configuration file {} has an incorrect format, make sure it is a valid JSON. {}"
                .format(config, str(e)))
        except IOError as e:
            raise IOError("Configuration file {} can't be read. {}".format(
                config, str(e)))

        try:
            with open(joystick, 'r') as f:
                self.joystick = load(f)
        except ValueError as e:
            raise ValueError(
                "Your joystick mapping file {} has an incorrect format, make sure it is a valid JSON. {}"
                .format(joystick, str(e)))
        except IOError as e:
            raise IOError("Joystick mapping file {} can't be read. {}".format(
                joystick, str(e)))

        self._simulation = simulation
        self._hardware = hardware
        self._server = server

        self.diminution = diminution
        # Table dimensions come from the 2D 'mapping' matrix (rows x cols).
        self.height = len(self.config['mapping'])
        self.width = len(self.config['mapping'][0]) if self.height > 0 else 0

        # 'automapping' = (width, height, option) generates the mapping
        # matrix row-major instead of reading it literally from the config.
        if ('automapping' in self.config):
            (w, h, opt) = self.config['automapping']
            self.config['mapping'] = [[x + w * y for x in range(w)]
                                      for y in range(h)]
            self.height = h
            self.width = w
            if (opt == 'ZIGZAG'
                ):  # LEDs chained in zigzag: reverse the odd rows
                for x in [k for k in range(h) if k % 2]:
                    self.config['mapping'][x] = self.config['mapping'][x][::-1]
            if (opt == 'ZAGZIG'
                ):  # LEDs chained in zigzag: reverse the even rows
                for x in [k for k in range(h) if not k % 2]:
                    self.config['mapping'][x] = self.config['mapping'][x][::-1]

        if ('webserver' in self.config):
            self._webserver = self.config["webserver"]

        self.user_model = Model(self.height, self.width, 'black')
        self.touch = CapacitiveTouch(config, self.height, self.width)
        self.sdl_lock = RLock(
        )  # Temporary hack to lock pygame calls using SDL before we create a centralized event manager for joystick and so on

        self.events = Events(self, True)

        self._models = {'user': self.user_model, 'touch': self.touch.model}

        # Start connection to real, simulated, or network LED table
        self.arbasim = None
        self.arbalink = None
        self.arbaclient = None

        if self._simulation:
            print("Launching simulation for %d x %d with a factor of %d" %
                  (self.height, self.width, factor_sim))
            self.arbasim = Simulator(self, self.height * factor_sim,
                                     self.width * factor_sim)

        if self._hardware:
            self.arbalink = Arbalink.factory(self)

        # Remote table: 'ip:port' or bare 'ip' (default port).
        if len(self._server) > 0:
            server = self._server.split(':')
            if len(server) == 2:
                self.arbaclient = Arbaclient(self, server[0], int(server[1]))
            elif len(server) == 1:
                self.arbaclient = Arbaclient(self, server[0])
            else:
                raise ValueError('Incorrect server address, use ip:port or ip')
        if ('webserver' in self.config):
            webserver = self.config["webserver"].split(':')
            if len(webserver) == 2:
                self.webserver = Arbaweb(self, webserver[0], int(webserver[1]))
            elif len(webserver) == 1:
                self.webserver = Arbaweb(self, webserver[0])
            else:
                raise ValueError(
                    'Incorrect webserver address, use ip:port or ip')
Exemplo n.º 59
0
def readConfigData(section, key):
    """Fetch the value stored under *key* in *section* of Config.cfg."""
    config_file = "./ConfigurationFiles/Config.cfg"
    parser = RawConfigParser()
    parser.read(config_file)
    return parser.get(section, key)
Exemplo n.º 60
0
def fetchElementLocators(section, key):
    """Look up the element locator stored under *key* in *section* of Elements.cfg."""
    locators_file = "./ConfigurationFiles/Elements.cfg"
    parser = RawConfigParser()
    parser.read(locators_file)
    return parser.get(section, key)