Example #1
File: i18n.py Project: edupals/omnitux
def get_text_from_dict(dict, language):

    text_result = ""

    try:
        text_result = dict[language]
    except KeyError:
        common.warn("Missing translation in language " + language)

        try:
            text_result = dict[constants.fallback_language]

        except KeyError:

            # Fall back to the first supported language that has a translation
            for lang in constants.supported_languages:
                try:
                    text_result = dict[lang]
                    break
                except KeyError:
                    continue

            if (text_result == ""):
                text_result = "Missing translation"

    return text_result
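A minimal usage sketch of the fallback chain above, assuming the function is pasted into the same module as these stubs; the common and constants objects and the translation dictionary are invented stand-ins, not the project's real modules.

import types

common = types.SimpleNamespace(warn=print)                 # stub for the project's common.warn
constants = types.SimpleNamespace(fallback_language="en",  # assumed values
                                  supported_languages=["en", "fr", "es"])

greetings = {"fr": "Bonjour", "es": "Hola"}

# "de" is missing and so is the "en" fallback, so the first supported
# language that has an entry ("fr") is returned, after a warning.
print(get_text_from_dict(greetings, "de"))  # Bonjour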
Example #2
File: i18n.py Project: Trilarion/omnitux2
def get_text_from_dict(dict, language) :
	
	text_result = ""
	
	try :
		text_result = dict[language] 
	except KeyError :
		common.warn("Missing translation in language "+language)
		
		try :
			text_result = dict[constants.fallback_language]
			
		except KeyError :
			
			# fall back to the first supported language that has a translation
			for lang in constants.supported_languages :
				try :
					text_result = dict[lang]
					break
				except KeyError :
					continue
					
			if (text_result == "") :
				text_result = "Missing translation"			
	
	return text_result
Example #3
    def _get_files(self):
        extensions = [
            "aac", "ac3", "avi", "aiff", "bat", "bmp", "exe", "flac", "gif",
            "jpeg", "jpg", "mov", "m3u", "m4p", "mp2", "mp3", "mp4", "mpeg4",
            "midi", "msi", "ogg", "png", "txt", "sh", "wav", "wma", "vqf"
        ]

        common.warn("Only searching for prohibited files in user directories!")
        if "Linux" in plugin.get_os():
            directory = "/home"
        elif "Windows" in plugin.get_os():
            directory = "C:\\Users"
        else:
            return []

        common.info(
            "Searching {} for prohibited files. This may take a while...".
            format(directory))

        files = []

        for extension in extensions:
            x = glob.glob(os.path.join(directory, "**/*." + extension),
                          recursive=True)
            files.extend(x)

        return files
Example #4
def _list_ubuntu_packages():
    output = common.run("apt list --installed")
    package_list = output.split("\n")
    packages = []

    for p in package_list:
        p = p.strip()

        # Skip empty package names or the string 'Listing...'
        if p == "" or p == "Listing...":
            continue

        if re.search(r".*\/.* \[.*\]", p) is None:
            common.warn("Unexpected package: {}".format(p))
            continue

        # Get text up to first '/' (package name)
        name = p.partition("/")[0]
        # Get package flags
        flags = re.search(r".*\/.* \[(.*)\]", p).group(1).split(",")

        # If the package was installed automatically (i.e. system package OR dependency)
        if "automatic" in flags:
            continue

        packages.append(name)

    return packages
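For reference, a minimal sketch of what the partition and regex above extract from a single line of apt list --installed output; the sample line is an assumed typical format, not output captured from a real run.

import re

sample = "vim/focal,now 2:8.1.2269-1ubuntu5 amd64 [installed,automatic]"

name = sample.partition("/")[0]                                     # "vim"
flags = re.search(r".*\/.* \[(.*)\]", sample).group(1).split(",")   # ["installed", "automatic"]

# "automatic" is present, so _list_ubuntu_packages() would skip this entry
# as a dependency rather than report it as a user-installed package.
print(name, flags)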
Example #5
File: i18n.py Project: Trilarion/omnitux2
def get_text(language, key, variables = None) :
	
	text_result = ""
	
	try :
		text_result = text[language][key]
		
	except KeyError :
		common.warn("Missing translation in language " + language + " for key " + str(key))

		try :
			text_result = text[constants.fallback_language][key]
			
		except KeyError :
			# fall back to the first supported language that has this key
			for lang in constants.supported_languages :
				try :
					text_result = text[lang][key]
					break
				except KeyError :
					continue
					
			if (text_result == "") :
				text_result = "Missing translation in i18n.py file"
	
	if variables is not None :
		index = 0
		for variable in variables :
			(before, foo, after) = text_result.rpartition("[" + str(index) + "]")
			text_result = before + variable + after
			
			index = index + 1

	return text_result
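A small self-contained sketch of the [0], [1] placeholder substitution performed at the end of get_text; the template string and variables are invented for illustration.

text_result = "Level [0] of [1]"   # invented template with positional placeholders
variables = ["3", "10"]

index = 0
for variable in variables:
    before, _, after = text_result.rpartition("[" + str(index) + "]")
    text_result = before + variable + after
    index += 1

print(text_result)  # Level 3 of 10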
Example #6
 def cancel_snapshot(self):
     if not self.new_session:
         warn("Tried to cancel non-active new snapshot")
         return
     try:
         self.new_session.cancel()
     finally:
         self.new_session = None
Example #7
File: Debug.py Project: zztalker/ulipad
 def traceback(self, tb=None):
     if tb:
         message = traceback.format_exception(*tb)
     else:
         message = traceback.format_exception(*sys.exc_info())
     self.output('[Traceback]', ''.join(message))
     if debug and debug.is_debug() and Globals.mainframe and Globals.mainframe.IsShown():
         common.warn("There is some thing wrong as running the program")
Example #8
File: front.py Project: Frizlab/boar
 def cancel_snapshot(self):
     if not self.new_session:
         warn("Tried to cancel non-active new snapshot")
         return
     try:
         self.new_session.cancel()
     finally:
         self.new_session = None
Example #9
File: Debug.py Project: LaoMa3953/ulipad
 def traceback(self, tb=None):
     if tb:
         message = traceback.format_exception(*tb)
     else:
         message = traceback.format_exception(*sys.exc_info())
     self.output('[Traceback]', ''.join(message))
     if debug and debug.is_debug() and Globals.mainframe and Globals.mainframe.IsShown():
         common.warn("There is some thing wrong as running the program")
Example #10
 def _readFile(self, fileLocation):
     if xbmcvfs.exists(fileLocation):
         try:
             self.doc = minidom.parse(fileLocation)
             self.settingsFile = fileLocation
         except ExpatError as err:
             common.warn("Can't read " + fileLocation + ": %s" % str(err),
                         "SettingsManager")
Example #11
File: pam.py Project: limefax/centsecure
    def _set_password_lockout(self):
        paths = ["/etc/pam.d/system-authand", "/etc/pam.d/password-auth"]
        for path in paths:
            if os.path.isfile(path):
                common.warn(
                    "{} exists, needs checking (password lockout)".format(
                        path))

        path = "/etc/pam.d/common-auth"
        common.backup(path)

        with open(path) as in_file:
            lines = in_file.read().split("\n")

        permit_index = None

        # used for fedora based distros
        #     text = """auth required pam_tally2.so onerr=fail audit silent deny=5 unlock_time=900
        # auth required pam_faillock.so preauth audit silent deny=5 unlock_time=900
        # auth sufficient pam_unix.so
        # auth [default=die] pam_faillock.so authfail audit deny=5 unlock_time=900
        # auth sufficient pam_faillock.so authsucc audit deny=5 unlock_time=900"""

        text = """auth required pam_tally2.so onerr=fail audit silent deny=5 unlock_time=900"""

        for index, line in enumerate(lines):
            if "pam_faillock.so" in line:
                common.warn(
                    "Found faillock, needs checking (password lockout)")
            elif "pam_permit.so" in line:
                permit_index = index

        # Handle the missing-marker case first; indexing with a None
        # permit_index would raise a TypeError.
        if permit_index is None:
            common.error("Error {} not formatted as expected".format(path))
            return

        if text == lines[permit_index - 1]:
            common.debug("Tally already exists")
        else:
            lines.insert(permit_index, text)

        with open(path, "w") as out_file:
            out_file.write("\n".join(lines))

        # Ensure user doesn't get locked out
        user = common.input_text("Enter current user")
        common.run("pam_tally2 -u {} --reset".format(user))
        # Every time they use sudo the tally gets reset
        with open("/home/{}/.bashrc".format(user), "a") as out_file:
            out_file.write(
                "\nalias sudo='sudo pam_tally2 -u {} --reset >/dev/null; sudo '\n"
                .format(user))
        common.reminder(
            "You need to reload .bashrc in all currently used terminals: source ~/.bashrc"
        )

        common.debug("Set Password Lockout")
Example #12
def check_fuel_plugins_dir(dir):
    msg = None
    if not dir:
        msg = 'Fuel Plugins Directory not specified!'
    elif not os.path.isdir(dir):
        msg = 'Fuel Plugins Directory does not exist!'
    elif not os.listdir(dir):
        msg = 'Fuel Plugins Directory is empty!'
    if msg:
        warn('%s No external plugins will be installed!' % msg)
Example #13
def check_fuel_plugins_dir(dir):
    msg = None
    if not dir:
        msg = 'Fuel Plugins Directory not specified!'
    elif not os.path.isdir(dir):
        msg = 'Fuel Plugins Directory does not exist!'
    elif not os.listdir(dir):
        msg = 'Fuel Plugins Directory is empty!'
    if msg:
        warn('%s No external plugins will be installed!' % msg)
Example #14
def _move(entry, source_root_path, dest_root_path):
    entry = entry.strip("/")
    source_path = join(source_root_path, entry)
    dest_path = join(dest_root_path, entry)

    if not exists(source_path):
        warn("entry does not exist: " + entry)
        return

    mkdir(dirname(dest_path))
    shutil.move(source_path, dest_path)
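The bare names used above (join, exists, dirname, mkdir, warn) are presumably imported elsewhere in the module; a self-contained sketch under that assumption, with os.makedirs standing in for the project's mkdir helper and print for warn:

import os
import shutil
from os.path import dirname, exists, join


def move_entry(entry, source_root_path, dest_root_path):
    # Move one relative entry from source_root_path into dest_root_path.
    entry = entry.strip("/")
    source_path = join(source_root_path, entry)
    dest_path = join(dest_root_path, entry)

    if not exists(source_path):
        print("entry does not exist: " + entry)
        return

    os.makedirs(dirname(dest_path), exist_ok=True)
    shutil.move(source_path, dest_path)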
Example #15
    def execute(self):
        """Secures Network."""
        common.warn("Assuming this is a host machine")

        self._secure_ipv4()
        self._secure_ipv6()
        self._enable_tcp_wrappers()
        self._configure_hosts()
        self._secure_protocols()
        self._secure_firewall()
        self._remove_interfaces()
Example #16
def _move(entry, source_root_path, dest_root_path):
    entry = entry.strip("/")
    source_path = join(source_root_path, entry)
    dest_path = join(dest_root_path, entry)

    if not exists(source_path):
        warn("entry does not exist: " + entry)
        return

    mkdir(dirname(dest_path))
    shutil.move(source_path, dest_path)
Example #17
 def _nice(self):
     if self.__NICE is None:
         self.__NICE = 10
     if self.__NICE < -20:
         self.__NICE = -20
     if self.__NICE > 19:
         self.__NICE = 19
     try:
         os.nice(self.__NICE)
         common.notice(
             'Set lowest priority for the global process execution')
     except Exception as e:
         common.warn('Setting niceness failed: %s' % str(e))
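One caveat with the snippet above: os.nice() adds an increment to the current niceness rather than setting an absolute value, so clamping the stored number only bounds the increment. A sketch of setting an absolute niceness instead (Unix-only; an alternative shown for illustration, not the project's code):

import os

target = 10                          # desired absolute niceness
target = max(-20, min(19, target))   # clamp to the valid range

try:
    # 0 means "the calling process"; os.setpriority is POSIX-only (Python 3.3+).
    os.setpriority(os.PRIO_PROCESS, 0, target)
    print('Set priority %d for the current process' % target)
except OSError as e:
    print('Setting niceness failed: %s' % str(e))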
Example #18
 def run(self, mode=-1):
     result = True
     if not common.any2bool(
             xbmc.getInfoLabel("Window(%s).Property(%s)" %
                               (10000, "SystemRecovery.Running"))):
         # set windows setting to true
         window = xbmcgui.Window(10000)
         window.setProperty("SystemRecovery.Running", "true")
         if self.remoteFS.RootPath is not None and self.remoteFS.RootPath != '':
             common.debug(
                 "Local directory: " + self.localFS.RootPath +
                 ", Remote directory: " + self.remoteFS.RootPath,
                 "SystemRecovery")
             if mode == self.Backup:
                 if common.setting("compress_backups"):
                     # delete old temp file
                     if self.localFS.exists(
                             common.path('special://temp/backup.zip')):
                         self.localFS.rmfile(
                             common.path('special://temp/backup.zip'))
                     # save the remote file system and use the zip vfs
                     self.savedRemoteFS = self.remoteFS
                     self.remoteFS = ZipFileSystem(
                         common.path("special://temp/backup.zip"), "w")
                 self.remoteFS.setRootPath(self.remoteFS.RootPath +
                                           time.strftime("%Y%m%d%H%M") +
                                           "/")
                 # run backup process
                 self.backup()
                 result = self.status >= 0
             elif mode == self.Restore:
                 if self.restorePoint.split('.')[-1] != 'zip':
                     self.remoteFS.setRootPath(self.remoteFS.RootPath +
                                               self.restorePoint + "/")
                 # run restore process
                 self.restore()
                 result = self.status >= 0
             else:
                 result = False
             # cleaning locations
             self.localFS.cleanup()
             self.remoteFS.cleanup()
         else:
             result = False
         # reset the window setting
         window.setProperty("SystemRecovery.Running", "")
     else:
         common.warn(
             'Script already running, no additional instance is needed')
         result = False
     return result
Example #19
File: ssh.py Project: limefax/centsecure
    def execute(self):
        """Execute plugin."""
        path = "/etc/ssh/sshd_config"
        if os.path.isfile(path):
            common.backup(path)
        else:
            common.info("{} not found, skipping SSH".format(path))
            return

        # set correct permissions
        common.run("chown root:root {}".format(path))
        common.run("chmod og-rwx {}".format(path))

        # some fancy commands that ensure correct permissions on private keys
        common.run_full("find /etc/ssh -xdev -type f -name 'ssh_host_*_key' -exec chown root:root {} \\;")
        common.run_full("find /etc/ssh -xdev -type f -name 'ssh_host_*_key' -exec chmod 0600 {} \\;")

        # some fancy commands that ensure correct permissions on public keys
        common.run_full("find /etc/ssh -xdev -type f -name 'ssh_host_*_key.pub' -exec chmod 0644 {} \\;")
        common.run_full("find /etc/ssh -xdev -type f -name 'ssh_host_*_key.pub' -exec chown root:root {} \\;")

        params = {
            "Protocol": "2",
            "LogLevel": "VERBOSE",
            "X11Forwarding": "no",
            "MaxAuthTries": "4",
            "IgnoreRhosts": "yes",
            "HostbasedAuthentication": "no",
            "PermitRootLogin": "******",
            "PermitEmptyPasswords": "no",
            "PermitUserEnvironment": "no",
            "Ciphers": "[email protected],[email protected],[email protected],aes256-ctr,aes192-ctr,aes128-ctr",
            "MACs": "[email protected],[email protected],hmac-sha2-512,hmac-sha2-256",
            "KexAlgorithms": "[email protected],ecdh-sha2-nistp521,ecdh-sha2-nistp384,ecdh-sha2-nistp256,diffie-hellman-group-exchange-sha256",
            "ClientAliveInterval": "300",
            "ClientAliveCountMax": "0",
            "LoginGraceTime": "60",
            "Banner": "/etc/issue.net",
            "UsePAM": "yes",
            "AllowTcpForwarding": "no",
            "maxstartups": "10:30:60",
            "MaxSessions": "4"
        }

        common.change_parameters(path, params)

        common.warn("Not doing anything about ssh access, (groups, users)")
Example #20
def get(url, params=None):
    try:
        t0 = datetime.now()
        if params and 'pair' in params.keys():
            com.debug('---------- currency pair : {0}'.format(params['pair']))
        com.debug('{0} executed'.format(url))
        r = requests.get(url, params)
        if r.status_code == 200:
            com.debug('200 OK response: {0}'.format(r.text))
            return r.json()
        else:
            com.warn('status code : {0:d}'.format(r.status_code))
            return False
    except Exception as e:
        com.error(str(e))
        com.error(traceback.format_exc())
        return False
    finally:
        t1 = datetime.now()
        delta = t1 - t0
        com.info('get {0} exec time : {1:d}.{2:06d}'.format(
            url, delta.seconds, delta.microseconds))
Example #21
File: boarserve.py Project: harmy/boar
def run_socketserver(repopath, address, port):
    repository.Repo(repopath)  # Just check if the repo path is valid

    class BoarTCPHandler(SocketServer.BaseRequestHandler):
        def handle(self):
            to_client = self.request.makefile(mode="wb")
            from_client = self.request.makefile(mode="rb")
            PipedBoarServer(repopath, from_client, to_client).serve()

    if "fork" not in dir(os):
        warn(
            "Your operating system does not support the 'fork()' system call. This server will only be able to handle one client at a time. Please see the manual on how to set up a server on your operating system to handle multiple clients."
        )
        server = SocketServer.TCPServer((address, port), BoarTCPHandler)
    else:
        server = ForkingTCPServer((address, port), BoarTCPHandler)

    ip = server.socket.getsockname()[0]
    if ip == "0.0.0.0":
        ip = socket.gethostname()
    print "Serving repository %s as boar://%s:%s/" % (repopath, ip, port)
    server.serve_forever()
Example #22
    def execute(self):
        """Remove the packages."""
        programs = _list_windows_programs()

        # As this will take lots of manual labour, ask if they would like to check each program.
        check = common.input_yesno(
            "Found {} programs. Would you like to manually check them".format(
                len(programs)))

        if check is False:
            return

        i = 0
        for program in programs:
            i += 1

            if program["UninstallString"] is None:
                common.warn(
                    "The program '{}' (by '{}') cannot be automatically removed. If it is of concern please remove it manually."
                    .format(program["DisplayName"], program["Publisher"]))
                continue

            if _check_whitelist(program):
                common.debug(
                    "The program '{}' (by '{}') is being skipped as it is whitelisted."
                    .format(program["DisplayName"], program["Publisher"]))
                continue

            keep = common.input_yesno(
                "({}/{}) Would you like to keep the program '{}' (by '{}')".
                format(i, len(programs), program["DisplayName"],
                       program["Publisher"]))
            if not keep:
                common.run_full(program["UninstallString"])

        common.debug("Removed packages!")
Example #23
def find_libraries_and_bundles(fw4spl_projects):
    global g_libs
    global g_bundles

    g_libs = []
    g_bundles = []

    for project_dir in fw4spl_projects:
        if not os.path.isdir(project_dir):
            common.warn("%s isn't a valid directory." % project_dir)
            continue
        for root, dirs, files in os.walk(project_dir):
            rootdir = os.path.split(root)[1]
            # Do not inspect hidden folders
            if not rootdir.startswith("."):
                for file in files:
                    if file == "CMakeLists.txt":
                        if re.match('.*Bundles', root):
                            g_bundles += [rootdir.encode()]
                        elif re.match('.*SrcLib', root):
                            g_libs += [rootdir.encode()]

    g_libs.sort()
    g_bundles.sort()
Example #24
File: i18n.py Project: edupals/omnitux
def get_text(language, key, variables=None):

    text_result = ""

    try:
        text_result = text[language][key]

    except KeyError:
        common.warn("Missing translation in language " + language +
                    " for key " + str(key))

        try:
            text_result = text[constants.fallback_language][key]

        except KeyError:
            # Fall back to the first supported language that has this key
            for lang in constants.supported_languages:
                try:
                    text_result = text[lang][key]
                    break
                except KeyError:
                    continue

            if (text_result == ""):
                text_result = "Missing translation in i18n.py file"

    if variables is not None:
        index = 0
        for variable in variables:
            (before, foo,
             after) = text_result.rpartition("[" + str(index) + "]")
            text_result = before + variable + after

            index = index + 1

    return text_result
Example #25
def run(plugins=[]):
    """Runs the plugins.

    Args:
        plugins (list[str], optional): An optional list of specific plugins to run

    """
    all_plugins = plugin.Plugin._registry
    failures = []

    # Sort plugins in priority order
    all_plugins.sort(key=lambda x: x.priority)

    for p in all_plugins:
        if plugin_slug(p) in plugins:
            debug("Plugin: {} (targets {} version {}, priority {})".format(
                p.name, p.os, p.os_version, p.priority))

            if plugin.os_check(p.os, p.os_version):
                instance = p()
                info("Running {}...".format(p.name))

                try:
                    instance.execute()
                except Exception as ex:
                    reminder("The plugin {} failed to run".format(p.name), ex)
                    failures.append(plugin_slug(p))
            else:
                warn("Not running {} as this is not the right OS".format(
                    p.name))
        else:
            warn("Skipping {}".format(p.name))

    reminder(
        "To run all of the failed plugins again, execute CentSecure with the following argument: '-r {}'"
        .format(" ".join(failures)))
Example #26
def main():
    """Main function."""
    # Need to get plugins first for arguments to function
    plugin.find_plugins()

    parser = argparse.ArgumentParser(
        description="Automatically fixes common security vulnerabilities.",
        epilog="Default behaviour is to attempt to run all plugins")
    parser.add_argument("--list-plugins",
                        "-l",
                        action="store_true",
                        help="Lists all plugins",
                        dest="list_plugins")
    parser.add_argument("--run-plugin",
                        "-r",
                        "-p",
                        choices=get_plugins(),
                        nargs="+",
                        metavar="N",
                        help="Run specific plugins",
                        dest="plugins")
    parser.add_argument("--run-all",
                        "-R",
                        action="store_true",
                        help="Run all available plugins",
                        dest="run_all")
    parser.add_argument("--disable-root-check",
                        "--no-root",
                        "-d",
                        action="store_true",
                        help="Disable root check",
                        dest="no_root_check")
    parser.add_argument("--disable-python-check",
                        action="store_true",
                        help="Disable Python version check",
                        dest="disable_python_check")
    args = parser.parse_args()

    info("Welcome to CentSecure!")
    debug("This computer is running {} version {}".format(
        plugin.get_os(), plugin.get_os_version()))

    if args.list_plugins:
        plugins = get_plugins()
        for p in plugins:
            stdout("- {}".format(p))
        sys.exit(0)

    if not args.disable_python_check and not _check_python_version():
        warn(
            "CentSecure requires Python 3.7.x, you are using {}. Use the option --disable-python-check to bypass."
            .format(python_version()))
        sys.exit(1)

    firsttime.run_all()

    if args.run_all:
        to_run = get_plugins()
    elif args.plugins is not None:
        to_run = args.plugins
    else:
        to_run = get_default_plugins()

    if is_admin() or args.no_root_check:
        debug("Running CentSecure with the following {} plugins: {}".format(
            len(to_run), ", ".join(to_run)))
        run(to_run)
    else:
        warn(
            "CentSecure should be run as root or administator. Use the option --disable-root-check to bypass."
        )
        sys.exit(1)
Example #27
def update_cert_data(prefix, dirname, cert_data, name_header):
    """Update certificate/ca data."""

    # pylint: disable=too-many-locals,too-many-branches,too-many-statements; there are many extensions

    cert_values = {
        'subject': [(
            name_header,
            'Subject',
        )],
        'issuer': [(
            name_header,
            'Issuer',
        )],
        'aia': [(name_header, 'Critical', 'Values')],
        'aki':
        [(name_header, 'Critical', 'Key identifier', 'Issuer', 'Serial')],
        'basicconstraints': [(name_header, 'Critical', 'CA', 'Path length')],
        'eku': [(name_header, 'Critical', 'Usages')],
        'key_usage': [[
            name_header,
            'Critical',
            'digital_signature',
            'content_commitment',
            'key_encipherment',
            'data_encipherment',
            'key_agreement',
            'key_cert_sign',
            'crl_sign',
            'encipher_only',
            'decipher_only',
        ]],
        'ian': [(name_header, 'Critical', 'Names')],
        'ski': [(name_header, 'Critical', 'Digest')],
        'certificatepolicies': [(name_header, 'Critical', 'Policies')],
        'crldp':
        [(name_header, 'Critical', 'Names', 'RDNs', 'Issuer', 'Reasons')],
        'sct': [(name_header, 'Critical', 'Value')],
        'nc': [(name_header, 'Critical', 'Permitted', 'Excluded')],
        'unknown': [(name_header, 'Extensions')],
    }
    exclude_empty_lines = {
        'unknown',
    }

    for cert_filename in sorted(
            os.listdir(dirname),
            key=lambda f: cert_data.get(f, {}).get('name', '')):
        if cert_filename not in cert_data:
            common.warn('Unknown %s: %s' % (prefix, cert_filename))
            continue
        print('Parsing %s (%s)...' % (cert_filename, prefix))

        cert_name = cert_data[cert_filename]['name']

        this_cert_values = {}
        for cert_key in cert_values:
            this_cert_values[cert_key] = ['']

        with open(os.path.join(dirname, cert_filename), 'rb') as cert_stream:
            cert = x509.load_pem_x509_certificate(cert_stream.read(),
                                                  backend=default_backend())

        this_cert_values['subject'] = [format_name(cert.subject)]
        this_cert_values['issuer'] = [format_name(cert.issuer)]

        for cert_ext in cert.extensions:
            value = cert_ext.value
            critical = '✓' if cert_ext.critical else '✗'

            if isinstance(value, x509.AuthorityInformationAccess):
                this_cert_values['aia'] = [
                    critical,
                    '\n'.join([
                        '* %s: %s' % (
                            v.access_method._name,  # pylint: disable=protected-access; only way to get name
                            format_general_name(v.access_location))
                        for v in value
                    ])
                ]
            elif isinstance(value, x509.AuthorityKeyIdentifier):
                this_cert_values['aki'] = [
                    critical,
                    bytes_to_hex(value.key_identifier),
                    optional(value.authority_cert_issuer, format_general_name,
                             '✗'),
                    optional(value.authority_cert_serial_number, fallback='✗'),
                ]
            elif isinstance(value, x509.BasicConstraints):
                this_cert_values['basicconstraints'] = [
                    critical,
                    value.ca,
                    value.path_length
                    if value.path_length is not None else 'None',
                ]
            elif isinstance(value, x509.CRLDistributionPoints):
                this_cert_values['crldp'] = []
                for distribution_point in value:
                    full_name = '* '.join([
                        format_general_name(name)
                        for name in distribution_point.full_name
                    ]) if distribution_point.full_name else '✗'
                    issuer = '* '.join([
                        format_general_name(name)
                        for name in distribution_point.crl_issuer
                    ]) if distribution_point.crl_issuer else '✗'
                    reasons = ', '.join([
                        r.name for r in distribution_point.reasons
                    ]) if distribution_point.reasons else '✗'

                    relative_name = format_name(
                        distribution_point.relative_name
                    ) if distribution_point.relative_name else '✗'
                    this_cert_values['crldp'].append([
                        critical,
                        full_name,
                        relative_name,
                        issuer,
                        reasons,
                    ])
            elif isinstance(value, x509.CertificatePolicies):
                policies = []

                for policy in value:
                    policy_name = policy.policy_identifier.dotted_string
                    if policy.policy_qualifiers is None:
                        policies.append('* %s' % policy_name)
                    elif len(policy.policy_qualifiers) == 1:
                        policies.append(
                            '* %s: %s' %
                            (policy_name,
                             policy_as_str(policy.policy_qualifiers[0])))
                    else:
                        qualifiers = '\n'.join([
                            '  * %s' % policy_as_str(p)
                            for p in policy.policy_qualifiers
                        ])
                        policies.append('* %s:\n\n%s\n' %
                                        (policy_name, qualifiers))

                this_cert_values['certificatepolicies'] = [
                    critical, '\n'.join(policies)
                ]
            elif isinstance(value, x509.ExtendedKeyUsage):
                this_cert_values['eku'] = [
                    critical,
                    ', '.join([u._name for u in value]),  # pylint: disable=protected-access
                ]
            elif isinstance(value, x509.IssuerAlternativeName):
                this_cert_values['ian'] = [
                    critical,
                    '* '.join([format_general_name(v) for v in value]),
                ]
            elif isinstance(value, x509.KeyUsage):
                key_usages = []
                for key in cert_values['key_usage'][0][2:]:
                    try:
                        key_usages.append('✓' if getattr(value, key) else '✗')
                    except ValueError:
                        key_usages.append('✗')

                this_cert_values['key_usage'] = [
                    critical,
                ] + key_usages
            elif isinstance(value, x509.NameConstraints):
                permitted = '\n'.join([
                    '* %s' % format_general_name(n)
                    for n in value.permitted_subtrees
                ]) if value.permitted_subtrees else '✗'
                excluded = '\n'.join([
                    '* %s' % format_general_name(n)
                    for n in value.excluded_subtrees
                ]) if value.excluded_subtrees else '✗'
                this_cert_values['nc'] = [critical, permitted, excluded]
            elif isinstance(value,
                            x509.PrecertificateSignedCertificateTimestamps):
                this_cert_values['sct'] = [
                    critical, '\n'.join([
                        '* Type: %s, version: %s' %
                        (e.entry_type.name, e.version.name) for e in value
                    ])
                ]
            elif isinstance(value, x509.SubjectKeyIdentifier):
                this_cert_values['ski'] = [
                    critical, bytes_to_hex(value.digest)
                ]
            elif isinstance(value, x509.SubjectAlternativeName):
                continue  # not interesting here
            else:
                # These are some OIDs identified by OpenSSL cli as "Netscape Cert Type" and
                # "Netscape Comment". They only occur in the old, discontinued StartSSL root
                # certificate.
                if cert_ext.oid.dotted_string == '2.16.840.1.113730.1.1':
                    name = 'Netscape Cert Type'
                elif cert_ext.oid.dotted_string == '2.16.840.1.113730.1.13':
                    name = "Netscape Comment"
                else:
                    name = cert_ext.oid._name  # pylint: disable=protected-access; only way to get name

                ext_str = '%s (Critical: %s, OID: %s)' % (
                    name, cert_ext.critical, cert_ext.oid.dotted_string)
                this_cert_values['unknown'].append(ext_str)

        this_cert_values['unknown'] = [
            '\n'.join(['* %s' % v for v in this_cert_values['unknown'][1:]])
        ]

        for key, row in this_cert_values.items():
            if isinstance(row[0], list):
                cert_values[key].append([cert_name] + row[0])
                for mrow in row[1:]:
                    cert_values[key].append(['', ''] + mrow[1:])
            else:
                cert_values[key].append([cert_name] + row)

    for name, values in cert_values.items():
        cert_filename = os.path.join(out_base, '%s_%s.rst' % (prefix, name))

        if name in exclude_empty_lines:
            values = [v for v in values if ''.join(v[1:])]

        if values:
            table = tabulate(values, headers='firstrow', tablefmt='rst')
        else:
            table = ''

        with open(cert_filename, 'w') as stream:
            stream.write(table)
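The certificate and CRL examples here and below rely on an optional() helper that is not shown. Judging by the call sites, optional(value, formatter, '✗') and optional(value, fallback='✗'), it presumably behaves roughly like this sketch (an assumption, not the project's actual implementation):

def optional(value, formatter=None, fallback=''):
    # Format value with formatter, or return fallback when value is falsy.
    if not value:
        return fallback
    if formatter is not None:
        return formatter(value)
    return value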
Example #28
 def warn(self, txt, code=None):
     common.warn(txt, code=code)
Example #29
def update_cert_data(prefix, dirname, cert_data, name_header):
    """Update certificate/ca data."""

    # pylint: disable=too-many-locals,too-many-branches,too-many-statements; there are many extensions

    cert_values = {
        "subject": [(
            name_header,
            "Subject",
        )],
        "issuer": [(
            name_header,
            "Issuer",
        )],
        "aia": [(name_header, "Critical", "Values")],
        "aki":
        [(name_header, "Critical", "Key identifier", "Issuer", "Serial")],
        "basicconstraints": [(name_header, "Critical", "CA", "Path length")],
        "eku": [(name_header, "Critical", "Usages")],
        "key_usage": [[name_header, "Critical"] +
                      sorted(KeyUsage.CRYPTOGRAPHY_MAPPING.keys())],
        "ian": [(name_header, "Critical", "Names")],
        "ski": [(name_header, "Critical", "Digest")],
        "certificatepolicies": [(name_header, "Critical", "Policies")],
        "crldp":
        [(name_header, "Critical", "Names", "RDNs", "Issuer", "Reasons")],
        "sct": [(name_header, "Critical", "Value")],
        "nc": [(name_header, "Critical", "Permitted", "Excluded")],
        "unknown": [(name_header, "Extensions")],
    }
    exclude_empty_lines = {
        "unknown",
    }

    for cert_filename in sorted(
            os.listdir(dirname),
            key=lambda f: cert_data.get(f, {}).get("name", "")):
        if cert_filename not in cert_data:
            common.warn("Unknown %s: %s" % (prefix, cert_filename))
            continue
        print("Parsing %s (%s)..." % (cert_filename, prefix))

        cert_name = cert_data[cert_filename]["name"]

        this_cert_values = {}
        for cert_key in cert_values:
            this_cert_values[cert_key] = [""]

        with open(os.path.join(dirname, cert_filename), "rb") as cert_stream:
            cert = x509.load_pem_x509_certificate(cert_stream.read(),
                                                  backend=default_backend())

        this_cert_values["subject"] = ["``%s``" % format_name(cert.subject)]
        this_cert_values["issuer"] = ["``%s``" % format_name(cert.issuer)]

        for cert_ext in cert.extensions:
            value = cert_ext.value
            critical = "✓" if cert_ext.critical else "✗"

            if isinstance(value, x509.AuthorityInformationAccess):
                this_cert_values["aia"] = [
                    critical,
                    "\n".join([
                        "* %s: %s" % (
                            v.access_method._name,  # pylint: disable=protected-access
                            format_general_name(v.access_location),
                        ) for v in value
                    ]),
                ]
            elif isinstance(value, x509.AuthorityKeyIdentifier):
                this_cert_values["aki"] = [
                    critical,
                    bytes_to_hex(value.key_identifier),
                    optional(value.authority_cert_issuer, format_general_name,
                             "✗"),
                    optional(value.authority_cert_serial_number, fallback="✗"),
                ]
            elif isinstance(value, x509.BasicConstraints):
                this_cert_values["basicconstraints"] = [
                    critical,
                    value.ca,
                    value.path_length
                    if value.path_length is not None else "None",
                ]
            elif isinstance(value, x509.CRLDistributionPoints):
                this_cert_values["crldp"] = []
                for distribution_point in value:
                    full_name = ("* ".join([
                        format_general_name(name)
                        for name in distribution_point.full_name
                    ]) if distribution_point.full_name else "✗")
                    issuer = ("* ".join([
                        format_general_name(name)
                        for name in distribution_point.crl_issuer
                    ]) if distribution_point.crl_issuer else "✗")
                    reasons = (", ".join([
                        r.name for r in distribution_point.reasons
                    ]) if distribution_point.reasons else "✗")

                    relative_name = (format_name(
                        distribution_point.relative_name) if
                                     distribution_point.relative_name else "✗")
                    this_cert_values["crldp"].append([
                        critical,
                        full_name,
                        relative_name,
                        issuer,
                        reasons,
                    ])
            elif isinstance(value, x509.CertificatePolicies):
                policies = []

                for policy in value:
                    policy_name = policy.policy_identifier.dotted_string
                    if policy.policy_qualifiers is None:
                        policies.append("* %s" % policy_name)
                    elif len(policy.policy_qualifiers) == 1:
                        policies.append(
                            "* %s: %s" %
                            (policy_name,
                             policy_as_str(policy.policy_qualifiers[0])))
                    else:
                        qualifiers = "\n".join([
                            "  * %s" % policy_as_str(p)
                            for p in policy.policy_qualifiers
                        ])
                        policies.append("* %s:\n\n%s\n" %
                                        (policy_name, qualifiers))

                this_cert_values["certificatepolicies"] = [
                    critical, "\n".join(policies)
                ]
            elif isinstance(value, x509.ExtendedKeyUsage):
                this_cert_values["eku"] = [
                    critical,
                    ", ".join([u._name for u in value]),  # pylint: disable=protected-access
                ]
            elif isinstance(value, x509.IssuerAlternativeName):
                this_cert_values["ian"] = [
                    critical,
                    "* ".join([format_general_name(v) for v in value]),
                ]
            elif isinstance(value, x509.KeyUsage):
                key_usages = []
                for key in cert_values["key_usage"][0][2:]:
                    try:
                        key_usages.append(
                            "✓" if getattr(value, KeyUsage.
                                           CRYPTOGRAPHY_MAPPING[key]) else "✗")
                    except ValueError:
                        key_usages.append("✗")

                this_cert_values["key_usage"] = [
                    critical,
                ] + key_usages
            elif isinstance(value, x509.NameConstraints):
                permitted = ("\n".join([
                    "* %s" % format_general_name(n)
                    for n in value.permitted_subtrees
                ]) if value.permitted_subtrees else "✗")
                excluded = ("\n".join([
                    "* %s" % format_general_name(n)
                    for n in value.excluded_subtrees
                ]) if value.excluded_subtrees else "✗")
                this_cert_values["nc"] = [critical, permitted, excluded]
            elif isinstance(value,
                            x509.PrecertificateSignedCertificateTimestamps):
                this_cert_values["sct"] = [
                    critical,
                    "\n".join([
                        "* Type: %s, version: %s" %
                        (e.entry_type.name, e.version.name) for e in value
                    ]),
                ]
            elif isinstance(value, x509.SubjectKeyIdentifier):
                this_cert_values["ski"] = [
                    critical, bytes_to_hex(value.digest)
                ]
            elif isinstance(value, x509.SubjectAlternativeName):
                continue  # not interesting here
            else:
                # These are some OIDs identified by OpenSSL cli as "Netscape Cert Type" and
                # "Netscape Comment". They only occur in the old, discontinued StartSSL root
                # certificate.
                if cert_ext.oid.dotted_string == "2.16.840.1.113730.1.1":
                    name = "Netscape Cert Type"
                elif cert_ext.oid.dotted_string == "2.16.840.1.113730.1.13":
                    name = "Netscape Comment"
                else:
                    name = cert_ext.oid._name  # pylint: disable=protected-access; only way to get name

                ext_str = "%s (Critical: %s, OID: %s)" % (
                    name, cert_ext.critical, cert_ext.oid.dotted_string)
                this_cert_values["unknown"].append(ext_str)

        this_cert_values["unknown"] = [
            "\n".join(["* %s" % v for v in this_cert_values["unknown"][1:]])
        ]

        for key, row in this_cert_values.items():
            if isinstance(row[0], list):
                cert_values[key].append([cert_name] + row[0])
                for mrow in row[1:]:
                    cert_values[key].append(["", ""] + mrow[1:])
            else:
                cert_values[key].append([cert_name] + row)

    for name, values in cert_values.items():
        cert_filename = os.path.join(out_base, "%s_%s.rst" % (prefix, name))

        if name in exclude_empty_lines:
            values = [v for v in values if "".join(v[1:])]

        if values:
            table = tabulate(values, headers="firstrow", tablefmt="rst")
        else:
            table = ""

        with open(cert_filename, "w") as stream:
            stream.write(table)
Example #30
def update_crl_data():  # pylint: disable=too-many-locals
    """Update CRL data."""
    crls = {
        "gdig2s1-1015.crl": {
            "info": "CRL in Go Daddy G2 end user certificates",
            "last": "2019-04-19",
            "name": "Go Daddy G2/user",
            "url": "http://crl.godaddy.com/gdig2s1-1015.crl",
        },
        "gdroot-g2.crl": {
            "info": "CRL in Go Daddy G2 intermediate CA",
            "last": "2019-04-19",
            "name": "Go Daddy G2/ca",
            "url": "http://crl.godaddy.com/gdroot-g2.crl",
        },
        "DSTROOTCAX3CRL.crl": {
            "info": "CRL in Let's Encrypt X3",
            "last": "2019-04-19",
            "name": "Let's Encrypt Authority X3/ca",
            "url": "http://crl.identrust.com/DSTROOTCAX3CRL.crl",
        },
        "root-r2.crl": {
            "info": "CRL in GlobalSign R2",
            "last": "2019-04-19",
            "name": "GlobalSign R2/ca",
            "url": "http://crl.globalsign.net/root-r2.crl",
        },
        "gsr2.crl": {
            "info": "CRL in Google G3 CA",
            "last": "2019-04-19",
            "name": "Google G3/ca",
            "url": "http://crl.pki.goog/gsr2/gsr2.crl",
        },
        "GTSGIAG3.crl": {
            "info": "CRL in Google G3 end user certificates",
            "last": "2019-04-19",
            "name": "Google G3/user",
            "url": "http://crl.pki.goog/GTSGIAG3.crl",
        },
        "comodo_ev_user.pem": {
            "info":
            "CRL in %s end user certificates" % certs["comodo_ev.pem"]["name"],
            "last":
            "2019-04-21",
            "name":
            "%s (user)" % cas["comodo_ev.pem"]["name"],
            "url":
            "http://crl.comodoca.com/COMODORSAExtendedValidationSecureServerCA.crl",
        },
        "digicert_ha_intermediate.crl": {
            "info": "CRL in %s" % cas["digicert_ha_intermediate.pem"]["name"],
            "last": "2019-04-21",
            "name": "%s/ca" % cas["digicert_ha_intermediate.pem"]["name"],
            "url":
            "http://crl4.digicert.com/DigiCertHighAssuranceEVRootCA.crl",
        },
        "digicert_ha_intermediate_user.crl": {
            "info":
            "CRL %s end user certificates" %
            cas["digicert_ha_intermediate.pem"]["name"],
            "last":
            "2019-04-21",
            "name":
            "%s/user" % certs["digicert_ha_intermediate.pem"]["name"],
            "url":
            "http://crl3.digicert.com/sha2-ha-server-g6.crl",
        },
        "trustid_server_a52_ca.crl": {
            "info": "CRL in %s" % cas["trustid_server_a52.pem"]["name"],
            "last": "2019-04-21",
            "name": "%s/ca" % cas["trustid_server_a52.pem"]["name"],
            "url": "http://validation.identrust.com/crl/commercialrootca1.crl",
        },
        "trustid_server_a52_user.crl": {
            "info":
            "CRL %s end user certificates" %
            cas["trustid_server_a52.pem"]["name"],
            "last":
            "2019-04-21",
            "name":
            "%s/user" % certs["trustid_server_a52.pem"]["name"],
            "url":
            "http://validation.identrust.com/crl/trustidcaa52.crl",
        },
    }

    crl_dir = os.path.join(docs_base, "_files", "crl")
    crl_values = {
        # meta data
        "crl_info": [("CRL", "Source", "Last accessed", "Info")],
        "crl_issuer": [("CRL", "Issuer Name")],
        "crl_data": [("CRL", "Update freq.", "hash")],
        # extensions
        "crl_aki": [("CRL", "key_identifier", "cert_issuer", "cert_serial")],
        "crl_crlnumber": [("CRL", "number")],
        "crl_idp": [
            (
                "CRL",
                "full name",
                "relative name",
                "only attr certs",
                "only ca certs",
                "only user certs",
                "reasons",
                "indirect CRL",
            ),
        ],
    }

    for crl_filename in sorted(os.listdir(crl_dir),
                               key=lambda f: crls.get(f, {}).get("name", "")):
        if crl_filename not in crls:
            common.warn("Unknown CRL: %s" % crl_filename)
            continue

        crl_name = crls[crl_filename]["name"]

        # set empty string as default value
        this_crl_values = {}
        for crl_key in crl_values:
            this_crl_values[crl_key] = [""] * (len(crl_values[crl_key][0]) - 1)

        with open(os.path.join(crl_dir, crl_filename), "rb") as crl_stream:
            crl = x509.load_der_x509_crl(crl_stream.read(),
                                         backend=default_backend())

        # add info
        this_crl_values["crl_info"] = (
            ":download:`%s </_files/crl/%s>` (`URL <%s>`__)" %
            (crl_filename, crl_filename, crls[crl_filename]["url"]),
            crls[crl_filename]["last"],
            crls[crl_filename]["info"],
        )

        # add data row
        this_crl_values["crl_data"] = (
            crl.next_update - crl.last_update,
            HASH_NAMES[type(crl.signature_hash_algorithm)],
        )
        this_crl_values["crl_issuer"] = ("``%s``" % format_name(crl.issuer), )

        # add extension values
        for ext in crl.extensions:
            value = ext.value

            if isinstance(value, x509.CRLNumber):
                this_crl_values["crl_crlnumber"] = (ext.value.crl_number, )
            elif isinstance(value, x509.IssuingDistributionPoint):
                this_crl_values["crl_idp"] = (
                    optional(
                        value.full_name, lambda v: "* ".join(
                            [format_general_name(n) for n in v]), "✗"),
                    optional(value.relative_name, format_name, "✗"),
                    "✓" if value.only_contains_attribute_certs else "✗",
                    "✓" if value.only_contains_ca_certs else "✗",
                    "✓" if value.only_contains_user_certs else "✗",
                    optional(value.only_some_reasons,
                             lambda v: ", ".join([f.name for f in v]), "✗"),
                    "✓" if value.indirect_crl else "✗",
                )
            elif isinstance(value, x509.AuthorityKeyIdentifier):
                crl_aci = optional(
                    value.authority_cert_issuer,
                    lambda v: "* ".join(
                        ["``%s``" % format_general_name(n) for n in v]),
                    "✗",
                )
                crl_acsn = optional(value.authority_cert_serial_number,
                                    fallback="✗")

                this_crl_values["crl_aki"] = (bytes_to_hex(
                    value.key_identifier), crl_aci, crl_acsn)
            else:
                common.warn("Unknown extension: %s" % ext.oid._name)  # pylint: disable=protected-access

        for crl_key, crl_row in this_crl_values.items():
            crl_values[crl_key].append([crl_name] + list(crl_row))

    # Finally, write CRL data to RST table
    for crl_name, crl_extensions in crl_values.items():
        crl_table = tabulate(crl_extensions,
                             headers="firstrow",
                             tablefmt="rst")
        with open(os.path.join(out_base, "%s.rst" % crl_name),
                  "w") as crl_table_stream:
            crl_table_stream.write(crl_table)
Example #31
	def getImageList(self, params):
		common.trace("Starting to search images using parameters: %s" %str(params), "duckgo")
		images = []
		if params.get('mbid', '') == '':
			common.warn("No artist identified over MusicBrainz, search stopped")
			return images
		if "fullname" in params and not common.isempty(params['fullname']):
			keywords = params['fullname'] + " AND (singer OR band)"
		elif "alias" in params and not common.isempty(params['alias']):
			keywords = params['alias'] + " AND (singer OR band)"
		elif "artist" in params and not common.isempty(params['artist']):
			keywords = params['artist'] + " AND (singer OR band)"
		else:
			keywords = None
		if keywords is not None and "location" in params and not common.isempty(params['location']):
			keywords = keywords + " AND " + params['location']
		elif keywords is not None and "lang" in params and not common.isempty(params['lang']):
			keywords = keywords + " AND " + params['lang']
		if keywords is not None:
			payload = {'q': keywords}
			common.trace("Hitting DuckDuckGo for token", "duckgo")
			data = common.urlcall(self.DDG_SEARCH_URL, "POST", payload=payload)
			searchObj = re.search(r'vqd=([\d-]+)\&', data, re.M | re.I)
			if not searchObj:
				common.error("Token parsing failed!", "duckgo")
				return images
			else:
				common.debug("Obtained token: %s" % searchObj.group(1), "duckgo")
			headers = {
				'authority': 'duckduckgo.com',
				'accept': 'application/json, text/javascript, */*; q=0.01',
				'sec-fetch-dest': 'empty',
				'x-requested-with': 'XMLHttpRequest',
				'user-agent': common.agent(),
				'sec-fetch-site': 'same-origin',
				'sec-fetch-mode': 'cors',
				'referer': 'https://duckduckgo.com/'
			}
			payload = {
				"q": keywords,
				"vqd": searchObj.group(1),
				"v7exp": "a",
				"o": "json",
				"l": "wt-wt",
				"f": ",,,",
				"p": '1'
			}
			data = None
			while True:
				try:
					data = common.urlcall(self.DDG_SEARCH_URL + "i.js", headers=headers, payload=payload, output='json')
					break
				except ValueError as e:
					common.trace("Calling url failure; sleep and retry", "duckgo")
					common.sleep(500)
					continue
			index = 0
			max = common.any2int(params['limit'], 0)
			for obj in data["results"]:
				contextual = str(obj["title"].encode('utf-8')).lower().find(params['artist'].lower() + " ") >= 0
				dimension = int(obj["width"]) >= 876 if common.any2bool(params.get('getall', 'false')) else int(obj["width"]) >= 1920
				if contextual and dimension:
					index += 1
					images.append(obj["image"])
				if max > 0 and index >= max:
					break
		if not images:
			return []
		else:
			return self._delExclusions(images, params.get('exclusionsfile'))
Example #32
def update_crl_data():  # pylint: disable=too-many-locals
    """Update CRL data."""
    crls = {
        'gdig2s1-1015.crl': {
            'info': 'CRL in Go Daddy G2 end user certificates',
            'last': '2019-04-19',
            'name': 'Go Daddy G2/user',
            'url': 'http://crl.godaddy.com/gdig2s1-1015.crl',
        },
        'gdroot-g2.crl': {
            'info': 'CRL in Go Daddy G2 intermediate CA',
            'last': '2019-04-19',
            'name': 'Go Daddy G2/ca',
            'url': 'http://crl.godaddy.com/gdroot-g2.crl',
        },
        'DSTROOTCAX3CRL.crl': {
            'info': 'CRL in Let\'s Encrypt X3',
            'last': '2019-04-19',
            'name': "Let's Encrypt Authority X3/ca",
            'url': 'http://crl.identrust.com/DSTROOTCAX3CRL.crl',
        },
        'root-r2.crl': {
            'info': 'CRL in GlobalSign R2',
            'last': '2019-04-19',
            'name': 'GlobalSign R2/ca',
            'url': 'http://crl.globalsign.net/root-r2.crl',
        },
        'gsr2.crl': {
            'info': 'CRL in Google G3 CA',
            'last': '2019-04-19',
            'name': 'Google G3/ca',
            'url': 'http://crl.pki.goog/gsr2/gsr2.crl',
        },
        'GTSGIAG3.crl': {
            'info': 'CRL in Google G3 end user certificates',
            'last': '2019-04-19',
            'name': 'Google G3/user',
            'url': 'http://crl.pki.goog/GTSGIAG3.crl',
        },
        'comodo_ev_user.pem': {
            'info':
            'CRL in %s end user certificates' % certs['comodo_ev.pem']['name'],
            'last':
            '2019-04-21',
            'name':
            '%s/user' % cas['comodo_ev.pem']['name'],
            'url':
            'http://crl.comodoca.com/COMODORSAExtendedValidationSecureServerCA.crl',
        },
        'digicert_ha_intermediate.crl': {
            'info': 'CRL in %s' % cas['digicert_ha_intermediate.pem']['name'],
            'last': '2019-04-21',
            'name': '%s/ca' % cas['digicert_ha_intermediate.pem']['name'],
            'url':
            'http://crl4.digicert.com/DigiCertHighAssuranceEVRootCA.crl',
        },
        'digicert_ha_intermediate_user.crl': {
            'info':
            'CRL %s end user certificates' %
            cas['digicert_ha_intermediate.pem']['name'],
            'last':
            '2019-04-21',
            'name':
            '%s/user' % certs['digicert_ha_intermediate.pem']['name'],
            'url':
            'http://crl3.digicert.com/sha2-ha-server-g6.crl',
        },
        'trustid_server_a52_ca.crl': {
            'info': 'CRL in %s' % cas['trustid_server_a52.pem']['name'],
            'last': '2019-04-21',
            'name': '%s/ca' % cas['trustid_server_a52.pem']['name'],
            'url': 'http://validation.identrust.com/crl/commercialrootca1.crl',
        },
        'trustid_server_a52_user.crl': {
            'info':
            'CRL %s end user certificates' %
            cas['trustid_server_a52.pem']['name'],
            'last':
            '2019-04-21',
            'name':
            '%s/user' % certs['trustid_server_a52.pem']['name'],
            'url':
            'http://validation.identrust.com/crl/trustidcaa52.crl',
        },
    }

    crl_dir = os.path.join(docs_base, '_files', 'crl')
    crl_values = {
        # meta data
        'crl_info': [('CRL', 'Source', 'Last accessed', 'Info')],
        'crl_issuer': [('CRL', 'Issuer Name')],
        'crl_data': [('CRL', 'Update freq.', 'hash')],

        # extensions
        'crl_aki': [('CRL', 'key_identifier', 'cert_issuer', 'cert_serial')],
        'crl_crlnumber': [('CRL', 'number')],
        'crl_idp': [
            (
                'CRL',
                'full name',
                'relative name',
                'only attr certs',
                'only ca certs',
                'only user certs',
                'reasons',
                'indirect CRL',
            ),
        ]
    }

    for crl_filename in sorted(os.listdir(crl_dir),
                               key=lambda f: crls.get(f, {}).get('name', '')):
        if crl_filename not in crls:
            common.warn('Unknown CRL: %s' % crl_filename)
            continue

        crl_name = crls[crl_filename]['name']

        # set empty string as default value
        this_crl_values = {}
        for crl_key in crl_values:
            this_crl_values[crl_key] = [''] * (len(crl_values[crl_key][0]) - 1)

        with open(os.path.join(crl_dir, crl_filename), 'rb') as crl_stream:
            crl = x509.load_der_x509_crl(crl_stream.read(),
                                         backend=default_backend())

        # add info
        this_crl_values['crl_info'] = (
            ':download:`%s </_files/crl/%s>` (`URL <%s>`__)' %
            (crl_filename, crl_filename, crls[crl_filename]['url']),
            crls[crl_filename]['last'],
            crls[crl_filename]['info'],
        )

        # add data row
        this_crl_values['crl_data'] = (
            crl.next_update - crl.last_update,
            crl.signature_hash_algorithm.name,
        )
        this_crl_values['crl_issuer'] = (format_name(crl.issuer), )

        # add extension values
        for ext in crl.extensions:
            value = ext.value

            if isinstance(value, x509.CRLNumber):
                this_crl_values['crl_crlnumber'] = (ext.value.crl_number, )
            elif isinstance(value, x509.IssuingDistributionPoint):
                this_crl_values['crl_idp'] = (
                    optional(
                        value.full_name, lambda v: '* '.join(
                            [format_general_name(n) for n in v]), '✗'),
                    optional(value.relative_name, format_name, '✗'),
                    '✓' if value.only_contains_attribute_certs else '✗',
                    '✓' if value.only_contains_ca_certs else '✗',
                    '✓' if value.only_contains_user_certs else '✗',
                    optional(value.only_some_reasons,
                             lambda v: ', '.join([f.name for f in v]), '✗'),
                    '✓' if value.indirect_crl else '✗',
                )
            elif isinstance(value, x509.AuthorityKeyIdentifier):
                crl_aci = optional(
                    value.authority_cert_issuer,
                    lambda v: '* '.join([format_general_name(n) for n in v]),
                    '✗')
                crl_acsn = optional(value.authority_cert_serial_number,
                                    fallback='✗')

                this_crl_values['crl_aki'] = (bytes_to_hex(
                    value.key_identifier), crl_aci, crl_acsn)
            else:
                common.warn('Unknown extension: %s' % ext.oid._name)  # pylint: disable=protected-access

        for crl_key, crl_row in this_crl_values.items():
            crl_values[crl_key].append([crl_name] + list(crl_row))

    # Finally, write CRL data to RST table
    for crl_name, crl_extensions in crl_values.items():
        crl_table = tabulate(crl_extensions,
                             headers='firstrow',
                             tablefmt='rst')
        with open(os.path.join(out_base, '%s.rst' % crl_name),
                  'w') as crl_table_stream:
            crl_table_stream.write(crl_table)
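The loop above leans on helpers such as optional(), bytes_to_hex(), format_name() and format_general_name() that are defined earlier in the script and not shown here. As a rough sketch of the assumed behaviour, reconstructed only from the call sites (the real implementations may differ):

def optional(value, formatter=None, fallback=''):
    # Return the fallback marker when the value is absent; otherwise apply
    # the formatter (if given) and return the result.
    if value is None:
        return fallback
    return formatter(value) if formatter is not None else value

def bytes_to_hex(value):
    # Render raw bytes (e.g. an AKI key identifier) as colon-separated hex.
    return ':'.join('%02X' % b for b in value)

With these definitions, optional(value.authority_cert_serial_number, fallback='✗') yields either the serial number or the ✗ marker used throughout the generated tables.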
Example #33
0
	def __init__(self, game, main_language, score) :

		xml_game_node = game.xml_game_node

		self.globalvars = game.globalvars

		# here we create a new game object to play with
		# thus the original game won't be altered

		common.Game.__init__(self, self.globalvars, xml_game_node, game.level, score)

		self.score.enable_count_good()
		self.score.enable_count_wrong()
		self.score.enable_record_time()


		self.xml_game_node = xml_game_node

		# reading global parameters
		xml_global_parameters_node = self.xml_game_node.getElementsByTagName("global_parameters")[0]
	
	
		# first of all, game setup
		xml_game_setup_node = self.xml_game_node.getElementsByTagName("game_setup")[0]
	
		# possible values for types :
		# image
		# image_on_map (only to associate)
		# text
		self.type_to_associate = xml_funcs.getText(xml_game_setup_node.getElementsByTagName("type_to_associate")[0])
		
		type_to_drag_node = xml_game_setup_node.getElementsByTagName("type_to_drag")[0]

		self.type_to_drag = xml_funcs.getText(type_to_drag_node)
		sort = type_to_drag_node.getAttribute("sort")
		
		if (sort == None) :
			self.sort = False
		elif (cmp(sort.lower(), "yes") == 0) :
			self.sort = True
		else :
			self.sort = False
		
		
	
		# the min and max amounts of associations (good + bad ones) which will become draggable items
		self.min_draggable = xml_funcs.getInt(xml_game_setup_node.getElementsByTagName("min_draggable")[0])
		self.max_draggable = xml_funcs.getInt(xml_game_setup_node.getElementsByTagName("max_draggable")[0])
	
	
		item_to_associate_parameters_nodes  = xml_global_parameters_node.getElementsByTagName("item_to_associate")
		
		self.item_to_associate_parameters = []
		for item_to_associate_parameters_node in item_to_associate_parameters_nodes :
			self.item_to_associate_parameters.append(xml_funcs.get_box(item_to_associate_parameters_node))


		draggable_items_area_nodes = xml_global_parameters_node.getElementsByTagName("draggable_items_area")
		self.draggable_items_areas = []
		
		for draggable_items_area_node in draggable_items_area_nodes :

			spacing_x_nodes = draggable_items_area_node.getElementsByTagName("spacing_x")
			spacing_y_nodes = draggable_items_area_node.getElementsByTagName("spacing_y")

			if (len(spacing_x_nodes) > 0) :
				spacing_x = xml_funcs.getFloat(spacing_x_nodes[0])
			else :
				spacing_x = 0

			if (len(spacing_y_nodes) > 0) :
				spacing_y = xml_funcs.getFloat(spacing_y_nodes[0])
			else :
				spacing_y = 0

			draggable_items_area = (xml_funcs.get_box(draggable_items_area_node),
						xml_funcs.getInt(draggable_items_area_node.getElementsByTagName("amount_x")[0]),
						xml_funcs.getInt(draggable_items_area_node.getElementsByTagName("amount_y")[0]),
						spacing_x,
						spacing_y)
			
			# TODO: make this under each area
			
			text_height_nodes = draggable_items_area_node.getElementsByTagName("font_height")
			if (len(text_height_nodes) > 0) :
				self.text_height = xml_funcs.getInt(text_height_nodes[0])
			else :
				self.text_height = None
			
			self.draggable_items_areas.append(draggable_items_area)
		

		# global placeholders where to drag items
		# only present for non-map associations
		goal_nodes = xml_global_parameters_node.getElementsByTagName("goal")

		self.goals = []
		for goal_node in goal_nodes :
		
			goal = Goal(goal_node)
		
			self.goals.append(goal)
		

		# space to display text legends
		text_legend_nodes = xml_global_parameters_node.getElementsByTagName("text_legend_area")

		if (len(text_legend_nodes) > 0) :
			self.text_legend_area = xml_funcs.get_box(text_legend_nodes[0])
		else :
			self.text_legend_area = None



		
		# Map information (only present if type_to_associate is "on_map")
		map_nodes = xml_global_parameters_node.getElementsByTagName("map")

		if (len(map_nodes) > 0) :
			map_node = map_nodes[0]
			
			(self.map_pos, self.map_size) = xml_funcs.get_box(map_node)
			
			map_filenames = map_node.getElementsByTagName("image")
			
			self.map_filename = xml_funcs.getText(map_filenames[0])



		# reading associations
		
		associations_node = self.xml_game_node.getElementsByTagName("associations")[0]
		
		associations = associations_node.getElementsByTagName("association")
		
		self.associations = []
		
		for association_node in associations :
			
			association = Association(self)
			
			image_nodes = association_node.getElementsByTagName("image")
			
	
			for image_node in image_nodes :
				
				if (image_node.parentNode == association_node) :
					# we ignore images which are not direct children
					# of the association (ie: images inside goal for instance)
				
					image_filename = xml_funcs.getText(image_node)
					

					if (cmp(image_node.getAttribute("type"), "") == 0) :
						if (cmp(self.type_to_associate, "image") == 0) :
							association.images_to_associate.append(image_filename)
						if (cmp(self.type_to_drag, "image") == 0) :
							association.images_to_drag.append(image_filename)
							
					elif (cmp(image_node.getAttribute("type"), "to_associate") == 0) :
						if ((cmp(self.type_to_associate, "image") == 0) or (cmp(self.type_to_associate, "image_on_map") == 0)) :
							association.images_to_associate.append(image_filename)
	
							if (cmp(self.type_to_associate, "image_on_map") == 0) :
								association.image_to_associate_pos_size = xml_funcs.get_box(image_node)
	
						else :
							common.warn(image_filename + " is supposed to be associated, but the game is not supposed to associate images")
							
					elif (cmp(image_node.getAttribute("type"), "to_drag") == 0) :
						if ((cmp(self.type_to_drag, "image") == 0) or (cmp(self.type_to_associate, "image_on_map") == 0)) :
							association.images_to_drag.append(image_filename)
						else :
							common.warn(image_filename + " is supposed to be dragged and dropped, but the game is not supposed to drag and drop images")
						
					# find potential associated sounds

					sound_nodes = image_node.getElementsByTagName("sound")
					
					for sound_node in sound_nodes :
						sound_node_lang = sound_node.getAttribute("lang")

						if ((cmp(sound_node_lang, "") == 0) or (cmp(sound_node_lang, main_language) == 0)) :

							association.append_image_sound(image_filename, xml_funcs.getText(sound_node))

					# find potential associated text legends
					# only texts with no lang tag or with lang tag = main_language are used
					text_legend_nodes = image_node.getElementsByTagName("text")
					
					for text_legend_node in text_legend_nodes :

						if ((cmp(text_legend_node.getAttribute("lang"), main_language) == 0) or (cmp(text_legend_node.getAttribute("key"), "") != 0)) :

							association.append_image_text_legend(image_filename, xml_funcs.getText(text_legend_node, self.i18n_dict, main_language))					

					
			
			text_nodes = association_node.getElementsByTagName("text")
			
			for text_node in text_nodes :

				if (text_node.parentNode == association_node) :
				
					text_lang = text_node.getAttribute("lang")

					text_should_be_added = False

					if (text_lang == "") :
						# if no lang attribute defined, the text is included
						text_should_be_added = True
					else :
						# if there is a lang attribute, we add the text only
						# if this language is the main language
						if (cmp(text_lang, main_language) == 0) :
							text_should_be_added = True

						# the text node might be a dictionary key, in this case we also add it :
						if (cmp(text_node.getAttribute("key"), "") != 0) :
							text_should_be_added = True
					

					if (text_should_be_added == True) :

						text = xml_funcs.getText(text_node, self.i18n_dict, main_language)
					
						association.texts.append(text)
					
						sound_nodes = text_node.getElementsByTagName("sound")
					
						for sound_node in sound_nodes :

							sound_node_lang = sound_node.getAttribute("lang")

							if ((cmp(sound_node_lang, "") == 0) or (cmp(sound_node_lang, main_language) == 0)) :
	
								association.append_text_sound(text, xml_funcs.getText(sound_node))
				
				
			# goals local to only one association
			
			goal_nodes = association_node.getElementsByTagName("goal")
			
			if (len(goal_nodes) > 0) :
			
				# TODO : allow for more than a goal ?
				goal_node = goal_nodes[0]
				
				if (cmp(self.type_to_associate, "image_on_map") == 0) :
					
					goal = Goal(goal_node)

					# TODO : remove from here ?				
					self.goals.append(goal)
					
					# TODO : put more than one goal
					association.associated_goal = goal
			
				else :
					common.warn("<goal> found inside an association whereas type to associate is not image_on_map")
			
			
			
			
			self.associations.append(association)
			
		
		self.associations = common.randomize_list(self.associations)
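This example, and the reformatted copy that follows, is Python 2 code: it relies on the built-in cmp(), which was removed in Python 3, and only ever tests its result against 0, i.e. plain (in)equality. A minimal shim under that assumption, not part of the original project:

def cmp(a, b):
    # Classic three-way comparison: negative, zero or positive depending on
    # how a orders relative to b.
    return (a > b) - (a < b)

With the shim in place, cmp(sort.lower(), "yes") == 0 behaves exactly like sort.lower() == "yes".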
Example #34
0
    def __init__(self, game, main_language, score):

        xml_game_node = game.xml_game_node

        self.globalvars = game.globalvars

        # here we create a new game object to play with
        # thus the original game won't be altered

        common.Game.__init__(self, self.globalvars, xml_game_node, game.level,
                             score)

        self.score.enable_count_good()
        self.score.enable_count_wrong()
        self.score.enable_record_time()

        self.xml_game_node = xml_game_node

        # reading global parameters
        xml_global_parameters_node = self.xml_game_node.getElementsByTagName(
            "global_parameters")[0]

        # first of all, game setup
        xml_game_setup_node = self.xml_game_node.getElementsByTagName(
            "game_setup")[0]

        # possible values for types :
        # image
        # image_on_map (only to associate)
        # text
        self.type_to_associate = xml_funcs.getText(
            xml_game_setup_node.getElementsByTagName("type_to_associate")[0])

        type_to_drag_node = xml_game_setup_node.getElementsByTagName(
            "type_to_drag")[0]

        self.type_to_drag = xml_funcs.getText(type_to_drag_node)
        sort = type_to_drag_node.getAttribute("sort")

        if (sort == None):
            self.sort = False
        elif (cmp(sort.lower(), "yes") == 0):
            self.sort = True
        else:
            self.sort = False

        # the min and max amounts of associations (good + bad ones) which will become draggable items
        self.min_draggable = xml_funcs.getInt(
            xml_game_setup_node.getElementsByTagName("min_draggable")[0])
        self.max_draggable = xml_funcs.getInt(
            xml_game_setup_node.getElementsByTagName("max_draggable")[0])

        item_to_associate_parameters_nodes = xml_global_parameters_node.getElementsByTagName(
            "item_to_associate")

        self.item_to_associate_parameters = []
        for item_to_associate_parameters_node in item_to_associate_parameters_nodes:
            self.item_to_associate_parameters.append(
                xml_funcs.get_box(item_to_associate_parameters_node))

        draggable_items_area_nodes = xml_global_parameters_node.getElementsByTagName(
            "draggable_items_area")
        self.draggable_items_areas = []

        for draggable_items_area_node in draggable_items_area_nodes:

            spacing_x_nodes = draggable_items_area_node.getElementsByTagName(
                "spacing_x")
            spacing_y_nodes = draggable_items_area_node.getElementsByTagName(
                "spacing_y")

            if (len(spacing_x_nodes) > 0):
                spacing_x = xml_funcs.getFloat(spacing_x_nodes[0])
            else:
                spacing_x = 0

            if (len(spacing_y_nodes) > 0):
                spacing_y = xml_funcs.getFloat(spacing_y_nodes[0])
            else:
                spacing_y = 0

            draggable_items_area = (
                xml_funcs.get_box(draggable_items_area_node),
                xml_funcs.getInt(
                    draggable_items_area_node.getElementsByTagName("amount_x")
                    [0]),
                xml_funcs.getInt(
                    draggable_items_area_node.getElementsByTagName("amount_y")
                    [0]), spacing_x, spacing_y)

            # TODO: make this under each area

            text_height_nodes = draggable_items_area_node.getElementsByTagName(
                "font_height")
            if (len(text_height_nodes) > 0):
                self.text_height = xml_funcs.getInt(text_height_nodes[0])
            else:
                self.text_height = None

            self.draggable_items_areas.append(draggable_items_area)

        # global placeholders where to drag items
        # only present for non-map associations
        goal_nodes = xml_global_parameters_node.getElementsByTagName("goal")

        self.goals = []
        for goal_node in goal_nodes:

            goal = Goal(goal_node)

            self.goals.append(goal)

        # space to display text legends
        text_legend_nodes = xml_global_parameters_node.getElementsByTagName(
            "text_legend_area")

        if (len(text_legend_nodes) > 0):
            self.text_legend_area = xml_funcs.get_box(text_legend_nodes[0])
        else:
            self.text_legend_area = None

        # Map information (only present if type_to_associate is "on_map")
        map_nodes = xml_global_parameters_node.getElementsByTagName("map")

        if (len(map_nodes) > 0):
            map_node = map_nodes[0]

            (self.map_pos, self.map_size) = xml_funcs.get_box(map_node)

            map_filenames = map_node.getElementsByTagName("image")

            self.map_filename = xml_funcs.getText(map_filenames[0])

        # reading associations

        associations_node = self.xml_game_node.getElementsByTagName(
            "associations")[0]

        associations = associations_node.getElementsByTagName("association")

        self.associations = []

        for association_node in associations:

            association = Association(self)

            image_nodes = association_node.getElementsByTagName("image")

            for image_node in image_nodes:

                if (image_node.parentNode == association_node):
                    # we ignore images which are not direct children
                    # of the association (ie: images inside goal for instance)

                    image_filename = xml_funcs.getText(image_node)

                    if (cmp(image_node.getAttribute("type"), "") == 0):
                        if (cmp(self.type_to_associate, "image") == 0):
                            association.images_to_associate.append(
                                image_filename)
                        if (cmp(self.type_to_drag, "image") == 0):
                            association.images_to_drag.append(image_filename)

                    elif (cmp(image_node.getAttribute("type"),
                              "to_associate") == 0):
                        if ((cmp(self.type_to_associate, "image") == 0)
                                or (cmp(self.type_to_associate, "image_on_map")
                                    == 0)):
                            association.images_to_associate.append(
                                image_filename)

                            if (cmp(self.type_to_associate,
                                    "image_on_map") == 0):
                                association.image_to_associate_pos_size = xml_funcs.get_box(
                                    image_node)

                        else:
                            common.warn(
                                image_filename +
                                " is supposed to be associated, but the game is not supposed to associate images"
                            )

                    elif (cmp(image_node.getAttribute("type"),
                              "to_drag") == 0):
                        if ((cmp(self.type_to_drag, "image") == 0) or (cmp(
                                self.type_to_associate, "image_on_map") == 0)):
                            association.images_to_drag.append(image_filename)
                        else:
                            common.warn(
                                image_filename +
                                " is supposed to be dragged and dropped, but the game is not supposed to drag an drop images"
                            )

                    # find potential associated sounds

                    sound_nodes = image_node.getElementsByTagName("sound")

                    for sound_node in sound_nodes:
                        sound_node_lang = sound_node.getAttribute("lang")

                        if ((cmp(sound_node_lang, "") == 0)
                                or (cmp(sound_node_lang, main_language) == 0)):

                            association.append_image_sound(
                                image_filename, xml_funcs.getText(sound_node))

                    # find potential associated text legends
                    # only texts with no lang tag or with lang tag = main_language are used
                    text_legend_nodes = image_node.getElementsByTagName("text")

                    for text_legend_node in text_legend_nodes:

                        if ((cmp(text_legend_node.getAttribute("lang"),
                                 main_language) == 0)
                                or (cmp(text_legend_node.getAttribute("key"),
                                        "") != 0)):

                            association.append_image_text_legend(
                                image_filename,
                                xml_funcs.getText(text_legend_node,
                                                  self.i18n_dict,
                                                  main_language))

            text_nodes = association_node.getElementsByTagName("text")

            for text_node in text_nodes:

                if (text_node.parentNode == association_node):

                    text_lang = text_node.getAttribute("lang")

                    text_should_be_added = False

                    if (text_lang == ""):
                        # if no lang attribute defined, the text is included
                        text_should_be_added = True
                    else:
                        # if there is a lang attribute, we add the text only
                        # if this language is the main language
                        if (cmp(text_lang, main_language) == 0):
                            text_should_be_added = True

                        # the text node might be a dictionary key, in this case we also add it :
                        if (cmp(text_node.getAttribute("key"), "") != 0):
                            text_should_be_added = True

                    if (text_should_be_added == True):

                        text = xml_funcs.getText(text_node, self.i18n_dict,
                                                 main_language)

                        association.texts.append(text)

                        sound_nodes = text_node.getElementsByTagName("sound")

                        for sound_node in sound_nodes:

                            sound_node_lang = sound_node.getAttribute("lang")

                            if ((cmp(sound_node_lang, "") == 0) or
                                (cmp(sound_node_lang, main_language) == 0)):

                                association.append_text_sound(
                                    text, xml_funcs.getText(sound_node))

            # goals local to only one association

            goal_nodes = association_node.getElementsByTagName("goal")

            if (len(goal_nodes) > 0):

                # TODO : allow for more than a goal ?
                goal_node = goal_nodes[0]

                if (cmp(self.type_to_associate, "image_on_map") == 0):

                    goal = Goal(goal_node)

                    # TODO : remove from here ?
                    self.goals.append(goal)

                    # TODO : put more than one goal
                    association.associated_goal = goal

                else:
                    common.warn(
                        "<goal> found inside an association whereas type to associate is not image_on_map"
                    )

            self.associations.append(association)

        self.associations = common.randomize_list(self.associations)
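Both versions of this constructor call into an xml_funcs module that is not shown, including a richer getText(node, i18n_dict, language) variant for translated nodes and a get_box() helper whose box format is not visible here. A minimal sketch of the simplest helpers, assuming xml.dom.minidom elements:

def getText(node):
    # Concatenate and strip the direct text children of a DOM element.
    return "".join(child.data for child in node.childNodes
                   if child.nodeType == child.TEXT_NODE).strip()

def getInt(node):
    # e.g. getInt(xml_game_setup_node.getElementsByTagName("min_draggable")[0])
    return int(getText(node))

def getFloat(node):
    return float(getText(node))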
Example #35
0
def codingstyle(files, enable_reformat, check_lgpl, check_commits_date):
    source_patterns = common.get_option('codingstyle-hook.source-patterns', default='*.cpp *.cxx *.c').split()
    header_patterns = common.get_option('codingstyle-hook.header-patterns', default='*.hpp *.hxx *.h').split()
    misc_patterns = common.get_option('codingstyle-hook.misc-patterns', default='*.cmake *.txt *.xml *.json').split()

    code_patterns = source_patterns + header_patterns
    include_patterns = code_patterns + misc_patterns

    sort_includes = common.get_option('codingstyle-hook.sort-includes', default="true", type='--bool') == "true"

    global repoRoot
    repoRoot = common.get_repo_root()

    if repoRoot is None:
        common.warn("Cannot find 'fw4spl' repository structure")
        parent_repo = ""
    else:
        parent_repo = os.path.abspath(os.path.join(repoRoot, os.pardir))

    fw4spl_configured_projects = common.get_option('codingstyle-hook.additional-projects', default=None)
    fw4spl_projects = []

    if fw4spl_configured_projects is None:
        # no additional-projects specified in config file. Default is parent repository folder
        fw4spl_projects.append(parent_repo)
    else:
        fw4spl_projects = fw4spl_configured_projects.split(";")
        # adds current repository folder to the additional-projects specified in config file.
        fw4spl_projects.append(repoRoot)
        # normalize pathname
        fw4spl_projects = list(map(os.path.normpath, fw4spl_projects))
        # remove duplicates
        fw4spl_projects = list(set(fw4spl_projects))

    global UNCRUSTIFY_PATH

    if common.g_uncrustify_path_arg is not None and len(common.g_uncrustify_path_arg) > 0:
        UNCRUSTIFY_PATH = common.g_uncrustify_path_arg
    else:
        UNCRUSTIFY_PATH = common.get_option('codingstyle-hook.uncrustify-path', default=UNCRUSTIFY_PATH,
                                            type='--path').strip()

    common.note('Using uncrustify: ' + UNCRUSTIFY_PATH)

    if common.execute_command(UNCRUSTIFY_PATH + ' -v -q').status != 0:
        common.error('Failed to launch uncrustify.\n')
        return []

    checked = set()

    reformatted_list = []
    sortincludes.find_libraries_and_bundles(fw4spl_projects)

    ret = False
    count = 0
    reformat_count = 0
    for f in files:
        if f in checked or not any(f.fnmatch(p) for p in include_patterns):
            continue

        content = f.contents
        if not common.binary(content):

            # Do this last because contents of the file will be modified by uncrustify
            # Thus the variable content will no longer reflect the real content of the file
            file_path = os.path.join(repoRoot, f.path)
            if os.path.isfile(file_path):
                res = format_file(file_path, enable_reformat, code_patterns, header_patterns, misc_patterns, check_lgpl,
                                  sort_includes, f.status, check_commits_date)
                count += 1
                if res == FormatReturn.Modified:
                    reformatted_list.append(f.path)
                    reformat_count += 1
                elif res == FormatReturn.Error:
                    # Error in reformatting
                    ret = True

        checked.add(f)

    common.note('%d file(s) checked, %d file(s) reformatted.' % (count, reformat_count))

    return ret, reformatted_list
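format_file() and the FormatReturn codes it returns are defined elsewhere in the hook; a minimal sketch of the result type assumed by the loop above, with three outcomes (unchanged, modified, error):

class FormatReturn(object):
    # Assumed result codes for format_file(); the real definition may differ.
    NotModified = 0
    Modified = 1
    Error = 2

codingstyle() only reacts to Modified and Error, so any other value counts as checked but unchanged.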
Example #36
0
 def run(self, index):
     """ Runs package for content discovery and processing"""
     # check if forecast workflow is enabled
     if not common.setting('Enabled'):
         return
     # check provider configuration
     provider = self.getProviderByCode(common.setting('ProviderCode'))
     common.debug("Found provider to run forecast workflow: %s" % provider)
     if provider is None:
         common.NotificationMsg(32202, 15000)
         return
     if provider is not None and (
         (common.setting('APIKey') == ''
          or common.setting('APIKey') is None)
             and common.any2bool(common.setting("ShowAPIKeyOption"))):
         common.NotificationMsg(32123, 15000)
         return
     # validate provider configuration
     if common.any2bool(common.setting("ShowAPIKeyOption")):
         try:
             provider.validate()
             common.debug(
                 "Content provider is valid, running weather forecast workflow"
             )
         except BaseException as err:
             common.debug(
                 "Content provider is invalid, reset forecast skin properties: %s"
                 % str(err))
             common.NotificationMsg(32203, 20000)
             provider.clear()
             return
     # normalize locations
     count = 0
     found = False
     for id in range(1, 6):
         locname = common.setting('Location%iAction' % id)
         locid = common.setting('Location%i' % id)
         if not found and (locname != '' and locid != ''):
             count += 1
         elif not found and (locname == '' or locid == ''):
             found = True
         if found:
             common.setSkinProperty(12600, 'Location%i' % id)
             common.setsetting('Location%iAction' % id)
             common.setsetting('Location%i' % id)
         else:
             common.setSkinProperty(12600, 'Location%i' % id, locname)
     common.setSkinProperty(12600, 'Locations', str(count))
     common.debug("Active locations: %s" % str(count))
     # identify the right location
     if index is None:
         common.debug(
             'Run GeoIP location discovery due to missing configuration')
         locname, locid = provider.geoip()
     else:
         common.debug("Using location index: %s" % str(index))
         locname = common.setting('Location%sAction' % str(index))
         locid = common.setting('Location%s' % str(index))
     if locid == '' and common.any2int(index) > 1:
         common.debug(
             'Trying first location instead, due to invalid index defined in previous configuration'
         )
         locname = common.setting('Location1Action')
         locid = common.setting('Location1')
     if locid == '':
         common.debug(
             'Run GeoIP location discovery due to wrong configuration')
         locname, locid = provider.geoip()
     # run forecast workflow
     if locname != '':
         # reset skin properties when the location is changed
         if locid != provider.skininfo("Current.Location"):
             provider.clear()
         # publish provider details
         provider.skinproperty('WeatherProvider', provider.name())
         if os.path.isfile(
                 common.path('resources', 'media',
                             provider.code() + '.png')):
             provider.skinproperty(
                 'WeatherProviderLogo',
                 common.path('resources', 'media',
                             provider.code() + '.png'))
         else:
             provider.skinproperty('WeatherProviderLogo',
                                   common.path('icon.png'))
         # call provider forecast
         common.debug('Call forecast for location %s (%s)' %
                      (locname, locid))
         provider.forecast(locname, locid)
     else:
         common.warn('No location found or configured')
         provider.clear()
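run() drives a provider object whose interface is only implied by its call sites. A sketch of the assumed contract, with method names taken from the code above and everything else hypothetical:

class ForecastProvider:
    # Assumed provider contract; the concrete classes in the add-on may differ.
    def validate(self): ...           # raise if the configuration/API key is unusable
    def geoip(self): ...              # return a (location name, location id) tuple
    def forecast(self, locname, locid): ...  # fetch data and publish skin properties
    def clear(self): ...              # reset previously published skin properties
    def code(self): ...               # short provider code (also used for the logo filename)
    def name(self): ...               # human-readable provider name
    def skininfo(self, key): ...      # read a previously published skin property
    def skinproperty(self, key, value=None): ...  # write a skin property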
Example #37
0
 def backup(self):
     self.status = 0
     # check if remote path exists
     if self.remoteFS.exists(self.remoteFS.RootPath):
         # may be data in here already
         common.debug("Remote path exists - may have old files in it",
                      "SystemRecovery")
     else:
         # make the remote directory
         self.remoteFS.mkdir(self.remoteFS.RootPath)
     # create a validation file for backup rotation
     if not self._createValidationFile():
         # we may not be able to write to this destination for some reason
         common.error(
             "Validation file can not be created, backup process skipped")
         self.status = -1
         return
     common.debug("Creating files list", "SystemRecovery")
     allFiles = []
     fileManager = FileManager(self.localFS)
     # go through each of the user selected items and write them to the backup store
     if common.setting('backup_addons'):
         fileManager.addFile("-" + common.path('special://home/addons'))
         fileManager.walkTree(common.path('special://home/addons'))
     fileManager.addFile("-" + common.path('special://home/userdata'))
     if common.setting('backup_addon_data'):
         fileManager.addFile(
             "-" + common.path('special://home/userdata/addon_data'))
         fileManager.walkTree(
             common.path('special://home/userdata/addon_data'))
     if common.setting('backup_database'):
         fileManager.addFile(
             "-" + common.path('special://home/userdata/Database'))
         fileManager.walkTree(
             common.path('special://home/userdata/Database'))
     if common.setting("backup_playlists"):
         fileManager.addFile(
             "-" + common.path('special://home/userdata/playlists'))
         fileManager.walkTree(
             common.path('special://home/userdata/playlists'))
     if common.setting('backup_profiles'):
         fileManager.addFile(
             "-" + common.path('special://home/userdata/profiles'))
         fileManager.walkTree(
             common.path('special://home/userdata/profiles'))
     if common.setting("backup_thumbnails"):
         fileManager.addFile(
             "-" + common.path('special://home/userdata/Thumbnails'))
         fileManager.walkTree(
             common.path('special://home/userdata/Thumbnails'))
     if common.setting("backup_config"):
         fileManager.addFile("-" +
                             common.path('special://home/userdata/keymaps'))
         fileManager.walkTree(
             common.path('special://home/userdata/keymaps'))
         fileManager.addFile(
             "-" + common.path('special://home/userdata/peripheral_data'))
         fileManager.walkTree(
             common.path('special://home/userdata/peripheral_data'))
         fileManager.addFile('-' +
                             common.path('special://home/userdata/library'))
         fileManager.walkTree(
             common.path('special://home/userdata/library'))
         # this part is an oddity
         dirs, configFiles = self.localFS.listdir(
             common.path('special://home/userdata/'))
         for aFile in configFiles:
             if aFile.endswith(".xml"):
                 fileManager.addFile(
                     common.path('special://home/userdata/') + aFile)
     # add to array
     allFiles.append({
         "source": self.localFS.RootPath,
         "dest": self.remoteFS.RootPath,
         "files": fileManager.getFiles()
     })
     orig_base_path = self.remoteFS.RootPath
     # check if there are custom directories
     if common.setting('custom_dir_1_enable') and common.setting(
             'backup_custom_dir_1'
     ) is not None and common.setting('backup_custom_dir_1') != '':
         # create a special remote path with hash
         self.localFS.setRootPath(common.setting('backup_custom_dir_1'))
         fileManager.addFile("-custom_" +
                             self._createCRC(self.localFS.RootPath))
         # walk the directory
         fileManager.walkTree(self.localFS.RootPath)
         allFiles.append({
             "source":
             self.localFS.RootPath,
             "dest":
             self.remoteFS.RootPath + "custom_" +
             self._createCRC(self.localFS.RootPath),
             "files":
             fileManager.getFiles()
         })
     if common.setting('custom_dir_2_enable') and common.setting(
             'backup_custom_dir_2'
     ) is not None and common.setting('backup_custom_dir_2') != '':
         # create a special remote path with hash
         self.localFS.setRootPath(common.setting('backup_custom_dir_2'))
         fileManager.addFile("-custom_" +
                             self._createCRC(self.localFS.RootPath))
         # walk the directory
         fileManager.walkTree(self.localFS.RootPath)
         allFiles.append({
             "source":
             self.localFS.RootPath,
             "dest":
             self.remoteFS.RootPath + "custom_" +
             self._createCRC(self.localFS.RootPath),
             "files":
             fileManager.getFiles()
         })
     if common.setting('custom_dir_3_enable') and common.setting(
             'backup_custom_dir_3'
     ) is not None and common.setting('backup_custom_dir_3') != '':
         # create a special remote path with hash
         self.localFS.setRootPath(common.setting('backup_custom_dir_3'))
         fileManager.addFile("-custom_" +
                             self._createCRC(self.localFS.RootPath))
         # walk the directory
         fileManager.walkTree(self.localFS.RootPath)
         allFiles.append({
             "source":
             self.localFS.RootPath,
             "dest":
             self.remoteFS.RootPath + "custom_" +
             self._createCRC(self.localFS.RootPath),
             "files":
             fileManager.getFiles()
         })
     # backup all the files
     for fileGroup in allFiles:
         self.localFS.setRootPath(fileGroup['source'])
         self.remoteFS.setRootPath(fileGroup['dest'])
         filesCopied = self.backupFiles(fileGroup['files'], self.localFS,
                                        self.remoteFS)
         if not filesCopied:
             common.warn(
                 "Not all files were copied: %s" % self.localFS.RootPath,
                 "SystemRecovery")
             self.status += 1
     # reset remote and xbmc vfs
     self.localFS.setRootPath("special://home/")
     self.remoteFS.setRootPath(orig_base_path)
     # send the zip file to the real remote vfs
     if common.setting("compress_backups"):
         zip_name = self.remoteFS.RootPath[:-1] + ".zip"
         self.remoteFS.cleanup()
         self.localFS.rename(common.path("special://temp/backup.zip"),
                             common.path("special://temp/" + zip_name))
         fileManager.addFile(common.path("special://temp/" + zip_name))
         # set root to data dir home
         self.localFS.setRootPath(common.path("special://temp/"))
         self.remoteFS = self.savedRemoteFS
         fileCopied = self.backupFiles(fileManager.getFiles(), self.localFS,
                                       self.remoteFS)
         if not fileCopied:
              # zip archive copy failed, inform the user
             common.warn("The destination may not be writeable: %s" %
                         self.remoteFS.RootPath)
              self.status = -1
             return
         # delete the temp zip file
         self.localFS.rmfile(common.path("special://temp/" + zip_name))
     # remove old backups
     self._rotateBackups()
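Each custom backup directory is keyed by _createCRC(), which is not shown; its result is appended to the remote root as "custom_" + hash. A plausible stand-in (create_crc is a hypothetical name), assuming a CRC32 of the path rendered as fixed-width hex:

import zlib

def create_crc(path):
    # Hash the directory path so each custom directory gets a stable,
    # filesystem-safe suffix under the remote root.
    return "%08x" % (zlib.crc32(path.encode("utf-8")) & 0xFFFFFFFF)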