Code Example #1
File: _manage.py  Project: B-Rich/entropy
    def _get_config_protect(self, entropy_repository, package_id, mask = False,
                            _metadata = None):
        """
        Return configuration protection (or mask) metadata for the given
        package.
        This method should not be used as source for storing metadata into
        repositories since the returned objects may not be decoded in utf-8.
        Data returned by this method is expected to be used only by internal
        functions.
        """
        misc_data = self._entropy.ClientSettings()['misc']

        if mask:
            paths = entropy_repository.retrieveProtectMask(package_id).split()
            misc_key = "configprotectmask"
        else:
            paths = entropy_repository.retrieveProtect(package_id).split()
            misc_key = "configprotect"

        if _metadata is None:
            _metadata = self._meta
        root = self._get_system_root(_metadata)
        config = set(("%s%s" % (root, path) for path in paths))
        config.update(misc_data[misc_key])

        # os.* methods in Python 2.x do not expect unicode strings
        # This set of data is only used by _handle_config_protect atm.
        if not const_is_python3():
            config = set((const_convert_to_rawstring(x) for x in config))

        return config
Code Example #2
File: dump.py  Project: B-Rich/entropy
def serialize(myobj, ser_f, do_seek = True):
    """
    Serialize object to ser_f (file)

    @param myobj: Python object to serialize
    @type myobj: any Python picklable object
    @param ser_f: file object to write to
    @type ser_f: file object
    @keyword do_seek: move file cursor back to the beginning
        of ser_f
    @type do_seek: bool
    @return: file object where data has been written
    @rtype: file object
    @raise RuntimeError: caused by pickle.dump in case of
        system errors
    @raise EOFError: caused by pickle.dump in case of
        race conditions on multi-processing or multi-threading
    @raise IOError: caused by pickle.dump in case of
        race conditions on multi-processing or multi-threading
    @raise pickle.PicklingError: when object cannot be pickled
    """
    if const_is_python3():
        pickle.dump(myobj, ser_f, protocol = COMPAT_PICKLE_PROTOCOL,
            fix_imports = True)
    else:
        pickle.dump(myobj, ser_f)
    ser_f.flush()
    if do_seek:
        ser_f.seek(0)
    return ser_f
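
A minimal round-trip sketch for the two dump.py helpers (unserialize() is shown in a later example). The entropy.dump import path is an assumption based on the File header above, not something stated by this listing.

import tempfile

from entropy.dump import serialize, unserialize  # assumed import path

payload = {"name": "app-misc/example", "slot": "0"}

with tempfile.TemporaryFile() as tmp_f:
    # serialize() flushes and, with do_seek=True (the default), rewinds the
    # file object, so it can be handed straight to unserialize().
    serialize(payload, tmp_f)
    restored = unserialize(tmp_f)

assert restored == payload
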
Code Example #3
    def _get_config_protect(self,
                            entropy_repository,
                            package_id,
                            mask=False,
                            _metadata=None):
        """
        Return configuration protection (or mask) metadata for the given
        package.
        This method should not be used as source for storing metadata into
        repositories since the returned objects may not be decoded in utf-8.
        Data returned by this method is expected to be used only by internal
        functions.
        """
        misc_data = self._entropy.ClientSettings()['misc']

        if mask:
            paths = entropy_repository.retrieveProtectMask(package_id).split()
            misc_key = "configprotectmask"
        else:
            paths = entropy_repository.retrieveProtect(package_id).split()
            misc_key = "configprotect"

        if _metadata is None:
            _metadata = self._meta
        root = self._get_system_root(_metadata)
        config = set(("%s%s" % (root, path) for path in paths))
        config.update(misc_data[misc_key])

        # os.* methods in Python 2.x do not expect unicode strings
        # This set of data is only used by _handle_config_protect atm.
        if not const_is_python3():
            config = set((const_convert_to_rawstring(x) for x in config))

        return config
Code Example #4
File: output.py  Project: skwerlman/entropy
def _my_raw_input(txt = ''):
    try:
        import readline
    except ImportError:
        # not available? ignore
        pass

    if not txt:
        txt = ""
    if const_is_python3():
        try:
            response = input(darkgreen(txt))
        except UnicodeEncodeError:
            response = input(darkgreen(txt.encode('utf-8')))
    else:
        try:
            response = raw_input(darkgreen(txt))
        except UnicodeEncodeError:
            response = raw_input(darkgreen(txt.encode('utf-8')))
    _flush_stdouterr()

    # try to convert to unicode, because responses are stored that
    # way, fix bug #2006.
    if not const_isunicode(response):
        try:
            response = const_convert_to_unicode(response, enctype = "utf-8")
        except (UnicodeDecodeError, UnicodeEncodeError):
            # be fault tolerant, we just tried
            pass
    return response
Code Example #5
File: dump.py  Project: skwerlman/entropy
def serialize(myobj, ser_f, do_seek=True):
    """
    Serialize object to ser_f (file)

    @param myobj: Python object to serialize
    @type myobj: any Python picklable object
    @param ser_f: file object to write to
    @type ser_f: file object
    @keyword do_seek: move file cursor back to the beginning
        of ser_f
    @type do_seek: bool
    @return: file object where data has been written
    @rtype: file object
    @raise RuntimeError: caused by pickle.dump in case of
        system errors
    @raise EOFError: caused by pickle.dump in case of
        race conditions on multi-processing or multi-threading
    @raise IOError: caused by pickle.dump in case of
        race conditions on multi-processing or multi-threading
    @raise pickle.PicklingError: when object cannot be pickled
    """
    if const_is_python3():
        pickle.dump(myobj,
                    ser_f,
                    protocol=COMPAT_PICKLE_PROTOCOL,
                    fix_imports=True)
    else:
        pickle.dump(myobj, ser_f)
    ser_f.flush()
    if do_seek:
        ser_f.seek(0)
    return ser_f
Code Example #6
File: output.py  Project: skwerlman/entropy
    def option_chooser(option_data):
        mydict = {}
        counter = 1
        option_text, option_list = option_data
        cls.output(option_text)
        for item in option_list:
            mydict[counter] = item
            txt = "[%s] %s" % (darkgreen(str(counter)), blue(item),)
            cls.output(txt)
            counter += 1
        while True:
            try:
                if const_is_python3():
                    myresult = const_convert_to_unicode(
                        readtext("%s: " % (_('Selected number'),)),
                        enctype = "utf-8")
                else:
                    myresult = readtext(
                        "%s: " % (_('Selected number'),)).decode('utf-8')
            except UnicodeDecodeError:
                continue
            except UnicodeEncodeError:
                continue
            try:
                myresult = int(myresult)
            except ValueError:
                continue
            selected = mydict.get(myresult)
            if selected != None:
                return myresult, selected
Code Example #7
File: dump.py  Project: B-Rich/entropy
def loadobj(name, complete_path = False, dump_dir = None, aging_days = None):
    """
    Load object from a file
    @param name: name of the object to load
    @type name: string
    @keyword complete_path: determine whether name argument
        is a complete disk path to serialized object
    @type complete_path: bool
    @keyword dump_dir: alternative dump directory
    @type dump_dir: string
    @keyword aging_days: if int, consider the cached file invalid
        if older than aging_days.
    @type aging_days: int
    @return: object or None
    @rtype: any Python picklable object or None
    """
    if dump_dir is None:
        dump_dir = D_DIR

    while True:
        if complete_path:
            dmpfile = name
        else:
            dump_path = os.path.join(dump_dir, name)
            dmpfile = dump_path + D_EXT

        if aging_days is not None:
            cur_t = time.time()
            try:
                mtime = os.path.getmtime(dmpfile)
            except (IOError, OSError):
                mtime = 0.0
            if abs(cur_t - mtime) > (aging_days * 86400):
                # do not unlink since other consumers might
                # have different aging settings.
                #try:
                #    os.remove(dmpfile)
                #except (OSError, IOError):
                #    # did my best
                #    pass
                return None

        try:
            with open(dmpfile, "rb") as dmp_f:
                obj = None
                try:
                    if const_is_python3():
                        obj = pickle.load(dmp_f, fix_imports = True,
                            encoding = etpConst['conf_raw_encoding'])
                    else:
                        obj = pickle.load(dmp_f)
                except (ValueError, EOFError, IOError,
                    OSError, pickle.UnpicklingError, TypeError,
                    AttributeError, ImportError, SystemError,):
                    pass
                return obj
        except (IOError, OSError,):
            pass
        break
Code Example #8
    def __encode_url(self, url):
        if const_is_python3():
            import urllib.parse as encurl
        else:
            import urllib as encurl
        url = os.path.join(os.path.dirname(url),
            encurl.quote(os.path.basename(url)))
        return url
Code Example #9
File: interfaces.py  Project: jgarte/entropy
    def _dbus_to_unicode(self, dbus_string):
        """
        Convert dbus.String() to unicode object
        """
        if const_is_python3():
            return str(dbus_string)
        else:
            return dbus_string.decode(etpConst['conf_encoding'])
Code Example #10
File: fetchers.py  Project: Heather/entropy
    def __encode_url(self, url):
        if const_is_python3():
            import urllib.parse as encurl
        else:
            import urllib as encurl
        url = os.path.join(os.path.dirname(url),
            encurl.quote(os.path.basename(url)))
        return url
Code Example #11
File: xpak.py  Project: B-Rich/entropy
def encodeint(myint):
    """Takes a 4 byte integer and converts it into a string of 4 characters.
    Returns the characters in a string."""
    part1 = chr((myint >> 24 ) & 0x000000ff)
    part2 = chr((myint >> 16 ) & 0x000000ff)
    part3 = chr((myint >> 8 ) & 0x000000ff)
    part4 = chr(myint & 0x000000ff)
    if const_is_python3():
        return bytes(part1 + part2 + part3 + part4, 'raw_unicode_escape')
    else:
        return part1 + part2 + part3 + part4
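
Side note: encodeint() is a hand-rolled big-endian 32-bit packer. A self-contained Python 3 sketch of the equivalent encoding with the standard struct module (encode_int_be/decode_int_be are illustrative names, not part of the entropy API):

import struct

def encode_int_be(value):
    # Same byte layout encodeint() produces: most significant byte first,
    # four bytes total.
    return struct.pack(">I", value & 0xffffffff)

def decode_int_be(data):
    # Inverse operation, mirroring decodeint().
    return struct.unpack(">I", data)[0]

assert encode_int_be(0x01020304) == b"\x01\x02\x03\x04"
assert decode_int_be(encode_int_be(123456)) == 123456
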
Code Example #12
def encodeint(myint):
    """Takes a 4 byte integer and converts it into a string of 4 characters.
    Returns the characters in a string."""
    part1 = chr((myint >> 24) & 0x000000ff)
    part2 = chr((myint >> 16) & 0x000000ff)
    part3 = chr((myint >> 8) & 0x000000ff)
    part4 = chr(myint & 0x000000ff)
    if const_is_python3():
        return bytes(part1 + part2 + part3 + part4, 'raw_unicode_escape')
    else:
        return part1 + part2 + part3 + part4
Code Example #13
File: client.py  Project: dMaggot/entropy
    def _get_cache_key(self, method, params):
        """
        Return on disk cache file name as key, given a method name and its
        parameters.
        """
        sorted_data = [(x, params[x]) for x in sorted(params.keys())]
        hash_str = repr(sorted_data) + ", " + self._request_url
        if const_is_python3():
            hash_str = hash_str.encode("utf-8")
        sha = hashlib.sha1()
        sha.update(hash_str)
        return method + "_" + sha.hexdigest()
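
The method above boils down to hashing the sorted parameter list together with the request URL. A standalone sketch of the same idea (make_cache_key and its arguments are illustrative, not part of the entropy client API):

import hashlib

def make_cache_key(method, params, request_url):
    # Sort the parameters so the key is stable regardless of dict ordering,
    # then hash their repr() together with the target URL.
    sorted_data = [(k, params[k]) for k in sorted(params)]
    hash_str = repr(sorted_data) + ", " + request_url
    sha = hashlib.sha1()
    sha.update(hash_str.encode("utf-8"))
    return method + "_" + sha.hexdigest()

key = make_cache_key("service_available", {"arch": "amd64"},
                     "https://example.org/api")
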
Code Example #14
File: utils.py  Project: jgarte/entropy
def prepare_markup(text):
    """
    Convert text to raw bytestring to make GTK3 happy.
    """
    if const_is_python3():
        return text  # str()-strings just work

    if const_isunicode(text):
        return \
            const_convert_to_rawstring(
                text, from_enctype=etpConst['conf_encoding'])
    return text
Code Example #15
def _std_write(msg, stderr=False):
    if not const_isstring(msg):
        msg = repr(msg)
    obj = sys.stdout
    if stderr:
        obj = sys.stderr

    if const_is_python3() and not const_isunicode(msg):
        obj.flush()
        obj.buffer.write(msg)
        obj.flush()
        return

    try:
        obj.write(msg)
    except UnicodeEncodeError:
        msg = msg.encode('utf-8')
        if const_is_python3():
            obj.buffer.write(msg)
        else:
            obj.write(msg)
Code Example #16
File: client.py  Project: skwerlman/entropy
    def _get_cache_key(self, method, params):
        """
        Return on disk cache file name as key, given a method name and its
        parameters.
        """
        sorted_data = [(x, params[x]) for x in sorted(params.keys())]
        hash_str = repr(sorted_data) + ", " + self._request_url
        if const_is_python3():
            hash_str = hash_str.encode("utf-8")
        sha = hashlib.sha1()
        sha.update(hash_str)
        return method + "_" + sha.hexdigest()
Code Example #17
    def _get_config_protect_skip(self):
        """
        Return the configuration protection path set.
        """
        misc_settings = self._entropy.ClientSettings()['misc']
        protectskip = misc_settings['configprotectskip']

        if not const_is_python3():
            protectskip = set((const_convert_to_rawstring(
                x, from_enctype=etpConst['conf_encoding'])
                               for x in misc_settings['configprotectskip']))

        return protectskip
Code Example #18
File: _manage.py  Project: B-Rich/entropy
    def _get_config_protect_skip(self):
        """
        Return the configuration protection path set.
        """
        misc_settings = self._entropy.ClientSettings()['misc']
        protectskip = misc_settings['configprotectskip']

        if not const_is_python3():
            protectskip = set((
                const_convert_to_rawstring(
                    x, from_enctype = etpConst['conf_encoding']) for x in
                misc_settings['configprotectskip']))

        return protectskip
Code Example #19
File: dump.py  Project: B-Rich/entropy
def unserialize(serial_f):
    """
    Unserialize file to object (file)

    @param serial_f: file object which data will be read from
    @type serial_f: file object
    @return: rebuilt object
    @rtype: any Python picklable object
    @raise pickle.UnpicklingError: when object cannot be recreated
    """
    if const_is_python3():
        return pickle.load(serial_f, fix_imports = True,
            encoding = etpConst['conf_raw_encoding'])
    else:
        return pickle.load(serial_f)
Code Example #20
File: output.py  Project: B-Rich/entropy
def _std_write(msg, stderr = False):
    if not const_isstring(msg):
        msg = repr(msg)
    obj = sys.stdout
    if stderr:
        obj = sys.stderr

    try:
        obj.write(msg)
    except UnicodeEncodeError:
        msg = msg.encode('utf-8')
        if const_is_python3():
            obj.buffer.write(msg)
        else:
            obj.write(msg)
Code Example #21
File: xpak.py  Project: B-Rich/entropy
def decodeint(mystring):
    """Takes a 4 byte string and converts it into a 4 byte integer.
    Returns an integer."""
    myint = 0
    if const_is_python3():
        myint = myint+mystring[3]
        myint = myint+(mystring[2] << 8)
        myint = myint+(mystring[1] << 16)
        myint = myint+(mystring[0] << 24)
    else:
        myint = myint+ord(mystring[3])
        myint = myint+(ord(mystring[2]) << 8)
        myint = myint+(ord(mystring[1]) << 16)
        myint = myint+(ord(mystring[0]) << 24)
    return myint
Code Example #22
File: dump.py  Project: B-Rich/entropy
def unserialize_string(mystring):
    """
    Unserialize pickle string to object

    @param mystring: data stream in string form to reconstruct
    @type mystring: string
    @return: reconstructed object
    @rtype: any Python picklable object
    @raise pickle.UnpicklingError: when object cannot be recreated
    """
    if const_is_python3():
        return pickle.loads(mystring, fix_imports = True,
            encoding = etpConst['conf_raw_encoding'])
    else:
        return pickle.loads(mystring)
Code Example #23
def decodeint(mystring):
    """Takes a 4 byte string and converts it into a 4 byte integer.
    Returns an integer."""
    myint = 0
    if const_is_python3():
        myint = myint + mystring[3]
        myint = myint + (mystring[2] << 8)
        myint = myint + (mystring[1] << 16)
        myint = myint + (mystring[0] << 24)
    else:
        myint = myint + ord(mystring[3])
        myint = myint + (ord(mystring[2]) << 8)
        myint = myint + (ord(mystring[1]) << 16)
        myint = myint + (ord(mystring[0]) << 24)
    return myint
Code Example #24
File: dump.py  Project: B-Rich/entropy
def serialize_string(myobj):
    """
    Serialize object to string

    @param myobj: object to serialize
    @type myobj: any Python picklable object
    @return: serialized string
    @rtype: string
    @raise pickle.PicklingError: when object cannot be pickled
    """
    if const_is_python3():
        return pickle.dumps(myobj, protocol = COMPAT_PICKLE_PROTOCOL,
            fix_imports = True, encoding = etpConst['conf_raw_encoding'])
    else:
        return pickle.dumps(myobj)
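
A minimal in-memory round trip with the two string helpers (unserialize_string() is shown in an earlier example). As before, the entropy.dump import path is an assumption based on the File headers:

from entropy.dump import serialize_string, unserialize_string  # assumed path

obj = {"packages": ["app-misc/one", "app-misc/two"], "revision": 3}
blob = serialize_string(obj)  # a pickled byte string under Python 3
assert unserialize_string(blob) == obj
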
Code Example #25
File: dump.py  Project: skwerlman/entropy
def unserialize_string(mystring):
    """
    Unserialize pickle string to object

    @param mystring: data stream in string form to reconstruct
    @type mystring: string
    @return: reconstructed object
    @rtype: any Python picklable object
    @raise pickle.UnpicklingError: when object cannot be recreated
    """
    if const_is_python3():
        return pickle.loads(mystring,
                            fix_imports=True,
                            encoding=etpConst['conf_raw_encoding'])
    else:
        return pickle.loads(mystring)
Code Example #26
File: dump.py  Project: skwerlman/entropy
def unserialize(serial_f):
    """
    Unserialize file to object (file)

    @param serial_f: file object which data will be read from
    @type serial_f: file object
    @return: rebuilt object
    @rtype: any Python picklable object
    @raise pickle.UnpicklingError: when object cannot be recreated
    """
    if const_is_python3():
        return pickle.load(serial_f,
                           fix_imports=True,
                           encoding=etpConst['conf_raw_encoding'])
    else:
        return pickle.load(serial_f)
Code Example #27
File: dump.py  Project: skwerlman/entropy
def serialize_string(myobj):
    """
    Serialize object to string

    @param myobj: object to serialize
    @type myobj: any Python picklable object
    @return: serialized string
    @rtype: string
    @raise pickle.PicklingError: when object cannot be pickled
    """
    if const_is_python3():
        return pickle.dumps(myobj,
                            protocol=COMPAT_PICKLE_PROTOCOL,
                            fix_imports=True,
                            encoding=etpConst['conf_raw_encoding'])
    else:
        return pickle.dumps(myobj)
Code Example #28
File: xpak.py  Project: B-Rich/entropy
def xpak_mem(mydata):
    """Create an xpack segement from a map object."""
    if const_is_python3():
        indexglob = b""
        dataglob = b""
    else:
        indexglob = ""
        dataglob = ""
    indexpos = 0
    datapos = 0

    for x, newglob in mydata.items():
        mydatasize = len(newglob)
        int_enc_dir = encodeint(len(x)) + x
        indexglob += int_enc_dir + encodeint(datapos) + encodeint(mydatasize)
        indexpos = indexpos + 4 + len(x) + 4 + 4
        dataglob = dataglob + newglob
        datapos += mydatasize
    return XPAKPACK \
    + encodeint(len(indexglob)) \
    + encodeint(len(dataglob)) \
    + indexglob \
    + dataglob \
    + XPAKSTOP
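
A small, hypothetical usage sketch for xpak_mem(). The import path is an assumption; under Python 3 both keys and values are expected to be byte strings, and the XPAKPACK/XPAKSTOP framing matches the literal markers used by suck_xpak() later in this listing:

from xpak import xpak_mem  # assumed import path, see the File header above

metadata = {
    b"CATEGORY": b"app-misc\n",
    b"PF": b"example-1.0\n",
}
segment = xpak_mem(metadata)
# The resulting blob is framed by the XPAKPACK / XPAKSTOP markers.
assert segment.startswith(b"XPAKPACK") and segment.endswith(b"XPAKSTOP")
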
Code Example #29
def xpak_mem(mydata):
    """Create an xpack segement from a map object."""
    if const_is_python3():
        indexglob = b""
        dataglob = b""
    else:
        indexglob = ""
        dataglob = ""
    indexpos = 0
    datapos = 0

    for x, newglob in mydata.items():
        mydatasize = len(newglob)
        int_enc_dir = encodeint(len(x)) + x
        indexglob += int_enc_dir + encodeint(datapos) + encodeint(mydatasize)
        indexpos = indexpos + 4 + len(x) + 4 + 4
        dataglob = dataglob + newglob
        datapos += mydatasize
    return XPAKPACK \
    + encodeint(len(indexglob)) \
    + encodeint(len(dataglob)) \
    + indexglob \
    + dataglob \
    + XPAKSTOP
Code Example #30
File: _manage.py  Project: B-Rich/entropy
    def _remove_content_from_system_loop(self, inst_repo, remove_atom,
                                         remove_content, remove_config,
                                         affected_directories,
                                         affected_infofiles,
                                         directories, directories_cache,
                                         preserved_mgr,
                                         not_removed_due_to_collisions,
                                         colliding_path_messages,
                                         automerge_metadata, col_protect,
                                         protect, mask, protectskip,
                                         sys_root):
        """
        Body of the _remove_content_from_system() method.
        """
        info_dirs = self._get_info_directories()

        # collect all the library paths to be preserved
        # in the final removal loop.
        preserved_lib_paths = set()

        if self.PRESERVED_LIBS_ENABLED:
            for _pkg_id, item, _ftype in remove_content:

                # determine without sys_root
                paths = self._handle_preserved_lib(
                    item, remove_atom, preserved_mgr)
                if paths is not None:
                    preserved_lib_paths.update(paths)

        for _pkg_id, item, _ftype in remove_content:

            if not item:
                continue # empty element??

            sys_root_item = sys_root + item
            sys_root_item_encoded = sys_root_item
            if not const_is_python3():
                # this is coming from the db, and it's pure utf-8
                sys_root_item_encoded = const_convert_to_rawstring(
                    sys_root_item,
                    from_enctype = etpConst['conf_raw_encoding'])

            # collision check
            if col_protect > 0:

                if inst_repo.isFileAvailable(item) \
                    and os.path.isfile(sys_root_item_encoded):

                    # in this way we filter out directories
                    colliding_path_messages.add(sys_root_item)
                    not_removed_due_to_collisions.add(item)
                    continue

            protected = False
            in_mask = False

            if not remove_config:

                protected_item_test = sys_root_item
                (in_mask, protected, _x,
                 do_continue) = self._handle_config_protect(
                     protect, mask, protectskip, None, protected_item_test,
                     do_allocation_check = False, do_quiet = True
                 )

                if do_continue:
                    protected = True

            # when files have not been modified by the user
            # and they are inside a config protect directory
            # we could even remove them directly
            if in_mask:

                oldprot_md5 = automerge_metadata.get(item)
                if oldprot_md5:

                    try:
                        in_system_md5 = entropy.tools.md5sum(
                            protected_item_test)
                    except (OSError, IOError) as err:
                        if err.errno != errno.ENOENT:
                            raise
                        in_system_md5 = "?"

                    if oldprot_md5 == in_system_md5:
                        prot_msg = _("Removing config file, never modified")
                        mytxt = "%s: %s" % (
                            darkgreen(prot_msg),
                            blue(item),
                        )
                        self._entropy.output(
                            mytxt,
                            importance = 1,
                            level = "info",
                            header = red("   ## ")
                        )
                        protected = False
                        do_continue = False

            # Is file or directory a protected item?
            if protected:
                self._entropy.logger.log(
                    "[Package]",
                    etpConst['logging']['verbose_loglevel_id'],
                    "[remove] Protecting config file: %s" % (sys_root_item,)
                )
                mytxt = "[%s] %s: %s" % (
                    red(_("remove")),
                    brown(_("Protecting config file")),
                    sys_root_item,
                )
                self._entropy.output(
                    mytxt,
                    importance = 1,
                    level = "warning",
                    header = red("   ## ")
                )
                continue

            try:
                os.lstat(sys_root_item_encoded)
            except OSError as err:
                if err.errno in (errno.ENOENT, errno.ENOTDIR):
                    continue # skip file, does not exist
                raise

            except UnicodeEncodeError:
                msg = _("This package contains a badly encoded file !!!")
                mytxt = brown(msg)
                self._entropy.output(
                    red("QA: ")+mytxt,
                    importance = 1,
                    level = "warning",
                    header = darkred("   ## ")
                )
                continue # file has a really bad encoding

            if os.path.isdir(sys_root_item_encoded) and \
                os.path.islink(sys_root_item_encoded):
                # S_ISDIR returns False for directory symlinks,
                # so os.path.isdir is used to catch valid directory symlinks
                if sys_root_item not in directories_cache:
                    # collect for Trigger
                    affected_directories.add(item)
                    directories.add((sys_root_item, "link"))
                    directories_cache.add(sys_root_item)
                continue

            if os.path.isdir(sys_root_item_encoded):
                # plain directory
                if sys_root_item not in directories_cache:
                    # collect for Trigger
                    affected_directories.add(item)
                    directories.add((sys_root_item, "dir"))
                    directories_cache.add(sys_root_item)
                continue

            # files, symlinks or not
            # just a file or symlink or broken
            # directory symlink (remove now)

            # skip file removal if item is a preserved library.
            if item in preserved_lib_paths:
                self._entropy.logger.log(
                    "[Package]",
                    etpConst['logging']['normal_loglevel_id'],
                    "[remove] skipping removal of: %s" % (sys_root_item,)
                )
                continue

            try:
                os.remove(sys_root_item_encoded)
            except OSError as err:
                self._entropy.logger.log(
                    "[Package]",
                    etpConst['logging']['normal_loglevel_id'],
                    "[remove] Unable to remove %s, error: %s" % (
                        sys_root_item, err,)
                )
                continue

            # collect for Trigger
            dir_name = os.path.dirname(item)
            affected_directories.add(dir_name)

            # account for info files, if any
            if dir_name in info_dirs:
                for _ext in self._INFO_EXTS:
                    if item.endswith(_ext):
                        affected_infofiles.add(item)
                        break

            # add its parent directory
            dirobj = const_convert_to_unicode(
                os.path.dirname(sys_root_item_encoded))
            if dirobj not in directories_cache:
                if os.path.isdir(dirobj) and os.path.islink(dirobj):
                    directories.add((dirobj, "link"))
                elif os.path.isdir(dirobj):
                    directories.add((dirobj, "dir"))

                directories_cache.add(dirobj)
Code Example #31
File: xpaktools.py  Project: jgarte/entropy
def suck_xpak(tbz2file, xpakpath):
    """
    docstring_title

    @param tbz2file: 
    @type tbz2file: 
    @param xpakpath: 
    @type xpakpath: 
    @return: 
    @rtype: 
    """
    if const_is_python3():
        xpak_end = b"XPAKSTOP"
        xpak_start = b"XPAKPACK"
    else:
        xpak_end = "XPAKSTOP"
        xpak_start = "XPAKPACK"

    chunk_size = 2048

    # Sanity check: makes the position calculations easier (seek_length below).
    assert len(xpak_end) == len(xpak_start)

    old, db = None, None
    try:
        old = open(tbz2file, "rb")
        db = open(xpakpath, "wb")
        data_start_position = None
        data_end_position = None
        # position old to the end
        old.seek(0, os.SEEK_END)
        n_bytes = old.tell()

        chunk_size = min(chunk_size, n_bytes)

        # position one chunk from the end, then continue
        seek_pos = n_bytes - chunk_size

        while True:
            old.seek(seek_pos, os.SEEK_SET)
            read_bytes = old.read(chunk_size)

            end_idx = read_bytes.rfind(xpak_end)
            if end_idx != -1:
                if data_start_position is None:
                    data_end_position = seek_pos + end_idx + len(xpak_end)
                    # avoid START after END in rfind()
                    read_bytes = read_bytes[:end_idx]

            start_idx = read_bytes.rfind(xpak_start)
            if start_idx != -1:
                if data_end_position is not None:
                    data_start_position = seek_pos + start_idx
                    break

            if seek_pos == 0:
                break

            # Make sure consecutive chunks overlap enough not to miss a
            # marker that straddles a chunk boundary.
            seek_length = chunk_size - (len(xpak_start) - 1)
            seek_pos -= seek_length
            if seek_pos < 0:
                seek_pos = 0

        if data_start_position is None:
            return False
        if data_end_position is None:
            return False

        # now write the found metadata to the output file,
        # from data_start_position up to data_end_position
        old.seek(data_start_position)
        to_read = data_end_position - data_start_position
        while to_read > 0:
            data = old.read(to_read)
            db.write(data)
            to_read -= len(data)
        return True

    finally:
        if old is not None:
            old.close()
        if db is not None:
            db.close()
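
A short, hypothetical invocation of suck_xpak(); the import path is an assumption. It copies the trailing xpak metadata segment of a Portage binary package into a standalone file:

from xpaktools import suck_xpak  # assumed import path

found = suck_xpak("/var/cache/binpkgs/app-misc/example-1.0.tbz2",
                  "/tmp/example-1.0.xpak")
if not found:
    print("no xpak metadata found in the package file")
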
Code Example #32
File: xpaktools.py  Project: Sabayon/entropy
def suck_xpak(tbz2file, xpakpath):
    """
    docstring_title

    @param tbz2file: 
    @type tbz2file: 
    @param xpakpath: 
    @type xpakpath: 
    @return: 
    @rtype: 
    """
    if const_is_python3():
        xpak_end = b"XPAKSTOP"
        xpak_start = b"XPAKPACK"
    else:
        xpak_end = "XPAKSTOP"
        xpak_start = "XPAKPACK"

    chunk_size = 2048

    # Sanity check: makes the position calculations easier (seek_length below).
    assert len(xpak_end) == len(xpak_start)

    old, db = None, None
    try:
        old = open(tbz2file, "rb")
        db = open(xpakpath, "wb")
        data_start_position = None
        data_end_position = None
        # position old to the end
        old.seek(0, os.SEEK_END)
        n_bytes = old.tell()

        chunk_size = min(chunk_size, n_bytes)

        # position one chunk from the end, then continue
        seek_pos = n_bytes - chunk_size

        while True:
            old.seek(seek_pos, os.SEEK_SET)
            read_bytes = old.read(chunk_size)

            end_idx = read_bytes.rfind(xpak_end)
            if end_idx != -1:
                if data_start_position is None:
                    data_end_position = seek_pos + end_idx + len(xpak_end)
                    # avoid START after END in rfind()
                    read_bytes = read_bytes[:end_idx]

            start_idx = read_bytes.rfind(xpak_start)
            if start_idx != -1:
                if data_end_position is not None:
                    data_start_position = seek_pos + start_idx
                    break

            if seek_pos == 0:
                break

            # Make sure consecutive chunks overlap enough not to miss a
            # marker that straddles a chunk boundary.
            seek_length = chunk_size - (len(xpak_start) - 1)
            seek_pos -= seek_length
            if seek_pos < 0:
                seek_pos = 0

        if data_start_position is None:
            return False
        if data_end_position is None:
            return False

        # now write the found metadata to the output file,
        # from data_start_position up to data_end_position
        old.seek(data_start_position)
        to_read = data_end_position - data_start_position
        while to_read > 0:
            data = old.read(to_read)
            db.write(data)
            to_read -= len(data)
        return True

    finally:
        if old is not None:
            old.close()
        if db is not None:
            db.close()
Code Example #33
File: client.py  Project: dMaggot/entropy
    def _generic_post_handler(self, function_name, params, file_params,
        timeout):
        """
        Given a function name and the request data (dict format), do the actual
        HTTP request and return the response object to caller.
        WARNING: params and file_params dict keys must be ASCII strings only.

        @param function_name: name of the function that called this method
        @type function_name: string
        @param params: POST parameters
        @type params: dict
        @param file_params: mapping composed by file names as key and tuple
            composed by (file_name, file object) as values
        @type file_params: dict
        @param timeout: socket timeout
        @type timeout: float
        @return: tuple composed by the server response string or None
            (in case of empty response) and the HTTPResponse object (useful
                for checking response status)
        @rtype: tuple
        """
        if timeout is None:
            timeout = self._default_timeout_secs
        multipart_boundary = "---entropy.services,boundary---"
        request_path = self._request_path.rstrip("/") + "/" + function_name
        const_debug_write(__name__,
            "WebService _generic_post_handler, calling: %s at %s -- %s,"
            " tx_callback: %s, timeout: %s" % (self._request_host, request_path,
                params, self._transfer_callback, timeout,))
        connection = None
        try:
            if self._request_protocol == "http":
                connection = httplib.HTTPConnection(self._request_host,
                    timeout = timeout)
            elif self._request_protocol == "https":
                connection = httplib.HTTPSConnection(self._request_host,
                    timeout = timeout)
            else:
                raise WebService.RequestError("invalid request protocol",
                    method = function_name)

            headers = {
                "Accept": "text/plain",
                "User-Agent": self._generate_user_agent(function_name),
            }

            if file_params is None:
                file_params = {}
            # autodetect file parameters in params
            for k in list(params.keys()):
                if isinstance(params[k], (tuple, list)) \
                    and (len(params[k]) == 2):
                    f_name, f_obj = params[k]
                    if isinstance(f_obj, file):
                        file_params[k] = params[k]
                        del params[k]
                elif const_isunicode(params[k]):
                    # convert to raw string
                    params[k] = const_convert_to_rawstring(params[k],
                        from_enctype = "utf-8")
                elif not const_isstring(params[k]):
                    # invalid ?
                    if params[k] is None:
                        # will be converted to ""
                        continue
                    int_types = const_get_int()
                    supported_types = (float, list, tuple) + int_types
                    if not isinstance(params[k], supported_types):
                        raise WebService.UnsupportedParameters(
                            "%s is unsupported type %s" % (k, type(params[k])))
                    list_types = (list, tuple)
                    if isinstance(params[k], list_types):
                        # not supporting nested lists
                        non_str = [x for x in params[k] if not \
                            const_isstring(x)]
                        if non_str:
                            raise WebService.UnsupportedParameters(
                                "%s is unsupported type %s" % (k,
                                    type(params[k])))

            body = None
            if not file_params:
                headers["Content-Type"] = "application/x-www-form-urlencoded"
                encoded_params = urllib_parse.urlencode(params)
                data_size = len(encoded_params)
                if self._transfer_callback is not None:
                    self._transfer_callback(0, data_size, False)

                if data_size < 65536:
                    try:
                        connection.request("POST", request_path, encoded_params,
                            headers)
                    except socket.error as err:
                        raise WebService.RequestError(err,
                            method = function_name)
                else:
                    try:
                        connection.request("POST", request_path, None, headers)
                    except socket.error as err:
                        raise WebService.RequestError(err,
                            method = function_name)
                    sio = StringIO(encoded_params)
                    data_size = len(encoded_params)
                    while True:
                        chunk = sio.read(65535)
                        if not chunk:
                            break
                        try:
                            connection.send(chunk)
                        except socket.error as err:
                            raise WebService.RequestError(err,
                                method = function_name)
                        if self._transfer_callback is not None:
                            self._transfer_callback(sio.tell(),
                                data_size, False)
                # for both ways, send a signal through the callback
                if self._transfer_callback is not None:
                    self._transfer_callback(data_size, data_size, False)

            else:
                headers["Content-Type"] = "multipart/form-data; boundary=" + \
                    multipart_boundary
                body_file, body_fpath = self._encode_multipart_form(params,
                    file_params, multipart_boundary)
                try:
                    data_size = body_file.tell()
                    headers["Content-Length"] = str(data_size)
                    body_file.seek(0)
                    if self._transfer_callback is not None:
                        self._transfer_callback(0, data_size, False)

                    try:
                        connection.request("POST", request_path, None, headers)
                    except socket.error as err:
                        raise WebService.RequestError(err,
                            method = function_name)
                    while True:
                        chunk = body_file.read(65535)
                        if not chunk:
                            break
                        try:
                            connection.send(chunk)
                        except socket.error as err:
                            raise WebService.RequestError(err,
                                method = function_name)
                        if self._transfer_callback is not None:
                            self._transfer_callback(body_file.tell(),
                                data_size, False)
                    if self._transfer_callback is not None:
                        self._transfer_callback(data_size, data_size, False)
                finally:
                    body_file.close()
                    os.remove(body_fpath)

            try:
                response = connection.getresponse()
            except socket.error as err:
                raise WebService.RequestError(err,
                    method = function_name)
            const_debug_write(__name__, "WebService.%s(%s), "
                "response header: %s" % (
                    function_name, params, response.getheaders(),))
            total_length = response.getheader("Content-Length", "-1")
            try:
                total_length = int(total_length)
            except ValueError:
                total_length = -1
            outcome = const_convert_to_rawstring("")
            current_len = 0
            if self._transfer_callback is not None:
                self._transfer_callback(current_len, total_length, True)
            while True:
                try:
                    chunk = response.read(65536)
                except socket.error as err:
                    raise WebService.RequestError(err,
                        method = function_name)
                if not chunk:
                    break
                outcome += chunk
                current_len += len(chunk)
                if self._transfer_callback is not None:
                    self._transfer_callback(current_len, total_length, True)

            if self._transfer_callback is not None:
                self._transfer_callback(total_length, total_length, True)

            if const_is_python3():
                outcome = const_convert_to_unicode(outcome)
            if not outcome:
                return None, response
            return outcome, response

        except httplib.HTTPException as err:
            raise WebService.RequestError(err,
                method = function_name)
        finally:
            if connection is not None:
                connection.close()
Code Example #34
File: skel.py  Project: B-Rich/entropy
    def allocate_protected_file(package_file_path, destination_file_path):
        """
        Allocate a configuration protected file. This method returns a new
        destination_file_path value that is used by Entropy Client code to
        merge file at package_file_path to live system.
        This method offers basic support for Entropy ability to protect user
        configuration files against overwrites. Any subclass can hook code
        here in order to trigger extra actions on every acknowledged
        path modification.

        @param package_file_path: a valid file path pointing to the file
            that Entropy Client is going to move to destination_file_path
        @type package_file_path: string
        @param destination_file_path: the default destination path for given
            package_file_path. It points to the live system.
        @type destination_file_path: string
        @return: Tuple (of length 2) composed by (1) a new destination file
            path. Please note that it can be the same as the one passed in
            (destination_file_path) if no protection takes place (e.g. when
            the md5 of package_file_path and destination_file_path is the
            same) and (2) a bool informing whether the function actually
            protected the destination file. Unfortunately, the bool bit is
            still required in order to provide a valid new
            destination_file_path in any case.
        @rtype: tuple
        """
        pkg_path_os = package_file_path
        dest_path_os = destination_file_path
        if not const_is_python3():
            pkg_path_os = const_convert_to_rawstring(package_file_path)
            dest_path_os = const_convert_to_rawstring(destination_file_path)

        if os.path.isfile(dest_path_os) and \
            os.path.isfile(pkg_path_os):
            old = entropy.tools.md5sum(package_file_path)
            new = entropy.tools.md5sum(destination_file_path)
            if old == new:
                return destination_file_path, False

        dest_dirname = os.path.dirname(destination_file_path)
        dest_basename = os.path.basename(destination_file_path)

        counter = -1
        newfile = ""
        newfile_os = newfile
        previousfile = ""
        previousfile_os = previousfile
        while True:

            counter += 1
            txtcounter = str(counter)
            oldtxtcounter = str(counter-1)
            txtcounter_len = 4-len(txtcounter)
            cnt = 0

            while cnt < txtcounter_len:
                txtcounter = "0"+txtcounter
                oldtxtcounter = "0"+oldtxtcounter
                cnt += 1

            newfile = os.path.join(dest_dirname,
                "._cfg%s_%s" % (txtcounter, dest_basename,))
            if counter > 0:
                previousfile = os.path.join(dest_dirname,
                    "._cfg%s_%s" % (oldtxtcounter, dest_basename,))
            else:
                previousfile = os.path.join(dest_dirname,
                    "._cfg0000_%s" % (dest_basename,))

            newfile_os = newfile
            if not const_is_python3():
                newfile_os = const_convert_to_rawstring(newfile)

            previousfile_os = previousfile
            if not const_is_python3():
                previousfile_os = const_convert_to_rawstring(previousfile)

            if not os.path.lexists(newfile_os):
                break

        if not newfile:
            newfile = os.path.join(dest_dirname,
                "._cfg0000_%s" % (dest_basename,))
        else:

            if os.path.exists(previousfile_os):

                # compare package_file_path with previousfile
                new = entropy.tools.md5sum(package_file_path)
                old = entropy.tools.md5sum(previousfile)
                if new == old:
                    return previousfile, False

                # compare old and new, if they match,
                # suggest previousfile directly
                new = entropy.tools.md5sum(destination_file_path)
                old = entropy.tools.md5sum(previousfile)
                if new == old:
                    return previousfile, False

        return newfile, True
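
The loop above implements the usual ._cfgXXXX_<name> configuration-protection naming scheme. A tiny standalone sketch of how those candidate names are built (cfg_candidate is illustrative, not part of the entropy API):

import os

def cfg_candidate(destination_file_path, counter):
    # Zero-padded four digit counter, e.g. ._cfg0000_make.conf,
    # ._cfg0001_make.conf, and so on.
    dest_dirname = os.path.dirname(destination_file_path)
    dest_basename = os.path.basename(destination_file_path)
    return os.path.join(dest_dirname,
                        "._cfg%04d_%s" % (counter, dest_basename))

print(cfg_candidate("/etc/entropy/repositories.conf", 0))
# -> /etc/entropy/._cfg0000_repositories.conf
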
Code Example #35
    def _remove_content_from_system_loop(
            self, inst_repo, remove_atom, remove_content, remove_config,
            affected_directories, affected_infofiles, directories,
            directories_cache, preserved_mgr, not_removed_due_to_collisions,
            colliding_path_messages, automerge_metadata, col_protect, protect,
            mask, protectskip, sys_root):
        """
        Body of the _remove_content_from_system() method.
        """
        info_dirs = self._get_info_directories()

        # collect all the library paths to be preserved
        # in the final removal loop.
        preserved_lib_paths = set()

        if self.PRESERVED_LIBS_ENABLED:
            for _pkg_id, item, _ftype in remove_content:

                # determine without sys_root
                paths = self._handle_preserved_lib(item, remove_atom,
                                                   preserved_mgr)
                if paths is not None:
                    preserved_lib_paths.update(paths)

        for _pkg_id, item, _ftype in remove_content:

            if not item:
                continue  # empty element??

            sys_root_item = sys_root + item
            sys_root_item_encoded = sys_root_item
            if not const_is_python3():
                # this is coming from the db, and it's pure utf-8
                sys_root_item_encoded = const_convert_to_rawstring(
                    sys_root_item, from_enctype=etpConst['conf_raw_encoding'])

            # collision check
            if col_protect > 0:

                if inst_repo.isFileAvailable(item) \
                    and os.path.isfile(sys_root_item_encoded):

                    # in this way we filter out directories
                    colliding_path_messages.add(sys_root_item)
                    not_removed_due_to_collisions.add(item)
                    continue

            protected = False
            in_mask = False

            if not remove_config:

                protected_item_test = sys_root_item
                (in_mask, protected, _x,
                 do_continue) = self._handle_config_protect(
                     protect,
                     mask,
                     protectskip,
                     None,
                     protected_item_test,
                     do_allocation_check=False,
                     do_quiet=True)

                if do_continue:
                    protected = True

            # when files have not been modified by the user
            # and they are inside a config protect directory
            # we could even remove them directly
            if in_mask:

                oldprot_md5 = automerge_metadata.get(item)
                if oldprot_md5:

                    try:
                        in_system_md5 = entropy.tools.md5sum(
                            protected_item_test)
                    except (OSError, IOError) as err:
                        if err.errno != errno.ENOENT:
                            raise
                        in_system_md5 = "?"

                    if oldprot_md5 == in_system_md5:
                        prot_msg = _("Removing config file, never modified")
                        mytxt = "%s: %s" % (
                            darkgreen(prot_msg),
                            blue(item),
                        )
                        self._entropy.output(mytxt,
                                             importance=1,
                                             level="info",
                                             header=red("   ## "))
                        protected = False
                        do_continue = False

            # Is file or directory a protected item?
            if protected:
                self._entropy.logger.log(
                    "[Package]", etpConst['logging']['verbose_loglevel_id'],
                    "[remove] Protecting config file: %s" % (sys_root_item, ))
                mytxt = "[%s] %s: %s" % (
                    red(_("remove")),
                    brown(_("Protecting config file")),
                    sys_root_item,
                )
                self._entropy.output(mytxt,
                                     importance=1,
                                     level="warning",
                                     header=red("   ## "))
                continue

            try:
                os.lstat(sys_root_item_encoded)
            except OSError as err:
                if err.errno in (errno.ENOENT, errno.ENOTDIR):
                    continue  # skip file, does not exist
                raise

            except UnicodeEncodeError:
                msg = _("This package contains a badly encoded file !!!")
                mytxt = brown(msg)
                self._entropy.output(red("QA: ") + mytxt,
                                     importance=1,
                                     level="warning",
                                     header=darkred("   ## "))
                continue  # file has a really bad encoding

            if os.path.isdir(sys_root_item_encoded) and \
                os.path.islink(sys_root_item_encoded):
                # S_ISDIR returns False for directory symlinks,
                # so os.path.isdir is used to catch valid directory symlinks
                if sys_root_item not in directories_cache:
                    # collect for Trigger
                    affected_directories.add(item)
                    directories.add((sys_root_item, "link"))
                    directories_cache.add(sys_root_item)
                continue

            if os.path.isdir(sys_root_item_encoded):
                # plain directory
                if sys_root_item not in directories_cache:
                    # collect for Trigger
                    affected_directories.add(item)
                    directories.add((sys_root_item, "dir"))
                    directories_cache.add(sys_root_item)
                continue

            # files, symlinks or not
            # just a file or symlink or broken
            # directory symlink (remove now)

            # skip file removal if item is a preserved library.
            if item in preserved_lib_paths:
                self._entropy.logger.log(
                    "[Package]", etpConst['logging']['normal_loglevel_id'],
                    "[remove] skipping removal of: %s" % (sys_root_item, ))
                continue

            try:
                os.remove(sys_root_item_encoded)
            except OSError as err:
                self._entropy.logger.log(
                    "[Package]", etpConst['logging']['normal_loglevel_id'],
                    "[remove] Unable to remove %s, error: %s" % (
                        sys_root_item,
                        err,
                    ))
                continue

            # collect for Trigger
            dir_name = os.path.dirname(item)
            affected_directories.add(dir_name)

            # account for info files, if any
            if dir_name in info_dirs:
                for _ext in self._INFO_EXTS:
                    if item.endswith(_ext):
                        affected_infofiles.add(item)
                        break

            # add its parent directory
            dirobj = const_convert_to_unicode(
                os.path.dirname(sys_root_item_encoded))
            if dirobj not in directories_cache:
                if os.path.isdir(dirobj) and os.path.islink(dirobj):
                    directories.add((dirobj, "link"))
                elif os.path.isdir(dirobj):
                    directories.add((dirobj, "dir"))

                directories_cache.add(dirobj)
Code Example #36
    def input_box(cls, title, input_parameters, cancel_button=True):
        """
        Generic input box (form) creator and data collector.

        @param title: input box title
        @type title: string
        @param input_parameters: list of properly formatted tuple items.
        @type input_parameters: list
        @keyword cancel_button: make possible to "cancel" the input request.
        @type cancel_button: bool
        @return: dict containing input box answers
        @rtype: dict

        input_parameters supported items:

        [input id], [input text title], [input verification callback], [
            no text echo?]
        ('identifier 1', 'input text 1', input_verification_callback, False)

        ('item_3', ('checkbox', 'Checkbox option (boolean request) - please choose',),
            input_verification_callback, True)

        ('item_4', ('combo', ('Select your favorite option', ['option 1', 'option 2', 'option 3']),),
            input_verification_callback, True)

        ('item_4',('list',('Setup your list',['default list item 1', 'default list item 2']),),
            input_verification_callback, True)

        """
        results = {}
        if title:
            try:
                sys.stdout.write(title + "\n")
            except UnicodeEncodeError:
                sys.stdout.write(title.encode('utf-8') + "\n")
        _flush_stdouterr()

        def option_chooser(option_data):
            mydict = {}
            counter = 1
            option_text, option_list = option_data
            cls.output(option_text)
            for item in option_list:
                mydict[counter] = item
                txt = "[%s] %s" % (
                    darkgreen(str(counter)),
                    blue(item),
                )
                cls.output(txt)
                counter += 1
            while True:
                try:
                    if const_is_python3():
                        myresult = const_convert_to_unicode(readtext(
                            "%s: " % (_('Selected number'), )),
                                                            enctype="utf-8")
                    else:
                        myresult = readtext(
                            "%s: " % (_('Selected number'), )).decode('utf-8')
                except UnicodeDecodeError:
                    continue
                except UnicodeEncodeError:
                    continue
                try:
                    myresult = int(myresult)
                except ValueError:
                    continue
                selected = mydict.get(myresult)
                if selected is not None:
                    return myresult, selected

        def list_editor(option_data, can_cancel, callback):
            def selaction():
                cls.output('')
                cls.output(darkred(_("Please select an option")))
                if can_cancel:
                    cls.output("  (" + blue("-1") + ") " +
                               darkred(_("Discard all")))
                cls.output("  (" + blue("0") + ")  " + darkgreen(_("Confirm")))
                cls.output("  (" + blue("1") + ")  " + brown(_("Add item")))
                cls.output("  (" + blue("2") + ")  " + brown(_("Edit item")))
                cls.output("  (" + blue("3") + ")  " +
                           darkblue(_("Remove item")))
                cls.output("  (" + blue("4") + ")  " +
                           darkgreen(_("Show current list")))
                # wait user interaction
                cls.output('')
                try:
                    action = readtext(
                        darkgreen(
                            _("Your choice (type a number and press enter):"))
                        + " ")
                except UnicodeDecodeError:
                    return ''
                return action

            mydict = {}
            counter = 1
            valid_actions = [0, 1, 2, 3, 4]
            if can_cancel:
                valid_actions.insert(0, -1)
            option_text, option_list = option_data
            txt = "%s:" % (blue(option_text), )
            cls.output(txt)

            for item in option_list:
                mydict[counter] = item
                txt = "[%s] %s" % (
                    darkgreen(str(counter)),
                    blue(item),
                )
                cls.output(txt)
                counter += 1

            def show_current_list():
                for key in sorted(mydict):
                    txt = "[%s] %s" % (
                        darkgreen(str(key)),
                        blue(mydict[key]),
                    )
                    cls.output(txt)

            while True:
                try:
                    sel_action = selaction()
                    if not sel_action:
                        show_current_list()
                        continue
                    action = int(sel_action)
                except (
                        ValueError,
                        TypeError,
                ):
                    cls.output(_("You don't have typed a number."),
                               level="warning")
                    continue
                if action not in valid_actions:
                    cls.output(_("Invalid action."), level="warning")
                    continue
                if action == -1:
                    raise KeyboardInterrupt()
                elif action == 0:
                    break
                elif action == 1:  # add item
                    while True:
                        try:
                            try:
                                s_el = readtext(
                                    darkred(_(
                                        "String to add (-1 to go back):")) +
                                    " ")
                            except UnicodeDecodeError:
                                raise ValueError()
                            if s_el == "-1":
                                break
                            if not callback(s_el):
                                raise ValueError()
                            mydict[counter] = s_el
                            counter += 1
                        except (ValueError, ):
                            cls.output(_("Invalid string."), level="warning")
                            continue
                        break
                    show_current_list()
                    continue
                elif action == 2:  # edit item
                    while True:
                        try:
                            edit_msg = _(
                                "Element number to edit (-1 to go back):")
                            try:
                                s_el = int(readtext(darkred(edit_msg) + " "))
                            except UnicodeDecodeError:
                                raise ValueError()
                            if s_el == -1:
                                break
                            if s_el not in mydict:
                                raise ValueError()
                            try:
                                new_s_val = readtext("[%s: %s] %s " % (
                                    _("old"),
                                    mydict[s_el],
                                    _("new value:"),
                                ))
                            except UnicodeDecodeError:
                                new_s_val = ''
                            if not callback(new_s_val):
                                raise ValueError()
                            mydict[s_el] = new_s_val[:]
                        except (
                                ValueError,
                                TypeError,
                        ):
                            cls.output(_("Invalid element."), level="warning")
                            continue
                        break
                    show_current_list()
                    continue
                elif action == 3:  # remove item
                    while True:
                        try:
                            try:
                                s_el = int(
                                    readtext(
                                        darkred(
                                            _("Element number to remove (-1 to go back):"
                                              )) + " "))
                            except UnicodeDecodeError:
                                raise ValueError()
                            if s_el == -1:
                                break
                            if s_el not in mydict:
                                raise ValueError()
                            del mydict[s_el]
                        except (
                                ValueError,
                                TypeError,
                        ):
                            cls.output(_("Invalid element."), level="warning")
                            continue
                        break
                    show_current_list()
                    continue
                elif action == 4:  # show current list
                    show_current_list()
                    continue
                break

            mylist = [mydict[x] for x in sorted(mydict)]
            return mylist

        for identifier, input_text, callback, password in input_parameters:
            while True:
                use_cb = True
                try:
                    if isinstance(input_text, tuple):
                        myresult = False
                        input_type, data = input_text
                        if input_type == "checkbox":
                            answer = cls.ask_question(data)
                            if answer == _("Yes"):
                                myresult = True
                        elif input_type == "combo":
                            myresult = option_chooser(data)
                        elif input_type == "list":
                            use_cb = False
                            myresult = list_editor(data, cancel_button,
                                                   callback)
                    else:
                        while True:
                            try:
                                myresult = readtext(input_text + ": ",
                                                    password=password)
                                if not const_is_python3():
                                    myresult = myresult.decode("utf-8")
                            except UnicodeDecodeError:
                                continue
                            break
                except (
                        KeyboardInterrupt,
                        EOFError,
                ):
                    if not cancel_button:  # use with care
                        continue
                    return None
                valid = True
                if use_cb:
                    valid = callback(myresult)
                if valid:
                    results[identifier] = myresult
                    break
        return results
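
A hypothetical call sketch for input_box. The enclosing class is not shown in this snippet, so the TextInterface name in the commented call is an assumption, and all identifiers and prompts are illustrative:

def non_empty(value):
    # verification callback: accept any non-empty answer
    return bool(value)

input_params = [
    ('username', 'User name', non_empty, False),
    ('password', 'Password', non_empty, True),    # True = no text echo
    ('confirm', ('checkbox', 'Apply the changes?'), non_empty, True),
]

# Returns a dict keyed by the identifiers above ({'username': ..., ...}),
# or None if the user cancelled and cancel_button is True.
# answers = TextInterface.input_box("Account setup", input_params)
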
コード例 #37
0
ファイル: dump.py プロジェクト: skwerlman/entropy
def loadobj(name, complete_path=False, dump_dir=None, aging_days=None):
    """
    Load an object from a file.

    @param name: name of the object to load
    @type name: string
    @keyword complete_path: determine whether name argument
        is a complete disk path to serialized object
    @type complete_path: bool
    @keyword dump_dir: alternative dump directory
    @type dump_dir: string
    @keyword aging_days: if int, consider the cached file invalid
        if older than aging_days.
    @type aging_days: int
    @return: object or None
    @rtype: any Python picklable object or None
    """
    if dump_dir is None:
        dump_dir = D_DIR

    while True:
        if complete_path:
            dmpfile = name
        else:
            dump_path = os.path.join(dump_dir, name)
            dmpfile = dump_path + D_EXT

        if aging_days is not None:
            cur_t = time.time()
            try:
                mtime = os.path.getmtime(dmpfile)
            except (IOError, OSError):
                mtime = 0.0
            if abs(cur_t - mtime) > (aging_days * 86400):
                # do not unlink since other consumers might
                # have different aging settings.
                #try:
                #    os.remove(dmpfile)
                #except (OSError, IOError):
                #    # did my best
                #    pass
                return None

        try:
            with open(dmpfile, "rb") as dmp_f:
                obj = None
                try:
                    if const_is_python3():
                        obj = pickle.load(
                            dmp_f,
                            fix_imports=True,
                            encoding=etpConst['conf_raw_encoding'])
                    else:
                        obj = pickle.load(dmp_f)
                except (
                        ValueError,
                        EOFError,
                        IOError,
                        OSError,
                        pickle.UnpicklingError,
                        TypeError,
                        AttributeError,
                        ImportError,
                        SystemError,
                ):
                    pass
                return obj
        except (
                IOError,
                OSError,
        ):
            pass
        break
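
A minimal usage sketch, assuming this dump.py is importable as entropy.dump (as in the entropy source tree); the object name is illustrative:

from entropy.dump import loadobj   # assumed import path

# Load a cached object named "repo_metadata"; None is returned if the cache
# file is missing, unreadable, or older than two days.
cached = loadobj("repo_metadata", aging_days=2)
if cached is None:
    # hypothetical fallback: recompute the data and re-cache it elsewhere
    cached = {}
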
コード例 #38
0
    def _handle_config_protect(self,
                               protect,
                               mask,
                               protectskip,
                               fromfile,
                               tofile,
                               do_allocation_check=True,
                               do_quiet=False):
        """
        Handle configuration file protection. This method contains the logic
        for determining if a file should be protected from overwrite.
        """
        protected = False
        do_continue = False
        in_mask = False

        tofile_os = tofile
        fromfile_os = fromfile
        if not const_is_python3():
            tofile_os = const_convert_to_rawstring(tofile)
            fromfile_os = const_convert_to_rawstring(fromfile)

        if tofile in protect:
            protected = True
            in_mask = True

        elif os.path.dirname(tofile) in protect:
            protected = True
            in_mask = True

        else:
            tofile_testdir = os.path.dirname(tofile)
            old_tofile_testdir = None
            while tofile_testdir != old_tofile_testdir:
                if tofile_testdir in protect:
                    protected = True
                    in_mask = True
                    break
                old_tofile_testdir = tofile_testdir
                tofile_testdir = os.path.dirname(tofile_testdir)

        if protected:  # check if perhaps, file is masked, so unprotected

            if tofile in mask:
                protected = False
                in_mask = False

            elif os.path.dirname(tofile) in mask:
                protected = False
                in_mask = False

            else:
                tofile_testdir = os.path.dirname(tofile)
                old_tofile_testdir = None
                while tofile_testdir != old_tofile_testdir:
                    if tofile_testdir in mask:
                        protected = False
                        in_mask = False
                        break
                    old_tofile_testdir = tofile_testdir
                    tofile_testdir = os.path.dirname(tofile_testdir)

        if not os.path.lexists(tofile_os):
            protected = False  # file doesn't exist

        # check if it's a text file
        if protected:
            protected = entropy.tools.istextfile(tofile)
            in_mask = protected

        if fromfile is not None:
            if protected and os.path.lexists(fromfile_os) and (
                    not os.path.exists(fromfile_os)) and (
                        os.path.islink(fromfile_os)):
                # broken symlink, don't protect
                self._entropy.logger.log(
                    "[Package]",
                    etpConst['logging']['normal_loglevel_id'],
                    "WARNING!!! Failed to handle file protection for: " \
                    "%s, broken symlink in package" % (
                        tofile,
                    )
                )
                msg = _("Cannot protect broken symlink")
                mytxt = "%s:" % (purple(msg), )
                self._entropy.output(mytxt,
                                     importance=1,
                                     level="warning",
                                     header=brown("   ## "))
                self._entropy.output(tofile,
                                     level="warning",
                                     header=brown("   ## "))
                protected = False

        if not protected:
            return in_mask, protected, tofile, do_continue

        ##                  ##
        # file is protected  #
        ##__________________##

        # check if protection is disabled for this element
        if tofile in protectskip:
            self._entropy.logger.log(
                "[Package]",
                etpConst['logging']['normal_loglevel_id'],
                "Skipping config file installation/removal, " \
                "as stated in client.conf: %s" % (tofile,)
            )
            if not do_quiet:
                mytxt = "%s: %s" % (
                    _("Skipping file installation/removal"),
                    tofile,
                )
                self._entropy.output(mytxt,
                                     importance=1,
                                     level="warning",
                                     header=darkred("   ## "))
            do_continue = True
            return in_mask, protected, tofile, do_continue

        ##                      ##
        # file is protected (2)  #
        ##______________________##

        prot_status = True
        if do_allocation_check:
            spm_class = self._entropy.Spm_class()
            tofile, prot_status = spm_class.allocate_protected_file(
                fromfile, tofile)

        if not prot_status:
            # a protected file with the same content
            # is already in place, so not going to protect
            # the same file twice
            protected = False
            return in_mask, protected, tofile, do_continue

        ##                      ##
        # file is protected (3)  #
        ##______________________##

        oldtofile = tofile
        if oldtofile.find("._cfg") != -1:
            oldtofile = os.path.join(os.path.dirname(oldtofile),
                                     os.path.basename(oldtofile)[10:])

        if not do_quiet:
            self._entropy.logger.log(
                "[Package]", etpConst['logging']['normal_loglevel_id'],
                "Protecting config file: %s" % (oldtofile, ))
            mytxt = red("%s: %s") % (
                _("Protecting config file"),
                oldtofile,
            )
            self._entropy.output(mytxt,
                                 importance=1,
                                 level="warning",
                                 header=darkred("   ## "))

        return in_mask, protected, tofile, do_continue
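
The protect/mask tests above walk up every parent directory of the target path until the walk reaches the filesystem root. A standalone sketch of just that check (the function name is illustrative):

import os

def path_is_covered(path, protected_paths):
    """Return True if path, or any of its parent directories, is listed
    in protected_paths (a set of absolute paths)."""
    if path in protected_paths:
        return True
    testdir = os.path.dirname(path)
    old_testdir = None
    # os.path.dirname("/") == "/", so the loop terminates at the root
    while testdir != old_testdir:
        if testdir in protected_paths:
            return True
        old_testdir = testdir
        testdir = os.path.dirname(testdir)
    return False

# usage sketch
# path_is_covered("/etc/conf.d/net", {"/etc/conf.d"})   # -> True
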
コード例 #39
0
ファイル: dump.py プロジェクト: B-Rich/entropy
def dumpobj(name, my_object, complete_path = False, ignore_exceptions = True,
    dump_dir = None, custom_permissions = None):
    """
    Dump pickable object to file

    @param name: name of the object
    @type name: string
    @param my_object: object to dump
    @type my_object: any Python "picklable" object
    @keyword complete_path: consider "name" argument as
        a complete path (this overrides the default dump
        path given by etpConst['dumpstoragedir'])
    @type complete_path: bool
    @keyword ignore_exceptions: ignore any possible exception
        (EOFError, IOError, OSError,)
    @type ignore_exceptions: bool
    @keyword dump_dir: alternative dump directory
    @type dump_dir: string
    @keyword custom_permissions: give custom permission bits
    @type custom_permissions: octal
    @return: None
    @rtype: None
    @raise EOFError: could be caused by pickle.dump, ignored if
        ignore_exceptions is True
    @raise IOError: could be caused by pickle.dump, ignored if
        ignore_exceptions is True
    @raise OSError: could be caused by pickle.dump, ignored if
        ignore_exceptions is True
    """
    if dump_dir is None:
        dump_dir = D_DIR
    if custom_permissions is None:
        custom_permissions = 0o664

    while True: # trap ctrl+C
        tmp_fd, tmp_dmpfile = None, None
        try:
            if complete_path:
                dmpfile = name
                c_dump_dir = os.path.dirname(name)
            else:
                _dmp_path = os.path.join(dump_dir, name)
                dmpfile = _dmp_path+D_EXT
                c_dump_dir = os.path.dirname(_dmp_path)

            my_dump_dir = c_dump_dir
            d_paths = []
            while not os.path.isdir(my_dump_dir):
                d_paths.append(my_dump_dir)
                my_dump_dir = os.path.dirname(my_dump_dir)
            if d_paths:
                d_paths = sorted(d_paths)
                for d_path in d_paths:
                    os.mkdir(d_path)
                    const_setup_file(d_path, E_GID, 0o775)

            dmp_name = os.path.basename(dmpfile)
            tmp_fd, tmp_dmpfile = const_mkstemp(
                dir=c_dump_dir, prefix=dmp_name)
            # WARNING: it has been observed that using
            # os.fdopen() below in multi-threaded scenarios
            # is causing EBADF. There is probably a race
            # condition down in the stack.
            with open(tmp_dmpfile, "wb") as dmp_f:
                if const_is_python3():
                    pickle.dump(my_object, dmp_f,
                        protocol = COMPAT_PICKLE_PROTOCOL, fix_imports = True)
                else:
                    pickle.dump(my_object, dmp_f)

            const_setup_file(tmp_dmpfile, E_GID, custom_permissions)
            os.rename(tmp_dmpfile, dmpfile)

        except RuntimeError:
            try:
                os.remove(dmpfile)
            except OSError:
                pass
        except (EOFError, IOError, OSError):
            if not ignore_exceptions:
                raise
        finally:
            if tmp_fd is not None:
                try:
                    os.close(tmp_fd)
                except (IOError, OSError):
                    pass
            if tmp_dmpfile is not None:
                try:
                    os.remove(tmp_dmpfile)
                except (IOError, OSError):
                    pass
        break
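
A round-trip sketch pairing dumpobj with loadobj from the earlier example, again assuming the entropy.dump import path; note that both calls go through const_setup_file, which adjusts permissions and ownership, so behaviour can differ for unprivileged users:

import tempfile
from entropy.dump import dumpobj, loadobj   # assumed import path

cache_dir = tempfile.mkdtemp()
settings = {"mirrors": ["http://mirror.example"], "branch": "5"}

dumpobj("client_settings", settings, dump_dir=cache_dir)
restored = loadobj("client_settings", dump_dir=cache_dir)
# restored == settings, unless the dump silently failed
# (see ignore_exceptions above)
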
コード例 #40
0
ファイル: skel.py プロジェクト: skwerlman/entropy
    def allocate_protected_file(package_file_path, destination_file_path):
        """
        Allocate a configuration protected file. This method returns a new
        destination_file_path value that is used by Entropy Client code to
        merge file at package_file_path to live system.
        This method offers basic support for Entropy ability to protect user
        configuration files against overwrites. Any subclass can hook code
        here in order to trigger extra actions on every acknowledged
        path modification.

        @param package_file_path: a valid file path pointing to the file
            that Entropy Client is going to move to destination_file_path
        @type package_file_path: string
        @param destination_file_path: the default destination path for given
            package_file_path. It points to the live system.
        @type destination_file_path: string
        @return: Tuple (of length 2) composed of (1) a new destination file
            path. Please note that it can be the same as the one passed
            (destination_file_path) if no protection takes place (e.g. when
            the md5 of package_file_path and destination_file_path is the
            same) and (2) a bool telling whether the function actually
            protected the destination file. Unfortunately, the bool bit is
            still required in order to provide a valid new
            destination_file_path in any case.
        @rtype: tuple
        """
        pkg_path_os = package_file_path
        dest_path_os = destination_file_path
        if not const_is_python3():
            pkg_path_os = const_convert_to_rawstring(package_file_path)
            dest_path_os = const_convert_to_rawstring(destination_file_path)

        if os.path.isfile(dest_path_os) and \
            os.path.isfile(pkg_path_os):
            old = entropy.tools.md5sum(package_file_path)
            new = entropy.tools.md5sum(destination_file_path)
            if old == new:
                return destination_file_path, False

        dest_dirname = os.path.dirname(destination_file_path)
        dest_basename = os.path.basename(destination_file_path)

        counter = -1
        newfile = ""
        newfile_os = newfile
        previousfile = ""
        previousfile_os = previousfile
        while True:

            counter += 1
            txtcounter = str(counter)
            oldtxtcounter = str(counter - 1)
            txtcounter_len = 4 - len(txtcounter)
            cnt = 0

            while cnt < txtcounter_len:
                txtcounter = "0" + txtcounter
                oldtxtcounter = "0" + oldtxtcounter
                cnt += 1

            newfile = os.path.join(
                dest_dirname, "._cfg%s_%s" % (
                    txtcounter,
                    dest_basename,
                ))
            if counter > 0:
                previousfile = os.path.join(
                    dest_dirname, "._cfg%s_%s" % (
                        oldtxtcounter,
                        dest_basename,
                    ))
            else:
                previousfile = os.path.join(dest_dirname,
                                            "._cfg0000_%s" % (dest_basename, ))

            newfile_os = newfile
            if not const_is_python3():
                newfile_os = const_convert_to_rawstring(newfile)

            previousfile_os = previousfile
            if not const_is_python3():
                previousfile_os = const_convert_to_rawstring(previousfile)

            if not os.path.lexists(newfile_os):
                break

        if not newfile:
            newfile = os.path.join(dest_dirname,
                                   "._cfg0000_%s" % (dest_basename, ))
        else:

            if os.path.exists(previousfile_os):

                # compare package_file_path with previousfile
                new = entropy.tools.md5sum(package_file_path)
                old = entropy.tools.md5sum(previousfile)
                if new == old:
                    return previousfile, False

                # compare old and new, if they match,
                # suggest previousfile directly
                new = entropy.tools.md5sum(destination_file_path)
                old = entropy.tools.md5sum(previousfile)
                if new == old:
                    return previousfile, False

        return newfile, True
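
The allocation above follows Portage's CONFIG_PROTECT naming scheme: candidate names are built as "._cfg0000_<basename>", "._cfg0001_<basename>", and so on until an unused slot is found. A small sketch of just the name generation (helper name and sample path are illustrative):

import os

def next_cfg_name(destination_file_path):
    """Return the first free ._cfgNNNN_ sibling of destination_file_path."""
    dest_dir = os.path.dirname(destination_file_path)
    dest_base = os.path.basename(destination_file_path)
    counter = 0
    while True:
        candidate = os.path.join(
            dest_dir, "._cfg%04d_%s" % (counter, dest_base))
        if not os.path.lexists(candidate):
            return candidate
        counter += 1

# usage sketch
# next_cfg_name("/etc/hosts")   # -> "/etc/._cfg0000_hosts" if unused
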
コード例 #41
0
ファイル: client.py プロジェクト: skwerlman/entropy
    def _generic_post_handler(self, function_name, params, file_params,
                              timeout):
        """
        Given a function name and the request data (dict format), do the actual
        HTTP request and return the response object to caller.
        WARNING: params and file_params dict keys must be ASCII string only.

        @param function_name: name of the function that called this method
        @type function_name: string
        @param params: POST parameters
        @type params: dict
        @param file_params: mapping composed by file names as key and tuple
            composed by (file_name, file object) as values
        @type file_params: dict
        @param timeout: socket timeout
        @type timeout: float
        @return: tuple composed of the server response string or None
            (in case of empty response) and the HTTPResponse object (useful
            for checking response status)
        @rtype: tuple
        """
        if timeout is None:
            timeout = self._default_timeout_secs
        multipart_boundary = "---entropy.services,boundary---"
        request_path = self._request_path.rstrip("/") + "/" + function_name
        const_debug_write(
            __name__,
            "WebService _generic_post_handler, calling: %s at %s -- %s,"
            " tx_callback: %s, timeout: %s" % (
                self._request_host,
                request_path,
                params,
                self._transfer_callback,
                timeout,
            ))
        connection = None
        try:
            if self._request_protocol == "http":
                connection = httplib.HTTPConnection(self._request_host,
                                                    timeout=timeout)
            elif self._request_protocol == "https":
                connection = httplib.HTTPSConnection(self._request_host,
                                                     timeout=timeout)
            else:
                raise WebService.RequestError("invalid request protocol",
                                              method=function_name)

            headers = {
                "Accept": "text/plain",
                "User-Agent": self._generate_user_agent(function_name),
            }

            if file_params is None:
                file_params = {}
            # autodetect file parameters in params
            for k in list(params.keys()):
                if isinstance(params[k], (tuple, list)) \
                    and (len(params[k]) == 2):
                    f_name, f_obj = params[k]
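                    # NOTE: "file" is not a builtin on Python 3; this check
                    # relies on the Python 2 file type (or an alias defined
                    # elsewhere in this module, not shown in the snippet)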
                    if isinstance(f_obj, file):
                        file_params[k] = params[k]
                        del params[k]
                elif const_isunicode(params[k]):
                    # convert to raw string
                    params[k] = const_convert_to_rawstring(
                        params[k], from_enctype="utf-8")
                elif not const_isstring(params[k]):
                    # invalid ?
                    if params[k] is None:
                        # will be converted to ""
                        continue
                    int_types = const_get_int()
                    supported_types = (float, list, tuple) + int_types
                    if not isinstance(params[k], supported_types):
                        raise WebService.UnsupportedParameters(
                            "%s is unsupported type %s" % (k, type(params[k])))
                    list_types = (list, tuple)
                    if isinstance(params[k], list_types):
                        # not supporting nested lists
                        non_str = [x for x in params[k] if not \
                            const_isstring(x)]
                        if non_str:
                            raise WebService.UnsupportedParameters(
                                "%s is unsupported type %s" %
                                (k, type(params[k])))

            body = None
            if not file_params:
                headers["Content-Type"] = "application/x-www-form-urlencoded"
                encoded_params = urllib_parse.urlencode(params)
                data_size = len(encoded_params)
                if self._transfer_callback is not None:
                    self._transfer_callback(0, data_size, False)

                if data_size < 65536:
                    try:
                        connection.request("POST", request_path,
                                           encoded_params, headers)
                    except socket.error as err:
                        raise WebService.RequestError(err,
                                                      method=function_name)
                else:
                    try:
                        connection.request("POST", request_path, None, headers)
                    except socket.error as err:
                        raise WebService.RequestError(err,
                                                      method=function_name)
                    sio = StringIO(encoded_params)
                    data_size = len(encoded_params)
                    while True:
                        chunk = sio.read(65535)
                        if not chunk:
                            break
                        try:
                            connection.send(chunk)
                        except socket.error as err:
                            raise WebService.RequestError(err,
                                                          method=function_name)
                        if self._transfer_callback is not None:
                            self._transfer_callback(sio.tell(), data_size,
                                                    False)
                # for both ways, send a signal through the callback
                if self._transfer_callback is not None:
                    self._transfer_callback(data_size, data_size, False)

            else:
                headers["Content-Type"] = "multipart/form-data; boundary=" + \
                    multipart_boundary
                body_file, body_fpath = self._encode_multipart_form(
                    params, file_params, multipart_boundary)
                try:
                    data_size = body_file.tell()
                    headers["Content-Length"] = str(data_size)
                    body_file.seek(0)
                    if self._transfer_callback is not None:
                        self._transfer_callback(0, data_size, False)

                    try:
                        connection.request("POST", request_path, None, headers)
                    except socket.error as err:
                        raise WebService.RequestError(err,
                                                      method=function_name)
                    while True:
                        chunk = body_file.read(65535)
                        if not chunk:
                            break
                        try:
                            connection.send(chunk)
                        except socket.error as err:
                            raise WebService.RequestError(err,
                                                          method=function_name)
                        if self._transfer_callback is not None:
                            self._transfer_callback(body_file.tell(),
                                                    data_size, False)
                    if self._transfer_callback is not None:
                        self._transfer_callback(data_size, data_size, False)
                finally:
                    body_file.close()
                    os.remove(body_fpath)

            try:
                response = connection.getresponse()
            except socket.error as err:
                raise WebService.RequestError(err, method=function_name)
            const_debug_write(
                __name__, "WebService.%s(%s), "
                "response header: %s" % (
                    function_name,
                    params,
                    response.getheaders(),
                ))
            total_length = response.getheader("Content-Length", "-1")
            try:
                total_length = int(total_length)
            except ValueError:
                total_length = -1
            outcome = const_convert_to_rawstring("")
            current_len = 0
            if self._transfer_callback is not None:
                self._transfer_callback(current_len, total_length, True)
            while True:
                try:
                    chunk = response.read(65536)
                except socket.error as err:
                    raise WebService.RequestError(err, method=function_name)
                if not chunk:
                    break
                outcome += chunk
                current_len += len(chunk)
                if self._transfer_callback is not None:
                    self._transfer_callback(current_len, total_length, True)

            if self._transfer_callback is not None:
                self._transfer_callback(total_length, total_length, True)

            if const_is_python3():
                outcome = const_convert_to_unicode(outcome)
            if not outcome:
                return None, response
            return outcome, response

        except httplib.HTTPException as err:
            raise WebService.RequestError(err, method=function_name)
        finally:
            if connection is not None:
                connection.close()
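
The handler streams both the request body and the response in 64 KiB chunks and reports progress through self._transfer_callback as (current_bytes, total_bytes, is_download) calls, with total_bytes set to -1 when the server sends no Content-Length header. A sketch of a compatible callback (how the callback gets attached to the WebService instance is not shown in this snippet):

def print_progress(current, total, is_download):
    direction = "download" if is_download else "upload"
    if total > 0:
        print("%s: %d/%d bytes (%.1f%%)" % (
            direction, current, total, 100.0 * current / total))
    else:
        # total size unknown (no Content-Length header was provided)
        print("%s: %d bytes so far" % (direction, current))
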
コード例 #42
0
ファイル: dump.py プロジェクト: skwerlman/entropy
def dumpobj(name,
            my_object,
            complete_path=False,
            ignore_exceptions=True,
            dump_dir=None,
            custom_permissions=None):
    """
    Dump pickable object to file

    @param name: name of the object
    @type name: string
    @param my_object: object to dump
    @type my_object: any Python "picklable" object
    @keyword complete_path: consider "name" argument as
        a complete path (this overrides the default dump
        path given by etpConst['dumpstoragedir'])
    @type complete_path: bool
    @keyword ignore_exceptions: ignore any possible exception
        (EOFError, IOError, OSError,)
    @type ignore_exceptions: bool
    @keyword dump_dir: alternative dump directory
    @type dump_dir: string
    @keyword custom_permissions: give custom permission bits
    @type custom_permissions: octal
    @return: None
    @rtype: None
    @raise EOFError: could be caused by pickle.dump, ignored if
        ignore_exceptions is True
    @raise IOError: could be caused by pickle.dump, ignored if
        ignore_exceptions is True
    @raise OSError: could be caused by pickle.dump, ignored if
        ignore_exceptions is True
    """
    if dump_dir is None:
        dump_dir = D_DIR
    if custom_permissions is None:
        custom_permissions = 0o664

    while True:  # trap ctrl+C
        tmp_fd, tmp_dmpfile = None, None
        try:
            if complete_path:
                dmpfile = name
                c_dump_dir = os.path.dirname(name)
            else:
                _dmp_path = os.path.join(dump_dir, name)
                dmpfile = _dmp_path + D_EXT
                c_dump_dir = os.path.dirname(_dmp_path)

            my_dump_dir = c_dump_dir
            d_paths = []
            while not os.path.isdir(my_dump_dir):
                d_paths.append(my_dump_dir)
                my_dump_dir = os.path.dirname(my_dump_dir)
            if d_paths:
                d_paths = sorted(d_paths)
                for d_path in d_paths:
                    os.mkdir(d_path)
                    const_setup_file(d_path, E_GID, 0o775)

            dmp_name = os.path.basename(dmpfile)
            tmp_fd, tmp_dmpfile = const_mkstemp(dir=c_dump_dir,
                                                prefix=dmp_name)
            # WARNING: it has been observed that using
            # os.fdopen() below in multi-threaded scenarios
            # is causing EBADF. There is probably a race
            # condition down in the stack.
            with open(tmp_dmpfile, "wb") as dmp_f:
                if const_is_python3():
                    pickle.dump(my_object,
                                dmp_f,
                                protocol=COMPAT_PICKLE_PROTOCOL,
                                fix_imports=True)
                else:
                    pickle.dump(my_object, dmp_f)

            const_setup_file(tmp_dmpfile, E_GID, custom_permissions)
            os.rename(tmp_dmpfile, dmpfile)

        except RuntimeError:
            try:
                os.remove(dmpfile)
            except OSError:
                pass
        except (EOFError, IOError, OSError):
            if not ignore_exceptions:
                raise
        finally:
            if tmp_fd is not None:
                try:
                    os.close(tmp_fd)
                except (IOError, OSError):
                    pass
            if tmp_dmpfile is not None:
                try:
                    os.remove(tmp_dmpfile)
                except (IOError, OSError):
                    pass
        break
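
Both dumpobj variants rely on the same atomic-write pattern: serialize to a temporary file created in the destination directory, then os.rename() it over the final name so readers never observe a half-written dump. A stripped-down, standard-library-only sketch of that pattern (names are illustrative):

import os
import pickle
import tempfile

def atomic_pickle(obj, dest_path):
    dest_dir = os.path.dirname(dest_path) or "."
    # the temporary file must live on the same filesystem as dest_path,
    # otherwise os.rename() cannot atomically replace the target
    tmp_fd, tmp_path = tempfile.mkstemp(
        dir=dest_dir, prefix=os.path.basename(dest_path))
    try:
        with open(tmp_path, "wb") as tmp_f:
            pickle.dump(obj, tmp_f)
        os.rename(tmp_path, dest_path)
    finally:
        os.close(tmp_fd)
        if os.path.exists(tmp_path):
            # only reached if the rename did not happen
            os.remove(tmp_path)
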
コード例 #43
0
ファイル: _manage.py プロジェクト: B-Rich/entropy
    def _handle_config_protect(self, protect, mask, protectskip,
                               fromfile, tofile,
                               do_allocation_check = True,
                               do_quiet = False):
        """
        Handle configuration file protection. This method contains the logic
        for determining if a file should be protected from overwrite.
        """
        protected = False
        do_continue = False
        in_mask = False

        tofile_os = tofile
        fromfile_os = fromfile
        if not const_is_python3():
            tofile_os = const_convert_to_rawstring(tofile)
            fromfile_os = const_convert_to_rawstring(fromfile)

        if tofile in protect:
            protected = True
            in_mask = True

        elif os.path.dirname(tofile) in protect:
            protected = True
            in_mask = True

        else:
            tofile_testdir = os.path.dirname(tofile)
            old_tofile_testdir = None
            while tofile_testdir != old_tofile_testdir:
                if tofile_testdir in protect:
                    protected = True
                    in_mask = True
                    break
                old_tofile_testdir = tofile_testdir
                tofile_testdir = os.path.dirname(tofile_testdir)

        if protected: # check if perhaps, file is masked, so unprotected

            if tofile in mask:
                protected = False
                in_mask = False

            elif os.path.dirname(tofile) in mask:
                protected = False
                in_mask = False

            else:
                tofile_testdir = os.path.dirname(tofile)
                old_tofile_testdir = None
                while tofile_testdir != old_tofile_testdir:
                    if tofile_testdir in mask:
                        protected = False
                        in_mask = False
                        break
                    old_tofile_testdir = tofile_testdir
                    tofile_testdir = os.path.dirname(tofile_testdir)

        if not os.path.lexists(tofile_os):
            protected = False # file doesn't exist

        # check if it's a text file
        if protected:
            protected = entropy.tools.istextfile(tofile)
            in_mask = protected

        if fromfile is not None:
            if protected and os.path.lexists(fromfile_os) and (
                    not os.path.exists(fromfile_os)) and (
                        os.path.islink(fromfile_os)):
                # broken symlink, don't protect
                self._entropy.logger.log(
                    "[Package]",
                    etpConst['logging']['normal_loglevel_id'],
                    "WARNING!!! Failed to handle file protection for: " \
                    "%s, broken symlink in package" % (
                        tofile,
                    )
                )
                msg = _("Cannot protect broken symlink")
                mytxt = "%s:" % (
                    purple(msg),
                )
                self._entropy.output(
                    mytxt,
                    importance = 1,
                    level = "warning",
                    header = brown("   ## ")
                )
                self._entropy.output(
                    tofile,
                    level = "warning",
                    header = brown("   ## ")
                )
                protected = False

        if not protected:
            return in_mask, protected, tofile, do_continue

        ##                  ##
        # file is protected  #
        ##__________________##

        # check if protection is disabled for this element
        if tofile in protectskip:
            self._entropy.logger.log(
                "[Package]",
                etpConst['logging']['normal_loglevel_id'],
                "Skipping config file installation/removal, " \
                "as stated in client.conf: %s" % (tofile,)
            )
            if not do_quiet:
                mytxt = "%s: %s" % (
                    _("Skipping file installation/removal"),
                    tofile,
                )
                self._entropy.output(
                    mytxt,
                    importance = 1,
                    level = "warning",
                    header = darkred("   ## ")
                )
            do_continue = True
            return in_mask, protected, tofile, do_continue

        ##                      ##
        # file is protected (2)  #
        ##______________________##

        prot_status = True
        if do_allocation_check:
            spm_class = self._entropy.Spm_class()
            tofile, prot_status = spm_class.allocate_protected_file(fromfile,
                tofile)

        if not prot_status:
            # a protected file with the same content
            # is already in place, so not going to protect
            # the same file twice
            protected = False
            return in_mask, protected, tofile, do_continue

        ##                      ##
        # file is protected (3)  #
        ##______________________##

        oldtofile = tofile
        if oldtofile.find("._cfg") != -1:
            oldtofile = os.path.join(os.path.dirname(oldtofile),
                os.path.basename(oldtofile)[10:])

        if not do_quiet:
            self._entropy.logger.log(
                "[Package]",
                etpConst['logging']['normal_loglevel_id'],
                "Protecting config file: %s" % (oldtofile,)
            )
            mytxt = red("%s: %s") % (_("Protecting config file"), oldtofile,)
            self._entropy.output(
                mytxt,
                importance = 1,
                level = "warning",
                header = darkred("   ## ")
            )

        return in_mask, protected, tofile, do_continue
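
For display, the method strips the "._cfgNNNN_" prefix (10 characters: "._cfg", four digits, "_") from the allocated name to show the original configuration file path. A one-step sketch of that transformation (function name illustrative):

import os

def strip_cfg_prefix(path):
    """Turn "/etc/._cfg0000_hosts" back into "/etc/hosts"."""
    base = os.path.basename(path)
    if base.startswith("._cfg"):
        # "._cfg" + 4 digits + "_" == 10 characters
        base = base[10:]
    return os.path.join(os.path.dirname(path), base)
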
コード例 #44
0
ファイル: conf.py プロジェクト: Sabayon/entropy
    @contact: [email protected]
    @copyright: Fabio Erculiani
    @license: GPL-2

    B{Entropy Command Line Client}.

"""
import os
import errno
import sys
import argparse
import subprocess

from entropy.const import const_convert_to_unicode, const_is_python3, \
    const_mkstemp
if const_is_python3():
    from subprocess import getoutput
else:
    from commands import getoutput

from entropy.i18n import _
from entropy.output import readtext, darkgreen, brown, teal, purple, \
    blue, darkred

import entropy.tools

from _entropy.solo.commands.descriptor import SoloCommandDescriptor
from _entropy.solo.commands.command import SoloCommand


class SoloConf(SoloCommand):
コード例 #45
0
ファイル: xpaktools.py プロジェクト: dMaggot/entropy
def suck_xpak(tbz2file, xpakpath):
    """
    Extract the XPAK metadata section from a Portage .tbz2 package file
    and write it to a separate file.

    @param tbz2file: path to the source .tbz2 package file
    @type tbz2file: string
    @param xpakpath: destination path for the extracted XPAK data
    @type xpakpath: string
    @return: True if an XPAK section was found and extracted, False otherwise
    @rtype: bool
    """
    old = open(tbz2file, "rb")
    db = open(xpakpath, "wb")
    try:
        # position old to the end
        old.seek(0, os.SEEK_END)
        # read backward until we find the XPAK markers
        n_bytes = old.tell()
        counter = n_bytes - 1
        if const_is_python3():
            xpak_end = b"XPAKSTOP"
            xpak_start = b"XPAKPACK"
            xpak_entry_point = b"X"
        else:
            xpak_end = "XPAKSTOP"
            xpak_start = "XPAKPACK"
            xpak_entry_point = "X"

        xpak_tag_len = len(xpak_start)
        chunk_len = 3
        data_start_position = None
        data_end_position = None

        while counter >= (0 - chunk_len):

            old.seek(counter - n_bytes, os.SEEK_END)
            if (n_bytes - (abs(counter - n_bytes))) < chunk_len:
                chunk_len = 1
            read_bytes = old.read(chunk_len)
            read_len = len(read_bytes)

            entry_idx = read_bytes.rfind(xpak_entry_point)
            if entry_idx != -1:

                cut_gotten = read_bytes[entry_idx:]
                offset = xpak_tag_len - len(cut_gotten)
                chunk = cut_gotten + old.read(offset)

                if (chunk == xpak_end) and (data_start_position is None):
                    data_end_position = old.tell()

                elif (chunk == xpak_start) and (data_end_position is not None):
                    data_start_position = old.tell() - xpak_tag_len
                    break

            counter -= read_len

        if data_start_position is None:
            return False
        if data_end_position is None:
            return False

        # now write the found metadata to file,
        # starting from data_start_position and
        # ending at data_end_position
        old.seek(data_start_position)
        to_read = data_end_position - data_start_position
        while to_read > 0:
            data = old.read(to_read)
            db.write(data)
            to_read -= len(data)
        return True

    finally:
        old.close()
        db.flush()
        db.close()
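
A hypothetical call sketch for suck_xpak; the import path is an assumption based on the file name shown above, and the package path is illustrative:

# assumed import path, adjust to the actual module location:
# from entropy.spm.plugins.interfaces.portage_plugin.xpaktools import suck_xpak

pkg = "/var/cache/packages/app-misc/foo-1.0.tbz2"    # hypothetical package
if suck_xpak(pkg, "/tmp/foo-1.0.xpak"):
    print("XPAK metadata extracted")
else:
    print("no XPAK section found in", pkg)
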
コード例 #46
0
    @contact: [email protected]
    @copyright: Fabio Erculiani
    @license: GPL-2

    B{Entropy Command Line Client}.

"""
import os
import errno
import sys
import argparse
import subprocess

from entropy.const import const_convert_to_unicode, const_is_python3, \
    const_mkstemp
if const_is_python3():
    from subprocess import getoutput
else:
    from commands import getoutput

from entropy.i18n import _
from entropy.output import readtext, darkgreen, brown, teal, purple, \
    blue, darkred

import entropy.tools

from solo.commands.descriptor import SoloCommandDescriptor
from solo.commands.command import SoloCommand


class SoloConf(SoloCommand):
コード例 #47
0
def suck_xpak(tbz2file, xpakpath):
    """
    Extract the XPAK metadata section from a Portage .tbz2 package file
    and write it to a separate file.

    @param tbz2file: path to the source .tbz2 package file
    @type tbz2file: string
    @param xpakpath: destination path for the extracted XPAK data
    @type xpakpath: string
    @return: True if an XPAK section was found and extracted, False otherwise
    @rtype: bool
    """
    old, db = None, None
    try:
        old = open(tbz2file, "rb")
        db = open(xpakpath, "wb")
        # position old to the end
        old.seek(0, os.SEEK_END)
        # read backward until we find the XPAK markers
        n_bytes = old.tell()
        counter = n_bytes - 1
        if const_is_python3():
            xpak_end = b"XPAKSTOP"
            xpak_start = b"XPAKPACK"
            xpak_entry_point = b"X"
        else:
            xpak_end = "XPAKSTOP"
            xpak_start = "XPAKPACK"
            xpak_entry_point = "X"

        xpak_tag_len = len(xpak_start)
        chunk_len = 3
        data_start_position = None
        data_end_position = None

        while counter >= (0 - chunk_len):

            old.seek(counter - n_bytes, os.SEEK_END)
            if (n_bytes - (abs(counter - n_bytes))) < chunk_len:
                chunk_len = 1
            read_bytes = old.read(chunk_len)
            read_len = len(read_bytes)

            entry_idx = read_bytes.rfind(xpak_entry_point)
            if entry_idx != -1:

                cut_gotten = read_bytes[entry_idx:]
                offset = xpak_tag_len - len(cut_gotten)
                chunk = cut_gotten + old.read(offset)

                if (chunk == xpak_end) and (data_start_position is None):
                    data_end_position = old.tell()

                elif (chunk == xpak_start) and (data_end_position is not None):
                    data_start_position = old.tell() - xpak_tag_len
                    break

            counter -= read_len

        if data_start_position is None:
            return False
        if data_end_position is None:
            return False

        # now write the found metadata to file,
        # starting from data_start_position and
        # ending at data_end_position
        old.seek(data_start_position)
        to_read = data_end_position - data_start_position
        while to_read > 0:
            data = old.read(to_read)
            db.write(data)
            to_read -= len(data)
        return True

    finally:
        if old is not None:
            old.close()
        if db is not None:
            db.close()
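
Either variant leaves the extracted blob framed by the same markers the scan looks for, so a quick sanity check is possible with the standard library only (function name and path are illustrative):

import os

def looks_like_xpak(path):
    """Return True if the file starts with XPAKPACK and ends with XPAKSTOP."""
    with open(path, "rb") as xpak_f:
        head = xpak_f.read(8)
        xpak_f.seek(-8, os.SEEK_END)
        tail = xpak_f.read(8)
    return head == b"XPAKPACK" and tail == b"XPAKSTOP"

# usage sketch
# looks_like_xpak("/tmp/foo-1.0.xpak")
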