Example #1
0
def read_dataset(data_dir):
    """Load the humanseg training/validation records, building the pickle cache on demand.

    Parameters
    ----------
    data_dir : str
        Directory that holds (or will receive) ``humanseg.pickle``.

    Returns
    -------
    tuple
        ``(training_records, validation_records)`` — the ``'training'`` and
        ``'val'`` entries of the cached dict.
    """
    pickle_filepath = os.path.join(data_dir, "humanseg.pickle")
    if not os.path.exists(pickle_filepath):
        # Cache miss: build the image lists once and persist them.
        result = create_image_lists(data_dir)
        print("Pickling ...")
        with open(pickle_filepath, 'wb') as f:
            pickle.dump(result, f)
    else:
        print("Found pickle file!")

    # Load the cache exactly once. The original opened the file three times
    # (an 'a'-mode touch plus two full unpickles) and threw away the first
    # loaded copy.
    with open(pickle_filepath, 'rb') as f:
        result = pickle.load(f)
    training_records = result['training']
    validation_records = result['val']
    return training_records, validation_records
Example #2
0
def python_memcache_deserializer(key, value, flags):
    """Decode a raw memcache *value* according to its serialization *flags*."""
    if flags == 0:
        return value

    if flags & FLAG_TEXT:
        return value.decode('utf8')

    if flags & FLAG_INTEGER:
        return int(value)

    if flags & FLAG_LONG:
        # Python 2 keeps a separate long type; Python 3 only has int.
        return int(value) if six.PY3 else long_type(value)

    if flags & FLAG_PICKLE:
        # NOTE(review): unpickling cached bytes executes arbitrary code if the
        # cache is writable by an untrusted party — confirm the trust model.
        try:
            return pickle.Unpickler(BytesIO(value)).load()
        except Exception:
            logging.info('Pickle error', exc_info=True)
            return None

    # Unknown flag combination: hand back the raw bytes unchanged.
    return value
Example #3
0
def _load_old_object(directory):
	# type: (str) -> Tuple[str, Dict[str, List[bytes]]]
	with open(os.path.join(directory, 'tmp', 'old_dn'), 'rb') as fd:
		p = pickle.Unpickler(fd)
		(old_dn, old_object) = p.load()

	return (old_dn, old_object)
def _load(f, constructors=None):
    '''
    Unpickle one object from the open file handle `f`.

    constructors: Optional dict mapping fully-qualified class names
      ("module.Class") to callables consulted first during unpickling. Useful
      for substituting an alternative constructor or supporting renamed
      classes. Implemented through the cPickle `Unpickler.find_global` hook:
      https://docs.python.org/2/library/pickle.html#subclassing-unpicklers
      NOTE(review): `find_global` is a Python 2 cPickle mechanism — confirm
      behavior if this ever runs under Python 3.
    '''
    from six.moves import cPickle as pickle
    from baiji.serialization.util.importlib import class_from_str

    def _resolve_global(module_name, class_name):
        qualified = '{}.{}'.format(module_name, class_name)
        try:
            return constructors[qualified]
        except KeyError:
            # `Unpickler` does not expose its default `find_global`, so we
            # re-implement the fallback: import the class by its dotted path.
            return class_from_str(qualified)

    unpickler = pickle.Unpickler(f)
    if constructors:
        unpickler.find_global = _resolve_global
    return unpickler.load()
Example #5
0
    def verify(self, key, tolerance=0.0):
        """
        Verify data associated with the given key, with the specified tolerance. The
        key is internally cast to a string.

        Raises CheckpointException if the key is unknown or verification fails,
        InvalidArgumentException if tolerance is not a non-negative float.
        """

        key = str(key)
        if key not in self.__filenames:
            raise CheckpointException("Missing checkpoint with key %s" % key)
        if not isinstance(tolerance, float) or tolerance < 0.0:
            raise InvalidArgumentException(
                "tolerance must be a non-negative float")
        # Use a context manager so the checkpoint file is closed promptly;
        # the original leaked the open handle.
        with open(self.__filename(key), "rb") as handle:
            c_cs = pickle.Unpickler(handle).load()

        try:
            # Map each stored id back to its live object and verify it against
            # the checkpointed state.
            id_map = self.__id_map[key]
            for c_id in c_cs:
                c = id_map[c_id]
                self._Checkpointer__verify(c, c_cs[c_id], tolerance=tolerance)
            dolfin.info("Verified checkpoint with key %s" % key)
        except CheckpointException as e:
            dolfin.info(str(e))
            raise CheckpointException(
                "Failed to verify checkpoint with key %s" % key)

        return
Example #6
0
    def restore(self, key, cs=None):
        """
        Restore Constant s and Function s with the given key. If cs is supplied,
        only restore Constant s and Function s found in cs. The key is internally
        cast to a string.

        Raises CheckpointException if the key is unknown.
        """

        key = str(key)
        if key not in self.__filenames:
            raise CheckpointException("Missing checkpoint with key %s" % key)
        if cs is not None:
            # Normalise cs to a list of object ids.
            cs = self._Checkpointer__check_cs(cs)
            cs = [c.id() for c in cs]

        # Use a context manager so the checkpoint file is closed promptly;
        # the original leaked the open handle.
        with open(self.__filename(key), "rb") as handle:
            c_cs = pickle.Unpickler(handle).load()
        if cs is None:
            # No filter supplied: restore everything in the checkpoint.
            cs = list(c_cs.keys())

        id_map = self.__id_map[key]
        for c_id in cs:
            c = id_map[c_id]
            self._Checkpointer__unpack(c, c_cs[c_id])

        return
Example #7
0
def _unpickle_iterable(pickle_fh):
    with pickle_fh:
        unpklr = pickle.Unpickler(pickle_fh)
        try:
            while True:
                yield unpklr.load()
        except EOFError:
            pass
Example #8
0
def load_old(old):
    # type: (dict) -> dict
    """Return the state parked in DOVECOT_OLD_PICKLE, deleting that file.

    If no pickle file exists, *old* is returned unchanged.
    """
    if not os.path.exists(DOVECOT_OLD_PICKLE):
        return old
    with open(DOVECOT_OLD_PICKLE, "rb") as fd:
        restored = pickle.Unpickler(fd).load()
    # One-shot hand-off: consume and remove the parked state.
    os.unlink(DOVECOT_OLD_PICKLE)
    return restored
Example #9
0
    def test_broken_pickle_with_shared(self):
        # Pickle a compiled theano function whose graph contains a shared
        # variable, routing the raw ndarrays through pickle's persistent-id
        # side channel, then unpickle it again.
        saves = []

        def pers_save(obj):
            # Persistent-id hook: stash ndarrays out-of-band and emit their
            # index in `saves` as the id; returning None pickles normally.
            if isinstance(obj, numpy.ndarray):
                saves.append(obj)
                return len(saves) - 1
            else:
                return None

        def pers_load(id):
            # Inverse of pers_save: resolve a persistent id to its array.
            return saves[id]

        # NOTE(review): `a` is never used below — presumably left over from a
        # variant that also called the compiled function.
        a = numpy.random.rand(4, 5)
        b = numpy.random.rand(5, 4)

        x = theano.tensor.matrix()
        y = theano.shared(b)

        f = theano.function([x], theano.tensor.dot(x, y))

        from theano.compat import BytesIO
        fp = BytesIO()
        # Protocol-2 pickler with the persistent-id hook installed.
        p = pickle.Pickler(fp, 2)
        p.persistent_id = pers_save
        try:
            p.dump(f)
        except NotImplementedError as e:
            # DebugMode functions cannot be pickled; skip the test quietly.
            if exc_message(e).startswith('DebugMode is not picklable'):
                return
            else:
                raise
        fp2 = BytesIO(fp.getvalue())
        fp.close()
        # Round-trip: unpickle from a fresh buffer with the matching
        # persistent-load hook.
        p = pickle.Unpickler(fp2)
        p.persistent_load = pers_load
        f2 = p.load()
        fp2.close()
Example #10
0
def load_datasets(pickle_filename):
    """Unpickle and return the object stored in *pickle_filename*."""
    with open(pickle_filename, 'rb') as fh:
        unpickler = pickle.Unpickler(fh)
        return unpickler.load()
Example #11
0
 def loads(cls, s):
     """Deserialize the pickled bytes *s*, resolving globals via ``cls.find_class``."""
     obj = pickle.Unpickler(BytesIO(s))
     # NOTE(review): assigning `find_global` is the Python 2 cPickle hook;
     # Python 3's Unpickler uses a `find_class` method that must be overridden
     # by subclassing — confirm the targeted runtime.
     obj.find_global = cls.find_class
     return obj.load()
Example #12
0
def handler(dn, new, old, command):
    # type: (str, Optional[Dict[str, List[bytes]]], Optional[Dict[str, List[bytes]]], str) -> None
    """Listener handler keeping the fetchmailrc file in sync with LDAP changes.

    ``command == 'r'`` marks a rename/move: the object's state is parked in
    FETCHMAIL_OLD_PICKLE between the delete and the re-add, and restored from
    there on the next invocation.
    """
    if os.path.exists(FETCHMAIL_OLD_PICKLE):
        # Pickle data is binary — open 'rb' (the original used text mode 'r',
        # which breaks under Python 3).
        with open(FETCHMAIL_OLD_PICKLE, 'rb') as fd:
            p = pickle.Unpickler(fd)
            old = p.load()
        os.unlink(FETCHMAIL_OLD_PICKLE)
    if command == 'r':
        # Park the old state for the upcoming re-add; binary mode for pickle.
        with open(FETCHMAIL_OLD_PICKLE, 'w+b') as fd:
            os.chmod(FETCHMAIL_OLD_PICKLE, 0o600)
            p = pickle.Pickler(fd)
            # Pickler.dump() returns None — the original assigned its result
            # back to `old`, clobbering the object state.
            p.dump(old)
            p.clear_memo()

    flist = load_rc(fn_fetchmailrc)
    if old and not new and command != 'r':
        # object has been deleted ==> remove entry from rc file
        flist = objdelete(flist, old)
        write_rc(flist, fn_fetchmailrc)

    elif old and new and details_complete(old) and not details_complete(new):
        # data is now incomplete ==> remove entry from rc file
        flist = objdelete(flist, old)
        write_rc(flist, fn_fetchmailrc)

    elif new and details_complete(new):
        # obj has been created or modified
        passwd = None
        if old:
            # old exists ==> object has been modified ==> get old password and remove object entry from rc file
            passwd = get_pw_from_rc(flist, old['uid'][0].decode('UTF-8'))
            flist = objdelete(flist, old)

        if not details_complete(new, incl_password=True):
            if only_password_reset(old, new):
                ud.debug(ud.LISTENER, ud.INFO,
                         'fetchmail: password has been reset - nothing to do')
                # only password has been reset ==> nothing to do
                return

            # new obj does not contain password
            if passwd:
                # passwd has been set in old ==> use old password
                ud.debug(ud.LISTENER, ud.INFO, 'fetchmail: using old password')
                objappend(flist, new, passwd)
                write_rc(flist, fn_fetchmailrc)
            else:
                ud.debug(
                    ud.LISTENER, ud.ERROR,
                    'fetchmail: user "%s": no password set in old and new' %
                    new['uid'][0])
        else:
            # new obj contains password ==> use new password
            objappend(flist, new)
            write_rc(flist, fn_fetchmailrc)

            ud.debug(ud.LISTENER, ud.INFO, 'fetchmail: using new password')

            configRegistry = univention.config_registry.ConfigRegistry()
            configRegistry.load()

            # Clear the plaintext password attribute in LDAP now that it is
            # written to the rc file; requires root for the machine connection.
            listener.setuid(0)
            try:
                lo = univention.uldap.getMachineConnection()
                modlist = [('univentionFetchmailPasswd',
                            new['univentionFetchmailPasswd'][0], b"")]
                lo.modify(dn, modlist)
                ud.debug(ud.LISTENER, ud.INFO,
                         'fetchmail: reset password successfully')
            except Exception as exc:
                ud.debug(
                    ud.LISTENER, ud.ERROR,
                    'fetchmail: cannot reset password in LDAP (%s): %s' %
                    (dn, exc))
            finally:
                listener.unsetuid()