Example #1
    def temp_store_data(self, inv_UB, outdir='./', W=np.eye(3)):
        k_i = get_k(self.incident_energy)
        sin_psi, cos_psi = math.sin(self.psi), math.cos(self.psi)
        spec_to_uv = np.array([[ cos_psi, sin_psi, 0],
                               [-sin_psi, cos_psi, 0],
                               [       0,       0, 1]])
        spec_to_rlu = np.matmul(inv_UB, spec_to_uv)

        for i in range(self.ne):
            k_f = get_k(self.incident_energy - self.ph_energy_centers[i])
            theta, phi = self.polar, self.azimuthal
            I_col = np.expand_dims(self.intensity[:, i], axis=1)

            sin_theta = np.sin(theta)
            ex = np.cos(theta)
            ey = sin_theta * np.cos(phi)
            ez = sin_theta * np.sin(phi)
            q1 = k_i - ex * k_f
            q2 = -ey * k_f
            q3 = -ez * k_f
            Q_in_rlu = np.matmul(spec_to_rlu, np.vstack((q1, q2, q3))).T
            if not np.allclose(W, np.eye(3)):
                inv_W = inv(W)
                Q_in_rlu = np.matmul(inv_W, Q_in_rlu.T).T

            E_col = np.full((Q_in_rlu.shape[0], 1),
                            self.ph_energy_boundries[i])
            fname = outdir + 'QEI'
            # pickle.dump() returns None, so serialize with pickle.dumps()
            # first and append the optimized bytes to the file.
            with open(fname, 'ab+') as f:
                QEI_bytes = pickle.dumps(np.hstack((Q_in_rlu, E_col, I_col)))
                f.write(pickletools.optimize(QEI_bytes))
Example #2
def optimize(origpickle, protocol=2):
    """
    optimizes a pickle by stripping extraneous memoizing instructions and
    embedding a zlib compressed pickle inside the pickle
    """
    data = zlib.compress(pickletools.optimize(origpickle), 9)
    ast = Import(pickle.loads if PY2 else pickle._loads)(Import(zlib.decompress)(data))
    return pickletools.optimize(dumps(ast, protocol))
Example #3
def optimize(origpickle, protocol=2):
    """
    optimizes a pickle by stripping extraneous memoizing instructions and
    embedding a zlib compressed pickle inside the pickle
    """
    data = zlib.compress(pickletools.optimize(origpickle), 9)
    ast = Import(pickle.loads if PY2 else pickle._loads)(Import(
        zlib.decompress)(data))
    return pickletools.optimize(dumps(ast, protocol))
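Note on Examples #2 and #3: `Import` and `dumps` come from the surrounding module (a small pickle-AST builder), so the snippet is not runnable on its own. As a rough stdlib-only sketch of the same technique, assuming a hypothetical `_Call` helper in place of `Import`, the nested "decompress, then load" pickle could be built like this:

import pickle
import pickletools
import zlib

class _Call:
    # Pickles as "call fn(*args) at load time"; nesting two of these yields a
    # pickle that runs pickle.loads(zlib.decompress(data)) when loaded.
    def __init__(self, fn, *args):
        self.fn = fn
        self.args = args

    def __reduce__(self):
        return self.fn, self.args

def optimize_and_compress(origpickle, protocol=2):
    data = zlib.compress(pickletools.optimize(origpickle), 9)
    ast = _Call(pickle.loads, _Call(zlib.decompress, data))
    return pickletools.optimize(pickle.dumps(ast, protocol))

payload = optimize_and_compress(pickle.dumps(list(range(1000))))
assert pickle.loads(payload) == list(range(1000))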
Example #4
def main():
    from sys import argv
    if '--pickle' in argv:
        soft = stackless.enable_softswitch(
            False)  # no crash, if soft switching
        try:
            p = pickle_current_frame()
        finally:
            stackless.enable_softswitch(soft)
            p = pickletools.optimize(p)
        print('Pickle as bytes: ', repr(p))
    else:
        if bytes is str:
            # pickle created with Stackless v2.7.6r3, hg commit id 67088aa2da77
            p = b'\x80\x02c_stackless._wrap\nframe\nc_stackless._wrap\ncode\nq\x01(K\x00K\x01K\x03J\x03`\x03\x00U?g\x00\x00\x89\x00\x00\x87\x00\x00f\x01\x00d\x01\x00\x86\x00\x00}\x00\x00t\x00\x00j\x01\x00\x83\x00\x00j\x02\x00|\x00\x00t\x00\x00j\x03\x00f\x01\x00\x83\x02\x00j\x04\x00\x83\x00\x00\x01\x88\x00\x00d\x02\x00\x19SNh\x01(K\x01K\x01K\x04J\x13`\x03\x00U \x88\x00\x00j\x00\x00t\x01\x00j\x02\x00|\x00\x00j\x03\x00d\x01\x00\x83\x02\x00\x83\x01\x00\x01d\x00\x00SNJ\xff\xff\xff\xff\x86(U\x06appendU\x06pickleU\x05dumpsU\x05frametU\x07currentq\n\x85U)Stackless/test/unpickle_crash_ticket61.pyU\x04funcq\rK\x12U\x02\x00\x01U\x06resultq\x0f\x85)tR)bK\x00\x87(U\tstacklessU\x07taskletU\x04bindh\nU\x03runth\r\x85U)Stackless/test/unpickle_crash_ticket61.pyU\x14pickle_current_frameK\x0fU\x08\x00\x01\x06\x02\x0f\x02"\x01)h\x0f\x85tRq\x1f)b\x85R(h\x1fK\x00U\x10eval_frame_valuec__builtin__\ngetattr\nc__builtin__\n__import__\n(U\x08__main__))U\x00\x85tRU\x08__dict__\x86RK\x00}NNK3K\x0f)Ntb.'
        else:
            # pickle created with Stackless v3.3.5
            p = b'\x80\x03c_stackless._wrap\nframe\nc_stackless._wrap\ncode\nq\x01(K\x00K\x00K\x01K\x03K\x03CBg\x00\x00\x89\x00\x00\x87\x00\x00f\x01\x00d\x01\x00d\x02\x00\x86\x00\x00}\x00\x00t\x00\x00j\x01\x00\x83\x00\x00j\x02\x00|\x00\x00t\x00\x00j\x03\x00f\x01\x00\x83\x02\x00j\x04\x00\x83\x00\x00\x01\x88\x00\x00d\x03\x00\x19S(Nh\x01(K\x01K\x00K\x01K\x04K\x13C \x88\x00\x00j\x00\x00t\x01\x00j\x02\x00|\x00\x00j\x03\x00d\x02\x00\x83\x02\x00\x83\x01\x00\x01d\x00\x00SNK\x01J\xff\xff\xff\xff\x87(X\x06\x00\x00\x00appendX\x06\x00\x00\x00pickleX\x05\x00\x00\x00dumpsX\x05\x00\x00\x00frametX\x07\x00\x00\x00currentq\n\x85X)\x00\x00\x00Stackless/test/unpickle_crash_ticket61.pyq\x0cX\x04\x00\x00\x00funcq\rK\x0fC\x02\x00\x01X\x06\x00\x00\x00resultq\x0f\x85)tR)bX"\x00\x00\x00pickle_current_frame.<locals>.funcK\x00t(X\t\x00\x00\x00stacklessX\x07\x00\x00\x00taskletX\x04\x00\x00\x00bindh\nX\x03\x00\x00\x00runth\r\x85h\x0cX\x14\x00\x00\x00pickle_current_frameK\x0cC\x08\x00\x01\x06\x02\x12\x02"\x01)h\x0f\x85tRq\x1f)b\x85R(h\x1fK\x00X\x10\x00\x00\x00eval_frame_valuecbuiltins\ngetattr\ncimportlib\nimport_module\nX\x08\x00\x00\x00__main__\x85RX\x08\x00\x00\x00__dict__\x86RK\x00}NNK6K\x0c)Ntb.'

    if '--dis' in argv:
        pickletools.dis(p)
    else:
        frame = pickle.loads(p)
        frame.f_locals  # this line crashes Stackless
        print("No Crash, OK")
Example #5
def _cached_stylesheet(path):
    package_dir = os.path.dirname(__file__)
    source_path = _check_paths(
        os.path.exists,
        [os.path.join(_PALASO_DATA, path),
         os.path.join(package_dir, path)])

    cached_path = os.path.normpath(
        os.path.join(_PALASO_DATA, path + os.extsep + 'cz'))
    if os.path.exists(cached_path):
        import glob
        if _is_fresh(cached_path, [source_path] +
                     glob.glob(os.path.join(package_dir, '*.py'))):
            return cached_path
    else:
        path = os.path.dirname(cached_path)
        if not os.path.exists(path):
            os.makedirs(path)

    import pickletools
    with contextlib.closing(bz2.BZ2File(cached_path, 'wb')) as zf:
        zf.write(
            pickletools.optimize(
                pickle.dumps(
                    style.parse(open(source_path, 'r'),
                                error_level=sfm.level.Marker))))
    return cached_path
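Example #5 only shows the write side of the cache. Assuming the cache file produced above, a hypothetical reader (not part of the palaso package) simply reverses the steps; no "de-optimization" is needed because pickletools.optimize only rewrites opcodes:

import bz2
import contextlib
import pickle

def _load_cached_stylesheet(cached_path):
    # Hypothetical counterpart to _cached_stylesheet(): read back the
    # bz2-compressed, optimized pickle and unpickle it directly.
    with contextlib.closing(bz2.BZ2File(cached_path, 'rb')) as zf:
        return pickle.loads(zf.read())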
Example #6
 def pickle(self, form) -> str:
     """Compile to `pickle.loads`. The final fallback for `atom`."""
     # 0 is the "human-readable" backwards-compatible text protocol.
     dumps = pickletools.optimize(pickle.dumps(form, 0, fix_imports=False))
     r = repr(form).replace("\n", "\n  # ")
     nl = "\n" if "\n" in r else ""
     return f"__import__('pickle').loads({nl}  # {r}\n    {dumps!r}\n)"
Example #7
 def set_scanno(self):
     cur = self.db.execute("INSERT INTO twitter_scans "
                           "VALUES (NULL, ?, ?, ?, ?, ?)",
                           (self.mode, self.limit, self.parallel, self.seed,
                            pickletools.optimize(pickle.dumps(self))))
     self.scanno = cur.lastrowid
     self.db.commit()
Example #8
    def serialize(vector):
        """ Serializer a vector using pickle.

        :return bytes
        """
        return pickletools.optimize(
            pickle.dumps(vector, pickle.HIGHEST_PROTOCOL))
Example #9
    def test_optimize_binput_and_memoize(self):
        pickled = (b'\x80\x04\x95\x15\x00\x00\x00\x00\x00\x00\x00'
                   b']\x94(\x8c\x04spamq\x01\x8c\x03ham\x94h\x02e.')
        #    0: \x80 PROTO      4
        #    2: \x95 FRAME      21
        #   11: ]    EMPTY_LIST
        #   12: \x94 MEMOIZE
        #   13: (    MARK
        #   14: \x8c     SHORT_BINUNICODE 'spam'
        #   20: q        BINPUT     1
        #   22: \x8c     SHORT_BINUNICODE 'ham'
        #   27: \x94     MEMOIZE
        #   28: h        BINGET     2
        #   30: e        APPENDS    (MARK at 13)
        #   31: .    STOP
        self.assertIn(pickle.BINPUT, pickled)
        unpickled = pickle.loads(pickled)
        self.assertEqual(unpickled, ['spam', 'ham', 'ham'])
        self.assertIs(unpickled[1], unpickled[2])

        pickled2 = pickletools.optimize(pickled)
        unpickled2 = pickle.loads(pickled2)
        self.assertEqual(unpickled2, ['spam', 'ham', 'ham'])
        self.assertIs(unpickled2[1], unpickled2[2])
        self.assertNotIn(pickle.BINPUT, pickled2)
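The test above checks for opcodes by scanning the raw bytes. A rough interactive way to see the same effect (a hypothetical helper, not part of the test suite) is to count put-style opcodes in the pickletools.dis() output before and after optimizing:

import io
import pickle
import pickletools

def count_puts(payload):
    # Count MEMOIZE/BINPUT-family opcodes in the disassembly of a pickle.
    buf = io.StringIO()
    pickletools.dis(payload, out=buf)
    return sum(('MEMOIZE' in line) or ('BINPUT' in line)
               for line in buf.getvalue().splitlines())

shared = ['ham']
data = ['spam', shared, shared]        # one genuine back-reference
raw = pickle.dumps(data, protocol=4)
slim = pickletools.optimize(raw)
print(len(raw), count_puts(raw), '->', len(slim), count_puts(slim))
loaded = pickle.loads(slim)
assert loaded[1] is loaded[2]          # the shared reference survives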
Example #10
 def encode(self, obj):
     # Remove unnecessary put opcodes to shrink the payload and speed up loading
     # Cannot handle objects that contain self-references
     # Loading works the same as the BASIC mode
     pickled = pickle.dumps(obj, protocol=pickle.HIGHEST_PROTOCOL)
     identical = pickletools.optimize(pickled)
     return identical
Example #11
  def __init__(self):
    super(Manifest, self).__init__(bungienet.GetDestinyManifest())

    definition_path = os.path.join(os.path.dirname(__file__), 'definitions.pickle.gz')
    try:
      self['definitions'] = pickle.loads(gzip.open(definition_path).read())
    except:
      self['definitions'] = {'__url__': None}

    definition_url = self['mobileWorldContentPaths']['en']
    if sqlite3 and self['definitions']['__url__'] != definition_url:
      logging.warning('Definition file %r is based on %r; fetching %r.', definition_path,
                      self['definitions']['__url__'], definition_url)
      self['definitions'] = self.FetchDefinitions(definition_url)
      self['definitions']['__url__'] = definition_url
      logging.warning('Saving %r to %r.', definition_url, definition_path + '.tmp')
      try:
        with gzip.open(definition_path + '.tmp', 'wb') as f:
          f.write(pickletools.optimize(pickle.dumps(self['definitions'], -1)))
        os.rename(definition_path + '.tmp', definition_path)
      except:
        logging.exception('Unable to update %r:', definition_path)
      else:
        logging.warning('Done.')
    del self['definitions']['__url__']
Example #12
    def run(self):

        olddata = self.fetch_values(self.joystickid)

        try:
            while not self.gl_.P2CS_STOP:

                data = self.fetch_values(self.joystickid)
                print('\n')
                print('Joystick id %s ' % data.id)
                print('Joystick name %s ' % data.name)
                print('Joystick axes %s ' % data.axes)
                print('Joystick axes_status %s ' % data.axes_status)
                print('Joystick button %s ' % data.buttons)
                print('Joystick button_status %s ' % data.button_status)
                print('Joystick hats %s ' % data.hat)
                print('Joystick hats_status %s ' % data.hats_status)
                print('Joystick balls %s ' % data.balls)
                print('Joystick balls_status %s ' % data.balls_status)

                if data is not None:
                    # Only send joystick info if data changed since the last check
                    # data and olddata have to be a JoystickObject instance
                    if isinstance(data, JoystickObject) and isinstance(
                            olddata, JoystickObject):
                        if not data.__dict__ == olddata.__dict__:
                            # Send data to the socket. The socket must be connected to a remote socket.
                            # The optional flags argument has the same meaning as for recv() above.
                            # Unlike send(), this method continues to send data from string until either
                            # all data has been sent or an error occurs. None is returned on success.
                            # On error, an exception is raised, and there is no way to determine how much data,
                            # if any, was successfully sent.
                            pickle_data = pickle.dumps(data)
                            package = pickle.dumps(
                                (len(pickle_data), pickle_data))
                            self.sock.sendall(pickletools.optimize(package))

                            data_received = self.sock.recv(4096)
                            # print('Received', repr(data_received))
                            # self.s.send(pickle.dumps(b'quit'))

                        olddata = data

                # if FRAME % 30 == 0:
                #    raise Exception

                # brief pause (1 ms) between transfers
                time.sleep(0.001)
        except Exception as error:
            print('\n[-]JoystickClient - ERROR, socket : %s ' % error)

        finally:
            try:
                self.sock.shutdown(socket.SHUT_RDWR)
                self.sock.close()
            except OSError:
                # ignoring error messages
                ...

        print('\n[+]JoystickClient - INFO, socket thread is now closed.')
Example #13
    def __init__(self, xmlnode_or_acct_or_id=None, protocol=None, username=None,
                 password=None, data=None, action=None):

        # from an incoming XML node
        if isinstance(xmlnode_or_acct_or_id, libxml2.xmlNode):
            self.__from_xml(xmlnode_or_acct_or_id)

        # from an account object
        elif isinstance(xmlnode_or_acct_or_id, common.AccountBase):
            acct = xmlnode_or_acct_or_id
            self.id       = acct.id
            self.protocol = acct.protocol_info().get('name_truncated', acct.protocol)
            self.username = acct.name
            self.password = acct.password
            try:
                self.data     = cPickle.dumps(acct.get_options())
            except:
                print 'acct.get_options()', repr(acct.get_options())
                raise
            else:
                if hasattr(pickletools, 'optimize'):
                    self.data = pickletools.optimize(self.data)
            self.action   = action

        # id
        else:
            self.id       = xmlnode_or_acct_or_id
            self.protocol = protocol
            self.username = username
            self.password = password
            self.data     = data
            self.action   = action

        if not isinstance(self.id, int) or not self.id >= 0:
            raise ValueError("positive int id is required! (got %r)" % self.id)
Example #14
def getStreamedConstant(constant_value):
    # Note: The marshal module cannot persist all unicode strings and
    # therefore cannot be used. Instead we use pickle.
    try:
        saved = cpickle.dumps(
            constant_value,
            protocol=0 if type(constant_value) is unicode else pickle_protocol)
    except TypeError:
        warning("Problem with persisting constant '%r'." % constant_value)
        raise

    saved = pickletools.optimize(saved)

    # Check that the constant is restored correctly.
    try:
        restored = cpickle.loads(saved)
    except:
        warning("Problem with persisting constant '%r'." % constant_value)
        raise

    if not Constants.compareConstants(restored, constant_value):
        raise AssertionError("Streaming of constant changed value",
                             constant_value, "!=", restored, "types:",
                             type(constant_value), type(restored))

    return saved
Example #15
def save(filepath, obj):
    gc.disable()
    with gzip.open(filepath, "wb") as f:
        pickled = pickle.dumps(obj)
        optimized_pickle = pickletools.optimize(pickled)
        f.write(optimized_pickle)
    gc.enable()
Example #16
def pickle_optimize_compress(data, label):
    log('pickling {}...'.format(label))
    data = pickle.dumps(data)
    compress(data, 'pickled data')
    log('optimizing pickled data...')
    data = pickletools.optimize(data)
    return compress(data, 'optimized pickled data')
Example #17
    def dump(self, obj, f):
        if self.mode is PickleMode.BASIC:
            pickle.dump(obj, f, protocol=2)

        elif self.mode is PickleMode.OPTIMIZE:
            # Remove unnecessary put opcodes to shrink the payload and speed up loading
            # Cannot handle objects that contain self-references
            # Loading works the same as the BASIC mode
            pickled = pickle.dumps(obj, protocol=pickle.HIGHEST_PROTOCOL)
            identical = pickletools.optimize(pickled)
            f.write(identical)

        elif self.mode is PickleMode.MEMMAP:
            # Use a memory-mapped file when possible to improve speed
            joblib.dump(obj,
                        f,
                        compress=False,
                        protocol=pickle.HIGHEST_PROTOCOL)

        elif self.mode is PickleMode.COMPRESS:
            # Compress to reduce size; memory-mapped files cannot be used
            # joblib.dump(obj, f, compress='lz4', protocol=pickle.HIGHEST_PROTOCOL)
            joblib.dump(obj,
                        f,
                        compress=True,
                        protocol=pickle.HIGHEST_PROTOCOL)

        else:
            raise NotImplementedError
Example #18
def getStreamedConstant(constant_value):
    # Note: The marshal module cannot persist all unicode strings and
    # therefore cannot be used. Instead we use pickle.
    try:
        saved = cpickle.dumps(
            constant_value,
            protocol = 0 if type(constant_value) is unicode else pickle_protocol
        )
    except TypeError:
        warning("Problem with persisting constant '%r'." % constant_value)
        raise

    saved = pickletools.optimize(saved)

    # Check that the constant is restored correctly.
    try:
        restored = cpickle.loads(
            saved
        )
    except:
        warning("Problem with persisting constant '%r'." % constant_value)
        raise

    if not Constants.compareConstants(restored, constant_value):
        raise AssertionError(
            "Streaming of constant changed value",
            constant_value,
            "!=",
            restored,
            "types:",
            type(constant_value),
            type(restored)
        )

    return saved
Example #19
    def _dumps(self, obj, optimize=True):
        assert self.serializer == 'pickle'

        bytes_object = pickle.dumps(obj)
        if optimize:
            return pickletools.optimize(bytes_object)
        return bytes_object
Example #21
    def _test_uniform(self, test_episodes, save_to):
        env = PreferenceEnv(**self.env_args)
        obs = env.reset()
        total = np.zeros(self.env_args['n_agents'])
        max_possible = np.zeros(self.env_args['n_agents'])
        data = []
        episode_data = {'obs': [], 'actions': [], 'rewards': [], 'done': []}
        while len(data) < test_episodes:
            action, _states = self.model.predict(obs, deterministic=True)
            max_possible += np.max(obs[:, :self.env_args['n_actions']], axis=1)
            episode_data['obs'].append(obs)
            episode_data['actions'].append(action)
            obs, rewards, done, info = env.step(action)
            episode_data['rewards'].append(rewards)
            episode_data['done'].append(done)
            total += rewards
            if done:
                for k in episode_data:
                    episode_data[k] = np.array(episode_data[k])
                data.append(episode_data)
                episode_data = {
                    'obs': [],
                    'actions': [],
                    'rewards': [],
                    'done': []
                }
                obs = env.reset()

        print(max_possible, total)

        if save_to:
            gzip.open(save_to + '.gz',
                      'wb').write(pickletools.optimize(pickle.dumps(data)))
Example #22
    def _writeDataBin(self):
        tmObj = self.tmObj
        error = tmObj.error

        good = True
        if not os.path.exists(self.binDir):
            try:
                os.makedirs(self.binDir, exist_ok=True)
            except Exception:
                error(f'Cannot create directory "{self.binDir}"')
                good = False
        if not good:
            return False
        try:
            with gzip.open(self.binPath, "wb", compresslevel=GZIP_LEVEL) as f:
                # pickle.dump(self.data, f, protocol=PICKLE_PROTOCOL)
                f.write(
                    optimize(pickle.dumps(self.data,
                                          protocol=PICKLE_PROTOCOL)))
        except Exception as e:
            error(f'Cannot write to file "{self.binPath}" because: {str(e)}')
            self.cleanDataBin()
            good = False
        self.dataLoaded = time.time()
        return good
Example #23
def dump(fname, obj, store=None):
    """Optimize obj, then save it as a Pickle file to fname."""

    obj = optimize(obj, store=store)
    data = pickletools.optimize(pickle.dumps(obj, -1))
    with open(fname, 'wb') as fobj:
        fobj.write(data)
    return obj
Example #24
def replace_key_primer(force=False):
    global KEY, PRIMER
    assert force, 'Do not run this unless you know what you are doing!'
    KEY = me.Key.new(range(1 << 8), 1 << 10)
    PRIMER = me.Primer.new(KEY)
    data = pickle.dumps((KEY.data, PRIMER.data), pickle.HIGHEST_PROTOCOL)
    with builtins.open(DATA_FILE, 'wb') as file:
        file.write(bz2.compress(pickletools.optimize(data)))
Example #25
 def pack_to(self, file):
     self._p.dump(self._batch)
     b = self._b.getvalue()
     if self.optimize:
         b = pickletools.optimize(b)
     file.write(b)
     self.create()
     return len(b)
Example #26
 def set_scanno(self):
     self.cur.execute(
         "INSERT INTO twitter_scans "
         "VALUES (DEFAULT, %s, %s, %s, %s, %s)"
         "RETURNING scan", (self.mode, self.limit, self.parallel, self.seed,
                            pickletools.optimize(pickle.dumps(self))))
     self.scanno = self.cur.fetchone()[0]
     self.db.commit()
Example #27
 def set_scanno(self):
     self.cur.execute("INSERT INTO twitter_scans "
                      "VALUES (DEFAULT, %s, %s, %s, %s, %s)"
                      "RETURNING scan",
                      (self.mode, self.limit, self.parallel, self.seed,
                       pickletools.optimize(pickle.dumps(self))))
     self.scanno = self.cur.fetchone()[0]
     self.db.commit()
Example #28
 def pickle(self, form) -> str:
     """The final fallback for self.quoted()."""
     try:  # Try the more human-readable and backwards-compatible text protocol first.
         dumps = pickle.dumps(form, 0)
     except pickle.PicklingError:  # Fall back to the highest binary protocol if that didn't work.
         dumps = pickle.dumps(form, pickle.HIGHEST_PROTOCOL)
     dumps = pickletools.optimize(dumps)
     return f"__import__('pickle').loads(  # {form!r}\n    {dumps!r}\n)"
Example #29
def save_obj_fast(obj, name, path = globalPath+'res/'):
	"""
		save_obj_fast(obj, name, path = globalPath+'res/')
	"""

	with gzip.open(path + name + '.pkl', 'wb') as f:
		pickled = pickle.dumps(obj)
		optimized_pickle = pickletools.optimize(pickled)
		f.write(optimized_pickle)
Example #30
 def save(self, path):
     "Saves complete namespace tree to file given by path."
     if self.__master is None:
         state = self.__get_state()
         data = zlib.compress(pickletools.optimize(pickle.dumps(state)), 9)
         with open(path, 'wb') as file:
             file.write(data)
     else:
         self.__master.save(path)
Example #32
    def write_to_stream(self, byte_stream):
        try:
            bytes_ = pickle.dumps(self)
            assert len(bytes_) <= 2147483647

            write_length_object(byte_stream, optimize(bytes_), 4)

        except (pickle.PickleError, AssertionError):
            raise SavingException("error saving curve to bytes!")
Example #33
def inspect():
    import my_pickle as pickle
    import pickletools
    p1 = Person('Guido van Rossum')
    pickled = pickle.dumps(p1, protocol=0)
    pickled = pickletools.optimize(pickled)
    print(str(pickled)[2:-2].replace('\\n', '\n'))
    pickletools.dis(pickled)
    p2 = pickle.loads(pickled)
    return p2
Example #34
    def put(self, site, rule, context, source_files=None):
        if source_files is None:
            source_files = [None]

        with self.connection() as db:
            # Preserve existing source files
            cursor = db.execute(
                'SELECT source_files '
                'FROM contexts '
                'WHERE site = ? AND rule = ?;', (site, rule))
            existing_source_files = cursor.fetchone()
            if existing_source_files:
                existing_source_files = pickle.loads(
                    str(existing_source_files[0]))
                source_files += existing_source_files
                source_files = sorted(list(set(source_files)))

            # Check to see if the context is already shelved.
            cursor = db.execute(
                'SELECT id FROM contexts '
                'WHERE site = ? AND rule = ?;', (site, rule))
            serialized_context = pickle.dumps(context, HIGHEST_PROTOCOL)
            serialized_source_files = pickle.dumps(source_files,
                                                   HIGHEST_PROTOCOL)
            # Optimize pickle size, and conform it to sqlite's BLOB type.
            serialized_context = blobify(
                pickletools.optimize(serialized_context))
            serialized_source_files = blobify(
                pickletools.optimize(serialized_source_files))

            if cursor.fetchone() is None:
                db.execute(
                    'INSERT INTO contexts '
                    '(site, rule, context, source_files) VALUES (?, ?, ?, ?);',
                    (site, rule, serialized_context, serialized_source_files))
            else:
                db.execute(
                    'UPDATE contexts '
                    'SET context = ?, '
                    '    source_files = ? '
                    'WHERE site = ? AND rule = ?;',
                    (serialized_context, serialized_source_files, site, rule))
            db.commit()
Example #35
 def save_compressed(self,
                     filename="solution.pgz",
                     *,
                     saveconstraints=True,
                     **cpickleargs):
     "Pickle a file and then compress it into a file with extension."
     with gzip.open(filename, "wb") as f:
         with SolSavingEnvironment(self, saveconstraints):
             pickled = pickle.dumps(self, **cpickleargs)
         f.write(pickletools.optimize(pickled))
Example #36
async def send_jobs(reader: asyncio.StreamReader,
                    writer: asyncio.StreamWriter) -> None:
    global queue
    addr = writer.get_extra_info('peername')
    print(f"Got connection from {addr}")
    sendqueue = deque(queue._queue)  # type: ignore
    sendqueue.appendleft(running_job)
    pickledqueue = pickletools.optimize(pickle.dumps(sendqueue, protocol=4))
    writer.write(pickledqueue)
    await writer.drain()
    writer.close()
Example #37
 def save(self) -> None:
     data = pickle.dumps(self.model, protocol=4)
     debug = f"Raw: {len(data)} bytes, "
     data = pickletools.optimize(data)
     debug += f"Optimized: {len(data)} bytes, "
     data = lzma.compress(data)
     debug += f"Compressed: {len(data)} bytes."
     print(debug)
     print("Game saved.")
     with open(SAVE_FILE_NAME, "wb") as f:
         f.write(data)
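The example stores the model but does not show how it is read back. Assuming the file written by save() above, a hypothetical loader reverses the two reversible steps (LZMA, then pickle); the optimize() pass needs no counterpart because it only rewrites opcodes:

import lzma
import pickle

def load_game(path):
    # Hypothetical counterpart to save() above; pass the module's
    # SAVE_FILE_NAME (or any path written by save()).
    with open(path, "rb") as f:
        return pickle.loads(lzma.decompress(f.read()))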
Example #38
def dump(x, filename, opener=open, optimize=False):
    gc.collect()
    filename = expanduser(filename)
    safe_mkdir(os.path.dirname(filename))
    with opener(filename, 'wb') as fp:
        if optimize:
            s = pickle.dumps(x, pickle.HIGHEST_PROTOCOL)
            s = pickletools.optimize(s)
            fp.write(s)
        else:
            pickle.dump(x, fp, pickle.HIGHEST_PROTOCOL)
    return filename
Example #39
 def testUriSerializationWithoutSlots(self):
     orig_protocol = config.PICKLE_PROTOCOL_VERSION
     config.PICKLE_PROTOCOL_VERSION = 2
     try:
         u = Pyro4.core.URI("PYRO:obj@localhost:1234")
         d, compr = self.serializer.serializeData(u)
         self.assertFalse(compr)
         import pickletools
         d = pickletools.optimize(d)
         result1 = b'\x80\x02cPyro4.core\nURI\n)\x81(U\x04PYROU\x03objNU\tlocalhostM\xd2\x04tb.'
         result2 = b'\x80\x02cPyro4.core\nURI\n)\x81(X\x04\x00\x00\x00PYROX\x03\x00\x00\x00objNX\t\x00\x00\x00localhostM\xd2\x04tb.'
         self.assertTrue(d in (result1, result2))
     finally:
         config.PICKLE_PROTOCOL_VERSION = orig_protocol
Example #40
    def test_optimize_long_binget(self):
        data = [str(i) for i in range(257)]
        data.append(data[-1])
        for proto in range(pickle.HIGHEST_PROTOCOL + 1):
            pickled = pickle.dumps(data, proto)
            unpickled = pickle.loads(pickled)
            self.assertEqual(unpickled, data)
            self.assertIs(unpickled[-1], unpickled[-2])

            pickled2 = pickletools.optimize(pickled)
            unpickled2 = pickle.loads(pickled2)
            self.assertEqual(unpickled2, data)
            self.assertIs(unpickled2[-1], unpickled2[-2])
            self.assertNotIn(pickle.LONG_BINGET, pickled2)
            self.assertNotIn(pickle.LONG_BINPUT, pickled2)
Example #41
    def put(self, site, rule, context, source_files=None):
        if source_files is None:
            source_files = [None]

        with self.connection() as db:
            # Preserve existing source files
            cursor = db.execute('SELECT source_files '
                                'FROM contexts '
                                'WHERE site = ? AND rule = ?;',
                                (site, rule))
            existing_source_files = cursor.fetchone()
            if existing_source_files:
                existing_source_files = pickle.loads(str(existing_source_files[0]))
                source_files += existing_source_files
                source_files = sorted(list(set(source_files)))

            # Check to see if the context is already shelved.
            cursor = db.execute('SELECT id FROM contexts '
                                'WHERE site = ? AND rule = ?;', (site, rule))
            serialized_context = pickle.dumps(context, HIGHEST_PROTOCOL)
            serialized_source_files = pickle.dumps(source_files, HIGHEST_PROTOCOL)
            # Optimize pickle size, and conform it to sqlite's BLOB type.
            serialized_context = blobify(pickletools.optimize(serialized_context))
            serialized_source_files = blobify(pickletools.optimize(serialized_source_files))

            if cursor.fetchone() is None:
                db.execute('INSERT INTO contexts '
                           '(site, rule, context, source_files) VALUES (?, ?, ?, ?);',
                           (site, rule, serialized_context, serialized_source_files))
            else:
                db.execute('UPDATE contexts '
                           'SET context = ?, '
                           '    source_files = ? '
                           'WHERE site = ? AND rule = ?;',
                           (serialized_context, serialized_source_files, site, rule))
            db.commit()
Example #42
def report(url, status, anomaly=None):
    # Sometimes the status is in a non-ASCII, non-Unicode, undeclared
    # encoding.
    if hasattr(status, "encode"):
        status = status.encode("ascii", "backslashreplace")
    if hasattr(anomaly, "encode"):
        anomaly = anomaly.encode("ascii", "backslashreplace")
    sys.stdout.buffer.write(pickletools.optimize(pickle.dumps({
        "url":     url,
        "status":  status,
        "anomaly": anomaly
    })))

    if url is not None:
        sys.exit(0)
    else:
        sys.exit(1)
Example #43
 def put(self, site, rule, context):
     with self.connection() as db:
         # Check to see if the context is already shelved.
         cursor = db.execute('SELECT id FROM contexts '
                             'WHERE site = ? AND rule = ?;', (site, rule))
         serialized = pickle.dumps(context, HIGHEST_PROTOCOL)
         # Optimize pickle size, and conform it to sqlite's BLOB type.
         serialized = blobify(pickletools.optimize(serialized))
         if cursor.fetchone() is None:
             db.execute('INSERT INTO contexts '
                        '(site, rule, context) VALUES (?, ?, ?);',
                        (site, rule, serialized))
         else:
             db.execute('UPDATE contexts '
                        'SET context = ? '
                        'WHERE site = ? AND rule = ?;',
                        (serialized, site, rule))
         db.commit()
Example #44
def main():
    from sys import argv
    if '--pickle' in argv:
        soft = stackless.enable_softswitch(False)  # no crash, if soft switching
        try:
            p = pickle_current_frame()
        finally:
            stackless.enable_softswitch(soft)
            p = pickletools.optimize(p)
        print('Pickle as bytes: ', repr(p))
    else:
        if bytes is str:
            # pickle created with Stackless v2.7.6r3, hg commit id 67088aa2da77
            p = b'\x80\x02c_stackless._wrap\nframe\nc_stackless._wrap\ncode\nq\x01(K\x00K\x01K\x03J\x03`\x03\x00U?g\x00\x00\x89\x00\x00\x87\x00\x00f\x01\x00d\x01\x00\x86\x00\x00}\x00\x00t\x00\x00j\x01\x00\x83\x00\x00j\x02\x00|\x00\x00t\x00\x00j\x03\x00f\x01\x00\x83\x02\x00j\x04\x00\x83\x00\x00\x01\x88\x00\x00d\x02\x00\x19SNh\x01(K\x01K\x01K\x04J\x13`\x03\x00U \x88\x00\x00j\x00\x00t\x01\x00j\x02\x00|\x00\x00j\x03\x00d\x01\x00\x83\x02\x00\x83\x01\x00\x01d\x00\x00SNJ\xff\xff\xff\xff\x86(U\x06appendU\x06pickleU\x05dumpsU\x05frametU\x07currentq\n\x85U)Stackless/test/unpickle_crash_ticket61.pyU\x04funcq\rK\x12U\x02\x00\x01U\x06resultq\x0f\x85)tR)bK\x00\x87(U\tstacklessU\x07taskletU\x04bindh\nU\x03runth\r\x85U)Stackless/test/unpickle_crash_ticket61.pyU\x14pickle_current_frameK\x0fU\x08\x00\x01\x06\x02\x0f\x02"\x01)h\x0f\x85tRq\x1f)b\x85R(h\x1fK\x00U\x10eval_frame_valuec__builtin__\ngetattr\nc__builtin__\n__import__\n(U\x08__main__))U\x00\x85tRU\x08__dict__\x86RK\x00}NNK3K\x0f)Ntb.'
        else:
            # pickle created with Stackless v3.3.5
            p = b'\x80\x03c_stackless._wrap\nframe\nc_stackless._wrap\ncode\nq\x01(K\x00K\x00K\x01K\x03K\x03CBg\x00\x00\x89\x00\x00\x87\x00\x00f\x01\x00d\x01\x00d\x02\x00\x86\x00\x00}\x00\x00t\x00\x00j\x01\x00\x83\x00\x00j\x02\x00|\x00\x00t\x00\x00j\x03\x00f\x01\x00\x83\x02\x00j\x04\x00\x83\x00\x00\x01\x88\x00\x00d\x03\x00\x19S(Nh\x01(K\x01K\x00K\x01K\x04K\x13C \x88\x00\x00j\x00\x00t\x01\x00j\x02\x00|\x00\x00j\x03\x00d\x02\x00\x83\x02\x00\x83\x01\x00\x01d\x00\x00SNK\x01J\xff\xff\xff\xff\x87(X\x06\x00\x00\x00appendX\x06\x00\x00\x00pickleX\x05\x00\x00\x00dumpsX\x05\x00\x00\x00frametX\x07\x00\x00\x00currentq\n\x85X)\x00\x00\x00Stackless/test/unpickle_crash_ticket61.pyq\x0cX\x04\x00\x00\x00funcq\rK\x0fC\x02\x00\x01X\x06\x00\x00\x00resultq\x0f\x85)tR)bX"\x00\x00\x00pickle_current_frame.<locals>.funcK\x00t(X\t\x00\x00\x00stacklessX\x07\x00\x00\x00taskletX\x04\x00\x00\x00bindh\nX\x03\x00\x00\x00runth\r\x85h\x0cX\x14\x00\x00\x00pickle_current_frameK\x0cC\x08\x00\x01\x06\x02\x12\x02"\x01)h\x0f\x85tRq\x1f)b\x85R(h\x1fK\x00X\x10\x00\x00\x00eval_frame_valuecbuiltins\ngetattr\ncimportlib\nimport_module\nX\x08\x00\x00\x00__main__\x85RX\x08\x00\x00\x00__dict__\x86RK\x00}NNK6K\x0c)Ntb.'

    if '--dis' in argv:
        pickletools.dis(p)
    else:
        frame = pickle.loads(p)
        frame.f_locals  # this line crashes Stackless
        print("No Crash, OK")
Example #45
def _cached_stylesheet(path):
    package_dir = os.path.dirname(__file__)
    source_path = _check_paths(os.path.exists, 
        [ os.path.join(_PALASO_DATA, path),
          os.path.join(package_dir, path)])
    
    cached_path = os.path.normpath(os.path.join(
                        _PALASO_DATA,
                        path+os.extsep+'cz'))
    if os.path.exists(cached_path):
        import glob
        if _is_fresh(cached_path, [source_path] 
                + glob.glob(os.path.join(package_dir, '*.py'))):
            return cached_path
    else:
        path = os.path.dirname(cached_path)
        if not os.path.exists(path):
            os.makedirs(path)
    
    import pickletools
    with contextlib.closing(bz2.BZ2File(cached_path, 'wb')) as zf:
        zf.write(pickletools.optimize(
            pickle.dumps(style.parse(open(source_path,'r'), error_level=sfm.level.Marker))))
    return cached_path
Example #46
def to_pickle(self) -> bytes:
    buf = pickle.dumps(self)
    return pickletools.optimize(buf)
Example #47
def pickle(obj):
    """Serializes and optimizes `obj`."""
    return pickletools.optimize(cPickle.dumps(obj))
Example #48
	def getState( self ):
		with Model.LockRace() as race:
			if not race or race.isRunning():
				return None
			return pickletools.optimize( pickle.dumps(race, 2) )
Example #49
 def checkpoint(self):
     self.cur.execute("UPDATE twitter_scans SET state = %s WHERE scan = %s",
                      (pickletools.optimize(pickle.dumps(self)),
                       self.scanno))
     self.db.commit()
Example #50
 def _save(path, obj):
     "Save an object to the specified path."
     data = zlib.compress(pickletools.optimize(pickle.dumps(obj)), 9)
     with open(path, 'wb') as file:
         file.write(data)
Example #51
def write_to_file(x, file):
    b1 = pickle.dumps(x, -1)
    b2 = pickletools.optimize(b1)
    b2 = lzma.compress(b2, preset=0)
    with open(file, mode='wb') as f:
        f.write(b2)
Example #52
def pickled(cmd, *args):
    # The optimize() is here because pickle is tuned for backreferences at
    # the expense of wire output length when there are no backreferences.
    return pickletools.optimize(pickle.dumps((cmd, args),
                                             pickle.HIGHEST_PROTOCOL))
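The comment above states the trade-off most directly: pickle favors back-references, so for small messages without them every PUT opcode is wire overhead that optimize() can drop. A quick generic illustration (the message contents are made up):

import pickle
import pickletools

msg = ('fetch', ('https://example.com/feed', {'timeout': 30, 'retries': 2}))
raw = pickle.dumps(msg, pickle.HIGHEST_PROTOCOL)
slim = pickletools.optimize(raw)
print(len(raw), '->', len(slim), 'bytes')  # typically a few bytes smaller
assert pickle.loads(slim) == msg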
Example #53
def serialize(pyobj):
	# pickle writes bytes, so the buffer must be BytesIO (StringIO would fail)
	sbuf = io.BytesIO()
	pickle.dump(pyobj, sbuf, pickle.HIGHEST_PROTOCOL)
	return zlib.compress(pickletools.optimize(sbuf.getvalue()))
Example #54
 def setUp(self):
     self.original_route = self.app.routes[0]
     self.serialized = pickle.dumps(self.app.routes)
     self.serialized_optimized = pickletools.optimize(self.serialized)
     self.deserialized = pickle.loads(self.serialized)
     self.deserialized_optimized = pickle.loads(self.serialized_optimized)
Example #55
 def dumps(self, arg, proto=None):
     return pickletools.optimize(pickle.dumps(arg, proto))
Example #56
 def dumps(self, arg, proto=0, fast=False):
     return pickletools.optimize(pickle.dumps(arg, proto))