def GetWxBitmap(self):
        """Decompress the stored pixel buffer and build a wx.Bitmap.

        Chooses the RGB or RGBA buffer constructor based on the stored
        pixel-format flag.
        """

        (width, height) = self._size

        pixels = lz4.loads(self._data)

        if self._format == wx.BitmapBufferFormat_RGB:
            return wx.BitmapFromBuffer(width, height, pixels)

        return wx.BitmapFromBufferRGBA(width, height, pixels)
Exemple #2
0
  def Decompress(self, Name, output_path="", data_lst=""):
    """Decompress *Name*: either a bare .lz4 file or a zip of lz4 members.

    Args:
      Name: path of the .lz4 file or zip archive.
      output_path: optional directory to chdir into / prefix the output with.
      data_lst: optional substring filter; only archive members whose name
        contains it are extracted.

    Archive members containing "@@" are treated as chunks of one large file
    and appended to it in namelist order.

    NOTE(review): os.chdir mutates process-wide state, and when output_path
    is relative the isdir() test below runs *after* the chdir -- confirm
    callers pass absolute paths.
    """
    if output_path != "":
      os.chdir(output_path)
    if os.path.isdir(output_path):
      output_name = output_path + '/' + os.path.basename(Name.replace(".lz4", ""))
    else:
      # Covers both output_path == "" and a non-directory path.
      output_name = Name.replace(".lz4", "")

    def _ensure_dir(path):
      # mkdir -p semantics. An empty path means "current directory": nothing
      # to create (the original crashed on makedirs("") for flat members).
      if not path:
        return
      # Was a bare 'except:'; narrowed to OSError, re-raised only when the
      # directory is truly absent (EAFP race-safe create).
      try:
        os.makedirs(path)
      except OSError:
        if not os.path.exists(path):
          raise

    if not zipfile.is_zipfile(Name):
      # Plain lz4 file: decompress in one shot. 'with' guarantees the
      # handles close even if lz4.loads raises (original leaked on error).
      with open(output_name, "wb") as out, open(Name, 'rb') as src:
        out.write(lz4.loads(src.read()))
    else:
      with zipfile.ZipFile(Name, 'r') as archive:
        for fname in archive.namelist():
          # Apply the optional member-name filter.
          if data_lst != "" and fname.find(data_lst) == -1:
            continue
          if fname.find("@@") != -1:
            # Chunk of a large file: strip the "@@" marker and append.
            larg_file_name = '/' + os.path.basename(fname)
            larg_file_name = fname.replace(larg_file_name, "").replace("@@", "")
            _ensure_dir(larg_file_name.replace(os.path.basename(larg_file_name), ""))
            with open(larg_file_name, 'ab') as out:
              out.write(lz4.loads(archive.read(fname)))
          else:
            _ensure_dir(fname.replace(os.path.basename(fname), ""))
            with open(fname, 'wb') as out:
              out.write(lz4.loads(archive.read(fname)))
    def store_sample(self, input_bytes, filename, type_tag):
        """ Store a sample into the DataStore.
            Args:
                input_bytes: the actual bytes of the sample e.g. f.read()
                filename: name of the file (used purely as meta data not for lookup)
                type_tag: ('exe','pcap','pdf','json','swf', or ...)
            Returns:
                the md5 of the sample.
        """
        # Sniff the type when the caller couldn't determine it.
        if type_tag == 'unknown':
            print('Info: Unknown File -- Trying to Determine Type...')
            type_tag = self.guess_type_tag(input_bytes, filename)

        # lz4-wrapped samples are decompressed before hitting the store.
        was_compressed = type_tag == 'lz4'
        if was_compressed:
            input_bytes = lz4.loads(input_bytes)

        md5 = self.data_store.store_sample(input_bytes, filename, type_tag)

        # Tag with the type -- skipped when the tag is just the lz4 marker.
        if not was_compressed:
            self.add_tags(md5, type_tag)

        return md5
    def store_sample(self, input_bytes, filename, type_tag):
        """ Store a sample into the DataStore.
            Args:
                input_bytes: the actual bytes of the sample e.g. f.read()
                filename: name of the file (used purely as meta data not for lookup)
                type_tag: ('exe','pcap','pdf','json','swf', or ...)
            Returns:
                the md5 of the sample.
        """

        # If the sample comes in with an unknown type_tag try to determine it
        if type_tag == 'unknown':
            print 'Info: Unknown File -- Trying to Determine Type...'
            type_tag = self.guess_type_tag(input_bytes)
            # Content sniffing came back generic ('data'): fall back to the
            # file extension, accepting only memory-image extensions.
            if type_tag == 'data':
                print 'Info: File -- Trying to Determine Type from filename...'
                ext = os.path.splitext(filename)[1][1:]
                if ext in ['mem', 'vmem']:
                    type_tag = 'mem'
                else:
                    print 'Alert: Failed to Determine Type for %s' % filename
                    # NOTE(review): kills the whole process on an unknown
                    # type; marked temporary by the original author.
                    exit(1)  # Temp

        # Do we have a compressed sample? If so decompress it
        if type_tag == 'lz4':
            input_bytes = lz4.loads(input_bytes)

        # Store the sample
        md5 = self.data_store.store_sample(input_bytes, filename, type_tag)

        # Add the type_tags to tags
        # (skipped when the tag is just the lz4 compression marker)
        if type_tag != 'lz4':
            self.add_tags(md5, type_tag)

        return md5
Exemple #5
0
def acquire_manifest(version, platform, asset_qual, sound_qual, dest_file):
    """Download the dbmanifest matching (platform, asset_qual, sound_qual)
    for *version*, lz4-decompress it, and write it to *dest_file*.

    Returns:
        dest_file (the path that was written).
    Raises:
        ValueError: when no manifest selector matches the requested combo.
        requests.HTTPError: propagated via raise_for_status().
    """
    meta = "/".join((DBMANIFEST.format(version), "all_dbmanifest"))
    m = SESSION.get(meta)
    m.raise_for_status()
    # One selector per non-empty line of the index file.
    mp = map(lambda x: manifest_selector_t(*x.split(",")),
             filter(bool, m.text.split("\n")))

    get_file = None
    for selector in mp:
        if selector.platform == platform and \
           selector.asset_qual == asset_qual and \
           selector.sound_qual == sound_qual:
            get_file = selector.filename
            break

    if get_file is None:
        # BUG FIX: previously fell through and crashed inside "/".join with
        # an opaque TypeError; fail loudly with context instead.
        raise ValueError(
            "no manifest for platform=%r asset_qual=%r sound_qual=%r"
            % (platform, asset_qual, sound_qual))

    abso = "/".join((DBMANIFEST.format(version), get_file))
    resp = SESSION.get(abso)
    resp.raise_for_status()

    # Payload framing: size word at bytes 4:8, lz4 body from byte 16 on.
    buf = resp.content
    bio = io.BytesIO()
    bio.write(buf[4:8])
    bio.write(buf[16:])
    data = lz4.loads(bio.getvalue())
    with open(dest_file, "wb") as write_db:
        write_db.write(data)

    return dest_file
    def GetWxImage(self):
        """Decompress the stored pixel buffer and return a wx.Image.

        RGB data converts straight from the buffer; RGBA goes through a
        temporary wx.Bitmap which is scheduled for destruction afterwards.
        """

        (width, height) = self._size
        pixels = lz4.loads(self._data)

        if self._format == wx.BitmapBufferFormat_RGB:
            return wx.ImageFromBuffer(width, height, pixels)

        bitmap = wx.BitmapFromBufferRGBA(width, height, pixels)
        image = wx.ImageFromBitmap(bitmap)
        # Free the intermediate bitmap once control returns to the event loop.
        wx.CallAfter(bitmap.Destroy)
        return image
def acquire_manifest(version, platform, asset_qual, sound_qual, dest_file):
    """Download the dbmanifest matching (platform, asset_qual, sound_qual)
    for *version*, lz4-decompress it, and write it to *dest_file*."""
    base = DBMANIFEST.format(version)
    meta_resp = SESSION.get("/".join((base, "all_dbmanifest")))
    meta_resp.raise_for_status()

    # One selector per non-empty line of the index.
    selectors = (manifest_selector_t(*line.split(","))
                 for line in meta_resp.text.split("\n") if line)

    get_file = None
    for sel in selectors:
        matches = (sel.platform == platform
                   and sel.asset_qual == asset_qual
                   and sel.sound_qual == sound_qual)
        if matches:
            get_file = sel.filename
            break

    resp = SESSION.get("/".join((base, get_file)))
    resp.raise_for_status()

    # Payload framing: size word at bytes 4:8, lz4 body from byte 16 on.
    buf = resp.content
    data = lz4.loads(buf[4:8] + buf[16:])
    with open(dest_file, "wb") as write_db:
        write_db.write(data)

    return dest_file
Exemple #8
0
    def _parse(self, trc_raw, lz4_):
        """
        Parse a TRC file and populate the instance's attributes.

        :param str trc_raw: TRC as json string.
        :param bool lz4_: when true, trc_raw is lz4-compressed and is
            decompressed and utf-8 decoded before JSON parsing.
        """
        if lz4_:
            trc_raw = lz4.loads(trc_raw).decode("utf-8")
        trc = json.loads(trc_raw)
        # Plain scalar fields, copied straight from the document.
        self.isd = trc['isd']
        self.version = trc['version']
        self.time = trc['time']
        self.core_quorum = trc['core_quorum']
        self.trc_quorum = trc['trc_quorum']
        self.core_isps = trc['core_isps']
        self.root_cas = trc['root_cas']
        # Each core AS entry is a base64-wrapped JSON certificate whose key
        # and signature fields are themselves base64-encoded.
        for subject in trc['core_ases']:
            cert_dict = base64.b64decode(
                trc['core_ases'][subject]).decode('utf-8')
            cert_dict = json.loads(cert_dict)
            cert_dict['subject_sig_key'] = base64.b64decode(
                cert_dict['subject_sig_key'])
            cert_dict['subject_enc_key'] = base64.b64decode(
                cert_dict['subject_enc_key'])
            cert_dict['signature'] = base64.b64decode(cert_dict['signature'])
            self.core_ases[subject] = Certificate.from_dict(cert_dict)
        self.policies = trc['policies']
        self.registry_server_addr = trc['registry_server_addr']
        self.registry_server_cert = trc['registry_server_cert']
        self.root_dns_server_addr = trc['root_dns_server_addr']
        self.root_dns_server_cert = trc['root_dns_server_cert']
        self.trc_server_addr = trc['trc_server_addr']
        # Signatures arrive base64-encoded, keyed by subject.
        for subject in trc['signatures']:
            self.signatures[subject] = \
                base64.b64decode(trc['signatures'][subject])
    def store_sample(self, input_bytes, filename, type_tag):
        """ Store a sample into the DataStore.
            Args:
                input_bytes: the actual bytes of the sample e.g. f.read()
                filename: name of the file (used purely as meta data not for lookup)
                type_tag: ('exe','pcap','pdf','json','swf', or ...)
            Returns:
                the md5 of the sample.
        """

        # If the sample comes in with an unknown type_tag try to determine it
        if type_tag == 'unknown':
            print 'Info: Unknown File -- Trying to Determine Type...'
            type_tag = self.guess_type_tag(input_bytes)
            # Generic 'data' result: fall back to the file extension,
            # accepting only memory-image extensions.
            if type_tag == 'data':
                print 'Info: File -- Trying to Determine Type from filename...'
                ext = os.path.splitext(filename)[1][1:]
                if ext in ['mem','vmem']:
                    type_tag = 'mem'
                else:
                    print 'Alert: Failed to Determine Type for %s' % filename
                    # NOTE(review): exits the whole process; marked "Temp".
                    exit(1) # Temp

        # Do we have a compressed sample? If so decompress it
        if type_tag == 'lz4':
            input_bytes = lz4.loads(input_bytes)

        # Store the sample
        md5 = self.data_store.store_sample(input_bytes, filename, type_tag)

        # Add the type_tags to tags
        # (skipped when the tag is just the lz4 compression marker)
        if type_tag != 'lz4':
            self.add_tags(md5, type_tag)

        return md5
Exemple #10
0
    def store_sample(self, input_bytes, filename, type_tag):
        """ Store a sample into the DataStore.
            Args:
                input_bytes: the actual bytes of the sample e.g. f.read()
                filename: name of the file (used purely as meta data not for lookup)
                type_tag: ('exe','pcap','pdf','json','swf', or ...)
            Returns:
                the md5 of the sample.
        """
        # Caller doesn't know the type: sniff it from content + filename.
        if type_tag == 'unknown':
            print('Info: Unknown File -- Trying to Determine Type...')
            type_tag = self.guess_type_tag(input_bytes, filename)

        # Decompress lz4-wrapped payloads before storage.
        if type_tag == 'lz4':
            input_bytes = lz4.loads(input_bytes)

        md5 = self.data_store.store_sample(input_bytes, filename, type_tag)

        # Tag with the type, unless the tag was only the compression marker.
        if type_tag != 'lz4':
            self.add_tags(md5, type_tag)

        return md5
Exemple #11
0
 def decrypt(self, val, json=False):
     """Reverse the encrypt pipeline: blowfish-decrypt with self.key,
     lz4-decompress, then optionally JSON-decode when ``json`` is true."""
     if not j.basetype.string.check(val):
         raise RuntimeError("needs to be string")
     plain = j.db.serializers.blowfish.loads(val, self.key)
     plain = lz4.loads(plain)
     return ujson.loads(plain) if json else plain
Exemple #12
0
    def deserialize(self):
        """Rebuild the numpy array from the lz4-compressed subarray chunks."""
        out = np.ndarray(shape=self.shape, dtype=self.dtype)

        if out.ndim <= 1:
            # 0D/1D arrays were serialized as a single chunk.
            raw = lz4.loads(self.serialized_subarrays[0])
            out[:] = np.frombuffer(raw, self.dtype).reshape(out.shape)
        else:
            # One chunk per outermost-axis subarray.
            for sub, raw_chunk in zip(out, self.serialized_subarrays):
                sub[:] = np.frombuffer(lz4.loads(raw_chunk),
                                       self.dtype).reshape(sub.shape)

        # Fortran-layout arrays were serialized transposed; undo that.
        if self.layout == 'F':
            out = out.transpose()

        return out
 def decrypt(self, val, json=False):
     """Reverse of the encrypt pipeline: blowfish-decrypt with self.key,
     lz4-decompress, then optionally JSON-decode when ``json`` is true."""
     if not j.basetype.string.check(val):
         raise RuntimeError("needs to be string")
     val = j.db.serializers.blowfish.loads(val, self.key)
     val = lz4.loads(val)
     if json:
         val = ujson.loads(val)
     return val
Exemple #14
0
def Decompresstion(sou, des):
    """Decompress the lz4 file *sou* into *des*.

    On IOError (e.g. missing source file) a message is printed ("file not
    found" in Chinese) instead of the exception propagating.
    """
    try:
        # 'with' already flushes and closes both handles on exit; the
        # original's explicit flush()/close() inside the block were
        # redundant. Output is opened first, matching the original order.
        with open(des, 'wb') as out, open(sou, 'rb') as in_file:
            out.write(lz4.loads(in_file.read()))
    except IOError:
        print('文件找不到')
Exemple #15
0
def Decompresstion(sou, des):
    """Decompress the lz4 file *sou* into *des*; prints a message ("file
    not found" in Chinese) instead of raising on IOError."""
    try:
        with open(des, 'wb') as out:
            with open(sou, 'rb') as inFile:
                out.write(lz4.loads(inFile.read()))
            # flush/close are redundant inside 'with' but harmless.
            out.flush()
            out.close()
    except IOError:
        print('文件找不到')
Exemple #16
0
    def _GetData(self):
        """Return the stored payload, lz4-decompressing it first when it
        was stored compressed."""

        if not self._compressed:
            return self._data

        return lz4.loads(self._data)
Exemple #17
0
 def _GetData( self ):
     """Return the stored payload, lz4-decompressing it first when it was
     stored compressed."""
     
     if self._compressed:
         
         return lz4.loads( self._data )
         
     else:
         
         return self._data
Exemple #18
0
 def __getitem__(self, key):
     """Return the stored float64 series for *key* on this axis as a
     pd.Series indexed by the opposite axis's labels."""
     # Keys are namespaced by axis, e.g. "0_foo".
     key = ("%s_%s" % (self.axis, key))
     data = np.fromstring(lz4.loads(self.store._db.get(key)),
                          dtype=np.float64)
     # axis 0 rows are labelled by columns and vice versa.
     index_key = "columns" if self.axis==0 else "index"
     index = self.store._meta[index_key]
     x = pd.Series(data, index=index)
     # Strips the "N_" prefix; assumes the axis renders as exactly one
     # character -- TODO confirm.
     x.name = key[2:]
     return x
Exemple #19
0
    def readLZ4(self, data, shape, dtype, size):
        """
        Unpack an lz4-compressed frame and return np array image data.

        data: compressed frame bytes
        shape: image shape (reversed before the numpy reshape)
        dtype: numpy dtype name or object
        size: unused here (kept for interface compatibility)
        """
        if lz4 is None:
            # Was an assert; asserts vanish under "python -O", so raise
            # an explicit error instead.
            raise RuntimeError("No LZ4 module")
        dtype = np.dtype(dtype)
        data = lz4.loads(data)

        # frombuffer replaces the deprecated np.fromstring; .copy() keeps
        # the result writable, matching fromstring's behavior.
        return np.reshape(np.frombuffer(data, dtype=dtype).copy(), shape[::-1])
Exemple #20
0
 def from_raw(cls, chain_raw, lz4_=False):
     """Build a CertificateChain from a JSON string, lz4-decompressing it
     first when ``lz4_`` is true. Entries are taken in sorted key order."""
     if lz4_:
         chain_raw = lz4.loads(chain_raw).decode("utf-8")
     parsed = json.loads(chain_raw)
     certs = [Certificate(parsed[key]) for key in sorted(parsed)]
     return CertificateChain(certs)
Exemple #21
0
 def _get(self, key):
     """Fetch one lz4-compressed float64 column from the DB and return it
     as a pd.Series indexed by self._columns ("_meta" is reserved)."""
     if isinstance(key, bytes):
         key = key.decode("utf-8")
     assert key != "_meta"
     raw = lz4.loads(self._db.get(key))
     series = pd.Series(np.fromstring(raw, dtype=np.float64),
                        index=self._columns)
     series.name = key
     return series
Exemple #22
0
def lz4_decompress(data, size):
    """Decompress a raw lz4 block as found in UnityFS files.

    python-lz4's legacy loads() wants a 4-byte size prefix on the block,
    so one is prepended first (see python-lz4 issue #6).
    """
    try:
        import lz4
    except ImportError:
        raise RuntimeError("python-lz4 is required to read UnityFS files")

    # https://github.com/python-lz4/python-lz4/issues/6
    return lz4.loads(struct.pack("i", size) + data)
Exemple #23
0
def lz4_decompress(data, size):
	"""Decompress a raw lz4 block (UnityFS); prepends the 4-byte size
	prefix that python-lz4's legacy loads() expects."""
	try:
		import lz4
	except ImportError:
		raise RuntimeError("python-lz4 is required to read UnityFS files")

	# https://github.com/python-lz4/python-lz4/issues/6
	data = struct.pack("i", size) + data
	return lz4.loads(data)
Exemple #24
0
        def inner(*args, **kwargs):
            """Caching wrapper body: load func's result from a cache file
            when present, otherwise call func and cache the result.

            NOTE(review): wrapper, func, load, save, StringIO and
            purge_empty_file are closed over from the enclosing scope
            (not visible in this chunk).
            """
            mode = wrapper.mode
            name = wrapper.name
            dir = wrapper.dir
            compress = wrapper.compress
            verbose = wrapper.verbose

            # Caching disabled: call straight through.
            if mode in [False, 'skip']: return func(*args, **kwargs)

            if name == None: name = func.__name__
            if not os.path.exists(dir): os.makedirs(dir)

            cachePath = os.path.join(dir, name)

            if compress: import lz4

            # Cache hit (unless a forced update): deserialize and return.
            if os.path.exists(cachePath) and mode != 'update':

                if compress == 'lz4':
                    cached = StringIO(lz4.loads(open(cachePath, 'r').read()))

                else:
                    cached = cachePath
                    #cached  = open(cachePath,'r')

                if verbose: print('\t!! Cached from %s' % cachePath)

                aOut = load(cached)

                # A 0-d result may be treated as a corrupt cache file,
                # purged and recomputed (controlled by purge_empty_file).
                if aOut.shape != () or purge_empty_file == False:
                    return aOut

                else:
                    os.remove(cachePath)
                    raise ValueError('empty cache file (erased): %s' %
                                     (cachePath))

            # Cache miss or forced update: compute, then persist.
            if os.path.exists(cachePath) == False or mode == 'update':
                aOut = func(*args, **kwargs)

                if compress == 'lz4':

                    cached = StringIO()
                    save(cached, aOut)
                    open(cachePath, 'w').write(lz4.dumps(cached.getvalue()))

                else:
                    fCache = open(cachePath, 'wb')
                    save(fCache, aOut)
                    fCache.close()

                if verbose: print('\t!! Cached to %s' % cachePath)
                return aOut

            raise KeyError('failed exception handling for %s and %s' %
                           (cachePath, mode))
Exemple #25
0
def main():
    """Decompress mozlz4 files named on the command line (stdin by
    default) and write the result to stdout.

    NOTE(review): _magic and lz4 come from module scope (not visible
    here). Writing decompressed bytes via sys.stdout.write presumes a
    Python 2 byte-oriented stdout; Python 3 would need
    sys.stdout.buffer. Confirm target interpreter.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        'name', default=[sys.stdin], nargs='*', type=argparse.FileType('rb'))
    args = parser.parse_args()
    for fh in args.name:
        # Every mozlz4 file starts with the magic tag; reject anything else.
        magic = fh.read(len(_magic))
        if _magic != magic:
            raise ValueError("%s is not valid mozlz4: %r" % (fh.name, magic))
        sys.stdout.write(lz4.loads(fh.read()))
Exemple #26
0
    def readLZ4(self, data, shape, dtype, size):
        """
        Unpack lz4 compressed frame and return np array image data
        """
        import lz4

        decompressed = lz4.loads(data)
        arr = np.fromstring(decompressed, dtype=np.dtype(dtype))
        # shape arrives reversed relative to numpy's row-major order.
        return np.reshape(arr, shape[::-1])
Exemple #27
0
        def inner(*args, **kwargs):
            """Caching wrapper body (Python 2): load func's result from a
            cache file when present, otherwise compute and cache it.

            NOTE(review): wrapper, func, load, save, StringIO and
            purge_empty_file are closed over from the enclosing scope
            (not visible in this chunk).
            """
            mode        = wrapper.mode
            name        = wrapper.name
            dir         = wrapper.dir
            compress    = wrapper.compress
            verbose     = wrapper.verbose

            # Caching disabled: pass straight through.
            if mode in [False, 'skip']      :   return func( *args, **kwargs )

            if name == None                 :   name = func.__name__
            if not os.path.exists(dir)      :   os.makedirs(dir)

            cachePath   = os.path.join(dir, name)

            if compress                     :   import lz4


            # Cache hit (unless a forced update): deserialize and return.
            if os.path.exists( cachePath ) and mode != 'update':

                if compress == 'lz4':
                    cached  = StringIO( lz4.loads( open(cachePath,'r').read() ) )

                else:
                    cached  = cachePath
                    #cached  = open(cachePath,'r')

                if verbose: print '\t!! Cached from %s'%cachePath

                aOut    = load( cached )

                # A 0-d result may be treated as a corrupt cache file,
                # purged and recomputed (controlled by purge_empty_file).
                if aOut.shape != () or purge_empty_file == False:
                    return aOut

                else:
                    os.remove( cachePath )
                    raise ValueError, 'empty cache file (erased): %s'%(cachePath)

            # Cache miss or forced update: compute, then persist.
            if os.path.exists( cachePath ) == False or mode == 'update':
                aOut    = func( *args, **kwargs )

                if compress == 'lz4':

                    cached  = StringIO()
                    save( cached, aOut )
                    open(cachePath,'w').write( lz4.dumps( cached.getvalue() ) )

                else:
                    fCache  = open(cachePath,'wb')
                    save( fCache, aOut )
                    fCache.close()

                if verbose: print '\t!! Cached to %s'%cachePath
                return aOut

            raise KeyError, 'failed exception handling for %s and %s'%( cachePath, mode )
Exemple #28
0
    def symbol_rows(self, symbol_contexts):
        """Yield (csv_rows, context) per symbol: read the attached file,
        lz4-decompress when configured, and drop the CSV header row."""
        for _symbol, context in symbol_contexts:
            handle = context.pop('_open_file')
            raw = handle.read()
            handle.close()
            if settings.DATA_COMPRESSION == 'lz4':
                raw = lz4.loads(raw)

            # One string per data row; header labels sliced off.
            yield raw.strip().split('\n')[1:], context
Exemple #29
0
    def symbol_rows(self, symbol_contexts):
        """Yield (csv_rows, context) per symbol: read the attached file,
        lz4-decompress when configured, and strip the CSV header row."""
        for symbol, context in symbol_contexts:
            f = context.pop('_open_file')
            data = f.read()
            f.close()
            if settings.DATA_COMPRESSION == 'lz4':
                data = lz4.loads(data)

            # split the file into rows, slicing off the header labels
            csv_rows = data.strip().split('\n')[1:]
            yield csv_rows, context
Exemple #30
0
    def __init__(self, raw_message, *args, **kwargs):
        """Decode a msgpack response message (optionally lz4-compressed,
        via the ``compression`` kwarg) and extract its 'datas' field.

        Raises MessageFormatError when 'datas' is missing.
        """
        if kwargs.pop('compression', False):
            raw_message = lz4.loads(raw_message)
        message = msgpack.unpackb(raw_message)

        try:
            self.datas = message['datas']
        except KeyError:
            errors_logger.exception("Invalid response message : %s" %
                                    message)
            raise MessageFormatError("Invalid response message")
Exemple #31
0
def CreateFromNetworkString(network_string):
    """Inflate a network payload (zlib first, lz4 on zlib failure) and
    build the object from the resulting string."""
    try:
        obj_string = zlib.decompress(network_string)
    except zlib.error:
        # Not zlib data -- fall back to the lz4 framing.
        obj_string = lz4.loads(network_string)

    return CreateFromString(obj_string)
Exemple #32
0
def get_resource(url, asset_name, flags):
    """Fetch *url*; when bit 0 of *flags* is set the body is an lz4-framed
    payload that gets decompressed before returning."""
    resp = SESSION.get(url)
    resp.raise_for_status()

    payload = resp.content
    if flags & 1:
        # Rebuild the lz4 blob: size word at bytes 4:8, body from byte 16.
        payload = lz4.loads(payload[4:8] + payload[16:])

    return payload
def get_resource(url, asset_name, flags):
    """Download *url*; when bit 0 of *flags* is set, the body is an
    lz4-framed payload that is decompressed before returning."""
    resp = SESSION.get(url)
    resp.raise_for_status()

    buf = resp.content
    if flags & 1:
        # Rebuild the lz4 blob: size word from bytes 4:8, data from byte 16
        # on (bytes 0:4 and 8:16 of the framing are discarded).
        bio = io.BytesIO()
        bio.write(buf[4:8])
        bio.write(buf[16:])
        buf = lz4.loads(bio.getvalue())

    return buf
  def readLZ4(self, data, shape, dtype, size):
    """
    Unpack lz4 compressed frame and return np array image data

    """
    import numpy as np
    import lz4, bitshuffle

    dtype = np.dtype(dtype)
    data = lz4.loads(data)

    # shape is reversed to match numpy's row-major layout; fromstring is
    # deprecated in modern numpy (frombuffer), kept as-is here.
    return np.reshape(np.fromstring(data, dtype=dtype), shape[::-1])
Exemple #35
0
 def pull_df(self, md5):
     """Wrapper for the Workbench get_dataframe method
         Args:
             md5: pull the dataframe identified by this md5
         Returns:
             The uncompressed/unserialized dataframe
     """
     try:
         # The server ships an lz4-compressed msgpack blob; unwrap both.
         packed = self.workbench.get_dataframe(md5)
         return pd.read_msgpack(lz4.loads(packed))
     except zerorpc.exceptions.RemoteError as e:
         # Render server-side "not found" errors as a friendly string.
         return repr_to_str_decorator.r_to_s(self._data_not_found)(e)
Exemple #36
0
 def pull_df(self, md5):
     """Wrapper for the Workbench get_dataframe method
         Args:
             md5: pull the dataframe identified by this md5
         Returns:
             The uncompressed/unserialized dataframe
     """
     try:
         # The server ships an lz4-compressed msgpack blob; unwrap both.
         _packed_df = self.workbench.get_dataframe(md5)
         _df = pd.read_msgpack(lz4.loads(_packed_df))
         return _df
     except zerorpc.exceptions.RemoteError as e:
         # Render server-side "not found" errors as a friendly string.
         return repr_to_str_decorator.r_to_s(self._data_not_found)(e)
Exemple #37
0
        def wrapper( *args, **kwargs):
            """Caching wrapper (Python 2): load func's result from a cache
            file when present, otherwise compute and persist it.

            Configuration arrives either as a CACHED dict kwarg or as
            individual CACHED_* kwargs, copied onto the enclosing object.

            NOTE(review): self, func, load, save, StringIO and lz4 come
            from the enclosing/module scope (not visible in this chunk).
            """
            # Bulk config: CACHED={...} copies every entry onto self.
            if 'CACHED' in kwargs:
                Option  = kwargs.pop( 'CACHED' )
                for k,v in Option.items():
                    self.__dict__[ k ]   = v

            # Per-option config: CACHED_FOO=v sets self.foo.
            # NOTE(review): split('_')[1] drops anything after a second
            # underscore (CACHED_MY_OPT -> 'my'); confirm intended.
            for k,v in kwargs.items():
                if k.startswith('CACHED_'):
                    self.__dict__[ k.split('_')[1].lower() ] = v


            # Caching disabled: pass straight through.
            if self.mode in [False, 'skip']:
                return func( *args, **kwargs )


            # self.name may be a callable deriving a name from the call.
            if type(self.name) != str:
                name = self.name( func.__name__, args )

            else:
                name = self.name

            if not os.path.exists(self.dir)      :   os.makedirs(self.dir)

            cachePath   = os.path.join(self.dir, name)

            if self.compress not in [False, None]:
                cachePath   = cachePath + '.%s'%self.compress

            # Cache hit (unless a forced update).
            if os.path.exists( cachePath ) and self.mode != 'update':
                if self.compress != 'lz4':
                    cached  = open(cachePath,'r')
                else:
                    cached  = StringIO( lz4.loads( open(cachePath,'r').read() ) )

                if self.verbose: print '\t!! Cached from %s'%cachePath
                return load( cached )

            else:
                # Cache miss or forced update: compute and persist
                # (lz4-compressed when configured).
                aOut    = func( *args, **kwargs )

                if self.compress=='lz4':
                    cached  = StringIO()
                    save( cached, aOut )
                    open(cachePath,'w').write( lz4.dumps( cached.getvalue() ) )

                else:
                    save( open(cachePath,'w'), aOut )

                if self.verbose: print '\t!! Cached to %s'%cachePath
                return aOut
Exemple #38
0
    def readLZ4(self, frame, shape, dtype):
        """
        unpack lz4 compressed frame and return np array image data
        frame: zmq data blob frame
        shape: image shape
        dtype:image data type
        """
        dtype = np.dtype(dtype)
        # Legacy lz4.loads needs a little-endian uint32 size prefix.
        expected = dtype.itemsize * shape[0] * shape[1]
        imgData = lz4.loads(struct.pack('<I', expected) + frame.bytes)
        if self._verbose:
            print("[OK] unpacked {0} bytes of lz4 data".format(len(imgData)))

        # shape arrives reversed relative to numpy's row-major order.
        return np.reshape(np.fromstring(imgData, dtype=dtype), shape[::-1])
Exemple #39
0
    def readLZ4(self, frame, shape, dtype):
        """
        unpack lz4 compressed frame and return np array image data
        frame: zmq data blob frame
        shape: image shape
        dtype:image data type
        """
        dtype = np.dtype(dtype)
        # Legacy lz4.loads needs a little-endian uint32 size prefix.
        dataSize = dtype.itemsize*shape[0]*shape[1] # bytes * image size
        imgData = lz4.loads(struct.pack('<I', dataSize) + frame.bytes)
        if self._verbose:
            print("[OK] unpacked {0} bytes of lz4 data".format(len(imgData)))

        # shape is reversed to match numpy's row-major layout.
        return np.reshape(np.fromstring(imgData, dtype=dtype), shape[::-1])
Exemple #40
0
 def getarray(self):
     """Lazily reassemble the (optionally lz4-compressed) event chunks
     into a numpy array, caching the result on self.array."""
     # BUG FIX: 'self.array == None' raises ValueError once self.array
     # holds a numpy array (elementwise comparison is ambiguous in a
     # boolean context); identity comparison is the correct check.
     if self.array is None:
         # reassmble data
         # NOTE(review): nchunks/event_chunks are read as globals here --
         # they look like they should be self.nchunks/self.event_chunks;
         # confirm against the rest of the class.
         data_stream = ""
         for n in xrange(0, nchunks):
             if self.compression is None:
                 data_stream += event_chunks[n]
             elif self.compression == "lz":
                 data_stream += lz4.loads(event_chunks[n])
             else:
                 raise RuntimeError('unrecognized compression option "%s"' % (self.compression))
         # convert into numpy array
         data = cStringIO.StringIO(data_stream)
         self.array = np.load(data)
     return self.array
    def read_manifest(response):
        """Async-fetch callback: write the decompressed manifest to
        dest_file, then invoke callback with the path (None on HTTP error).

        NOTE(review): callback and dest_file are closed over from the
        enclosing scope (not visible in this chunk).
        """
        print("trace read_manifest", response)

        if response.error:
            return callback(None)

        # Rebuild the lz4 blob: size word at bytes 4:8, payload from byte 16.
        buf = response.buffer.read()
        bio = io.BytesIO()
        bio.write(buf[4:8])
        bio.write(buf[16:])
        data = lz4.loads(bio.getvalue())
        with open(dest_file, "wb") as write_db:
            write_db.write(data)

        callback(dest_file)
Exemple #42
0
def read_stream_data(frames, bss_job_mode=4):
    """Decode a 5-frame ZMQ stream message into (header, data).

    Frames 0, 1, 3 and 4 carry JSON header parts; frame 2 carries the
    (possibly bitshuffle-)lz4-compressed image. Returns (None, None) when
    the frame count or bss_job_mode does not match.

    Raises:
        RuntimeError: on an unknown dtype or encoding.
    """
    import lz4
    import bitshuffle
    if len(frames) != 5:
        return None, None

    header = json.loads(frames[0].bytes)
    for i in (1, 3, 4):
        header.update(json.loads(frames[i].bytes))

    if header.get("bss_job_mode", 4) != bss_job_mode:
        return None, None

    dtype = header["type"]
    shape = header["shape"][::-1]

    if dtype in ("int32", "uint32"): byte = 4
    elif dtype in ("int16", "uint16"): byte = 2
    else: raise RuntimeError("Unknown dtype (%s)" % dtype)

    size = byte * shape[0] * shape[1]

    if header["encoding"] == "lz4<":
        # Legacy lz4.loads needs a little-endian uint32 size prefix.
        data = lz4.loads(struct.pack('<I', size) + frames[2].bytes)
        data = numpy.fromstring(data, dtype=dtype).reshape(shape)
        assert data.size * data.dtype.itemsize == size
    elif header["encoding"] == "bs32-lz4<":
        data = frames[2].bytes
        blob = numpy.fromstring(data[12:], dtype=numpy.uint8)
        # blocksize is big endian uint32 starting at byte 8, divided by element size
        blocksize = numpy.ndarray(shape=(), dtype=">u4", buffer=data[8:12]) / 4
        data = bitshuffle.decompress_lz4(blob, shape, numpy.dtype(dtype),
                                         blocksize)
        data = data.reshape(shape)
    elif header["encoding"] == "bs16-lz4<":
        data = frames[2].bytes
        blob = numpy.fromstring(data[12:], dtype=numpy.uint8)
        data = bitshuffle.decompress_lz4(blob, shape, numpy.dtype(dtype))
        data = data.reshape(shape)
    else:
        # BUG FIX: the original constructed this RuntimeError without
        # raising it, leaving 'data' undefined and crashing below with a
        # NameError instead of a meaningful message.
        raise RuntimeError("Unknown encoding (%s)" % header["encoding"])

    # Saturated pixels carry the all-ones sentinel; flag them as -1 after
    # widening to int32 so the sentinel can't collide with real counts.
    bad_sel = data == 2**(byte * 8) - 1
    data = data.astype(numpy.int32)
    data[bad_sel] = -1
    return header, data
    def deserialize(self):
        """Extract the numpy array"""
        
        # NOTE(review): 'index' is never used below.
        index = 0
        deserialized_data = ""

        # retrieve lz4 chunks
        # NOTE: string += is quadratic; fine only for few/small chunks.
        for chunk in self.serialized_data:
            deserialized_data += lz4.loads(chunk)
                
              
        # use stringio to use numpy import
        memfile = StringIO.StringIO()
        memfile.write(deserialized_data)
        memfile.seek(0)
        
        # memfile will close automatically
        return numpy.load(memfile)
Exemple #44
0
    def _parse(self, chain_raw, lz4_):
        """
        Parse a certificate chain file and populate the instance's attributes.

        :param str chain_raw: certificate chain as json string.
        """
        if lz4_:
            chain_raw = lz4.loads(chain_raw).decode("utf-8")
        chain = json.loads(chain_raw)
        # Entries are keyed "1".."N"; each holds base64-encoded key and
        # signature fields that must be decoded before building the cert.
        for index in range(1, len(chain) + 1):
            cert_dict = chain[str(index)]
            for field in ('subject_sig_key', 'subject_enc_key', 'signature'):
                cert_dict[field] = base64.b64decode(cert_dict[field])
            self.certs.append(Certificate.from_dict(cert_dict))
Exemple #45
0
def read_stream_data(frames, bss_job_mode=4):
    """Decode one multipart detector-stream message into (header, data).

    frames: sequence of 5 message frames; frame 0/1/3/4 carry JSON header
    fragments, frame 2 carries the (possibly bitshuffle-)lz4-compressed
    pixel payload. Returns (None, None) when the frame count or the
    header's bss_job_mode does not match. Raises RuntimeError on an
    unknown dtype or encoding.
    """
    import lz4
    import bitshuffle
    if len(frames) != 5:
        return None, None

    header = json.loads(frames[0].bytes)
    for i in (1,3,4): header.update(json.loads(frames[i].bytes))

    if header.get("bss_job_mode", 4) != bss_job_mode:
        return None, None

    dtype = header["type"]
    shape = header["shape"][::-1]  # header stores fast-axis first

    if dtype in ("int32","uint32"): byte = 4
    elif dtype in ("int16","uint16"): byte = 2
    else: raise RuntimeError("Unknown dtype (%s)"%dtype)

    size = byte*shape[0]*shape[1]

    if header["encoding"] == "lz4<":
        # plain lz4: prepend the little-endian uncompressed size expected
        # by lz4.loads
        data = lz4.loads(struct.pack('<I', size) + frames[2].bytes)
        data = numpy.fromstring(data, dtype=dtype).reshape(shape)
        assert data.size * data.dtype.itemsize == size
    elif header["encoding"] == "bs32-lz4<":
        data = frames[2].bytes
        blob = numpy.fromstring(data[12:],dtype=numpy.uint8)
        # blocksize is big endian uint32 starting at byte 8, divided by element size
        blocksize = numpy.ndarray(shape=(),dtype=">u4", buffer=data[8:12])/4
        data = bitshuffle.decompress_lz4(blob, shape, numpy.dtype(dtype), blocksize)
        data = data.reshape(shape)
    elif header["encoding"] == "bs16-lz4<":
        data = frames[2].bytes
        blob = numpy.fromstring(data[12:],dtype=numpy.uint8)
        data = bitshuffle.decompress_lz4(blob, shape, numpy.dtype(dtype))
        data = data.reshape(shape)
    else:
        # BUG FIX: the exception was constructed but never raised, so
        # execution fell through with `data` still holding raw bytes.
        raise RuntimeError("Unknown encoding (%s)"%header["encoding"])

    # BUG FIX: mask saturated pixels (all-ones value) BEFORE the int32
    # cast — after astype(int32), 2**32-1 can never match for uint32 data.
    bad_sel = data == 2**(byte*8) - 1
    data = data.astype(numpy.int32)
    data[bad_sel] = -1
    return header, data
Exemple #46
0
    def decorator(*args, **kwargs):
        """Memoizing wrapper: cache fn's pickled+lz4 result in CACHE.

        The cache key combines the function's qualified name with the
        hash of the positional args and sorted keyword items. Raises
        Exception when the arguments are not hashable.
        """
        if CACHE is None:
            initialize_cache()

        kwargs_tuple = tuple(sorted(kwargs.items()))
        # BUG FIX: isinstance(x, collections.Hashable) passes for a tuple
        # that merely *contains* unhashable values, so the old check let
        # hash() blow up later with an unhelpful TypeError. Calling hash()
        # is the authoritative test.
        try:
            arg_hash = hash((args, kwargs_tuple))
        except TypeError:
            msg = "Function arguments not hashable:"
            msg += "\n\targs:%s\n\tkwargs:%s"
            raise Exception(msg % (args, kwargs))
        key = "%s.%s[%s]" % (fn.__module__, fn.__name__, arg_hash)
        if not CACHE.exists(key):
            value = fn(*args, **kwargs)
            pickled_value = lz4.dumps(pickle.dumps(value))
            CACHE.setex(key, DEFAULT_EXPIRY, pickled_value)
            return value
        else:
            pickled_value = CACHE.get(key)
            return pickle.loads(lz4.loads(pickled_value))
    def got_master(response):
        """HTTP callback: unwrap the lz4 payload into to_path.

        Reassembles the compressed stream from bytes [4:8] + [16:] of the
        response body (skipping wrapper fields), decompresses it, writes
        the result to ``to_path``, stamps the file's mtime from the
        Last-Modified header when available, and reports via ``done``.
        """
        print("trace got_master", response)

        if response.error:
            return done(None)

        buf = response.buffer.read()
        # splice the length prefix and the compressed body back together
        bio = io.BytesIO()
        bio.write(buf[4:8])
        bio.write(buf[16:])
        data = lz4.loads(bio.getvalue())
        with open(to_path, "wb") as write_db:
            write_db.write(data)

        mdate = response.headers.get("Last-Modified")
        if mdate:
            tt = parsedate_tz(mdate)
            # BUG FIX: was `int(time())` — `time` is the module here (the
            # else branch uses time.time()), so calling it raised TypeError
            # whenever the date header failed to parse.
            mtime = mktime_tz(tt) if tt else int(time.time())
        else:
            mtime = int(time.time())
        os.utime(to_path, (-1, mtime))
        done(to_path)
    def receive_message(self, expected=None):
        """Read one framed message from the socket.

        Reads the fixed 8-byte header, then the payload it announces,
        optionally lz4-decompresses it, and dispatches to the registered
        unpacker for the message type. Returns (msgdict, messageType).
        """
        raw_header = recv_data(self.socket, 8)
        messageVersion, messageId, messageType, compressed, length = \
            unpack_msgheader(raw_header)

        payload = recv_data(self.socket, length)

        #print(messageVersion, messageId, messageType, compressed, length)

        if compressed:
            payload = bytearray(lz4.loads(bytes(payload)))

        if expected:
            assert(messageType == expected or messageType in expected)

        if messageType not in self.messageUnpack:
            msgdict = {}
            print("message type {} not defined yet.".format(messageType))
        else:
            msgdict, rest = self.messageUnpack[messageType](payload)
            assert(len(rest) == 0) # consumed

        return msgdict, messageType
Exemple #49
0
    def process_data(self, compressed, data):
        """Consume incoming bytes for the transfer in progress.

        ``self.length_left`` holds how many payload bytes are still owed.
        When ``data`` covers the remainder, the payload is dispatched —
        JSON metadata (when ``self.current_meta & JSON_SERVER``) or file
        content written via ``self.fh`` — and the surplus bytes are kept
        in ``self.buf`` for header parsing. Otherwise the counter is
        simply reset.
        """
        if len(data) >= self.length_left:
                tmp = self.length_left
                self.length_left = False
                # keep whatever arrived beyond the current payload
                self.buf = data[tmp:]

                if self.current_meta & JSON_SERVER:
                    self.set_metadata_json(data[0:tmp])
                    self.connection.send_data((self.ld.get_json()))
                    # NOTE(review): skips 9 bytes of the remainder —
                    # presumably the next message header; confirm against
                    # the sender's framing.
                    self.buf = self.buf[9:]
                else:
                    # file content: decompress with lz4 only when flagged
                    if compressed:
                        self.fh.write(bytearray(lz4.loads(data[0:tmp])))
                    else:
                        self.fh.write(bytearray(data[0:tmp]))

                    if self.check_if_complete():
                        self.process_complete_file()

                #TODO: this is wrong
                #if len(self.buf) >= 9:
                    # NOTE(review): the guard above is commented out, so
                    # this runs unconditionally in the non-JSON branch.
                    self.read_headers(self.buf)
        else:
            self.length_left = False
Exemple #50
0
 def from_raw(cls, trc_raw, lz4_=False):
     """Build a TRC from a raw JSON string, optionally lz4-compressed."""
     raw = lz4.loads(trc_raw).decode("utf-8") if lz4_ else trc_raw
     return TRC(json.loads(raw))
Exemple #51
0
def read_lz4(data):
    """Return the lz4-decompressed form of *data*."""
    import lz4
    decompressed = lz4.loads(data)
    return decompressed
 def loads(self, obj):
     """lz4-decompress *obj*, then delegate to the wrapped serializer."""
     raw = lz4.loads(obj)
     return self.serializer.loads(raw)
Exemple #53
0
def read_lz4(data):
    """Decompress *data* with lz4 and return the result."""
    import lz4
    return lz4.loads(data)
 def unserialize(cls, data):
     """Rebuild an instance from lz4-compressed msgpack bytes."""
     payload = msgpack.loads(lz4.loads(data), encoding='utf-8')
     return cls.from_dict(payload)
Exemple #55
0
 def test_random(self):
   """Round-trip 128 KiB of random bytes through lz4 dumps/loads."""
   payload = os.urandom(128 * 1024)  # Read 128kb
   self.assertEqual(payload, lz4.loads(lz4.dumps(payload)))
def CreateFromNetworkString( network_string ):
    """Decompress a network payload and build the object it encodes."""
    return CreateFromString( lz4.loads( network_string ) )
Exemple #57
0
def request_event( socket, request ):
    """Fetch the data products described by *request* over *socket*.

    Sends the file/run/event/product description, then polls the server
    until ``request.getfulfilled()`` is true, pulling each data product
    chunk-by-chunk (chunks arrive lz4-compressed and are decompressed on
    receipt). Returns None on communication failure; otherwise sets
    ``request.finish_thread`` and returns nothing. Python 2 code.
    """
    # initialize request
    request_file = b"%s"%(request.filename)
    request_firstrun = b"%d"%(request.first_run)
    request_firstevent = b"%d"%(request.first_event)
    request_nevents = b"%d"%(request.nevents)
    # collect the distinct product names across all requested events
    product_set = []
    for event,eventdata in request.data.items():
        for name,product in eventdata.dataproducts.items():
            if name not in product_set:
                product_set.append( name )
    # colon-separated product list, e.g. "prodA:prodB"
    str_request_products=""
    for product in product_set:
        str_request_products += product
        if product!=product_set[-1]:
            str_request_products += ":"
    print str_request_products
    request_products = b"%s"%(str_request_products)
    try:
        socket.send_multipart( [str(request.filename).strip(), str(request.first_run), str(request.first_event), str(request.nevents), str_request_products] )
        reply = socket.recv_multipart()
    except:
        print "trouble communicating with socket"
        return None
    if reply[0]=="LARSOFT_LAUNCHED":
        print "Request received. Retreiving data..."
    else:
        print "Nothing received"
        return None

    # time transfer
    tstart = time.time()

    # loop over events, dataproducts in request
    while not request.getfulfilled():
        # ask for event data product
        socket.send_multipart([b"REQUEST_EVENT",b"%d:%d"%(request.first_run, request.first_event)])
        msg = socket.recv_multipart()
        if msg[0]=="REQUEST_COMPLETE":
            # NOTE(review): eventid and name are not defined in this scope
            # — this print raises NameError if the branch is reached.
            print "Finished sending ",eventid,name
            break
        elif msg[0]=="WAITING_FOR_LARSOFT":
            time.sleep(5)
            continue
        elif "EVENTINFO:" in msg[0]:
            print "Server wants to send an event.: ",msg[0]
            # get event info 
            # frame layout: EVENTINFO:name:runid:eventid:product:compression:nchunks
            event_name = msg[0].split(":")[1]
            event_runid = int(msg[0].split(":")[2])
            event_eventid = int(msg[0].split(":")[3])
            event_product = msg[0].split(":")[4]
            compression = msg[0].split(":")[5]
            nchunks = int(msg[0].split(":")[6])
            event_data = request.data[ (event_runid, event_eventid ) ]

            # create event data product
            data = DataProducts( event_product, nchunks, event_eventid )
            event_data.dataproducts[ event_product ] = data
            data.server_name = event_name
            data.compression = compression

            # Get data product chunks
            # keep asking until every chunk index 0..nchunks-1 has arrived
            while len(data.chunks)<nchunks:
                for n in xrange(0,nchunks):
                    if n in data.chunks:
                        continue
                    chunk_request = "CHUNK%d:%s:%s:%d"%(n,data.server_name,event_product,data.eventid)
                    print "asking for chunk: ",chunk_request
                    socket.send_multipart([chunk_request])
                    reply = socket.recv_multipart()
                    print "reply: ",reply[0]
                    chunkid = int(reply[0].split(":")[0][len("CHUNK"):])
                    data.chunks[chunkid] = lz4.loads( reply[1] ) # decompress
                print "Chunks after loop: ",len(data.chunks)
            # indicate data product is complete
            data.complete = True
            # NOTE(review): `complete` is never read afterwards — dead store.
            complete = True
        event_data.update()
        # end of while not complete loop
        # tell server event is done
    print "REQUEST FINISHED"
    socket.send_multipart(["COMPLETE:%s"%(str(request.filename).strip())])
    ok = socket.recv_multipart()
    print "event transfer time: ",time.time()-tstart," seconds."
    tstart = time.time()
    request.finish_thread = True
    return
Exemple #58
0
 def uncompress_chunk(self, compressed):
     """Drop the 4-byte trailing checksum and lz4-decompress the rest."""
     payload = compressed[:-4]
     return lz4.loads(payload)