def test_serialization(self, verifying_key, fake_squeak_hash):
    # pubkey_bytes = verifying_key.to_bytes()
    interested = [
        CInterested([verifying_key, verifying_key, verifying_key], -1, 10, fake_squeak_hash),
        CInterested([verifying_key], 30, 2000),
        CInterested(
            nMinBlockHeight=0,
            nMaxBlockHeight=100,
            hashReplySqk=fake_squeak_hash,
        ),
    ]
    locator = CSqueakLocator(interested)
    stream = _BytesIO()
    locator.stream_serialize(stream)
    serialized = _BytesIO(stream.getvalue())
    deserialized = CSqueakLocator.stream_deserialize(serialized)

    assert deserialized == locator
    assert len(deserialized.vInterested) == 3
    assert deserialized.vInterested[0].pubkeys == (verifying_key, verifying_key, verifying_key)
    assert deserialized.vInterested[1].pubkeys == (verifying_key,)
    assert deserialized.vInterested[2].pubkeys == ()
    assert deserialized.vInterested[1].hashReplySqk == b'\x00' * HASH_LENGTH
    assert deserialized.vInterested[2].hashReplySqk == fake_squeak_hash
def __init__(self, subsample=1, dtype=complex):
    self.n_layers = 7  # Number of components

    self.img_shape = [300, 300]  # Spatial imaging shape
    self.x = _np.linspace(1, 199, self._N)
    self.y = _np.linspace(1, 199, self._M)

    if subsample > 1:
        self.x = self.x[::subsample]
        self.y = self.y[::subsample]
        self.img_shape = [self.y.size, self.x.size]

    self.dtype = dtype

    # Order of spectral array
    # A: amplitude
    # Omega: center frequency
    # Gamma: peak frequency width
    self.spec_order = ['Omega', 'A', 'Gamma']

    # Filename prefix for concentration images
    self.__conc_img_prefix = 'Chem_Conc_'
    # Filename prefix for spectral array
    self.__spec_prefix = 'Chem_Spec_'

    self.layers = _np.zeros(self.img_shape + [self.n_layers])
    self.spec_list = []
    self.n_peak_list = []

    # Final hyperspectral image
    self.hsi = None
    # Spectra array
    self.spectra = None
    # Frequency vector; for convenience, self.f or self.wn will work
    self._f = None

    try:
        for num in range(self.n_layers):
            gd_layer = _get_data(
                'crikit.datasets',
                '{}{}{}'.format(self.__conc_img_prefix, num, '.csv'))
            self.layers[:, :, num] = _np.genfromtxt(
                _BytesIO(gd_layer), delimiter=',')[::subsample, ::subsample]
            gd_spec = _get_data(
                'crikit.datasets',
                '{}{}{}'.format(self.__spec_prefix, num, '.csv'))
            self.spec_list.append(
                _np.genfromtxt(_BytesIO(gd_spec), delimiter=','))
    except Exception:
        print('Failed to import model layer and/or spectral information')
    else:
        print('Model spatial size: {}'.format(self.img_shape))
        print('Model components/layers: {}'.format(self.n_layers))
def test_serialization(self):
    alert = CAlert()
    alert.setCancel = [1, 2, 3]
    alert.strComment = b"Comment"
    stream = _BytesIO()
    alert.stream_serialize(stream)
    serialized = _BytesIO(stream.getvalue())
    deserialized = CAlert.stream_deserialize(serialized)
    self.assertEqual(deserialized, alert)
def test_serialization(self):
    inv = CInv()
    inv.type = 123
    inv.hash = b"0" * 32
    stream = _BytesIO()
    inv.stream_serialize(stream)
    serialized = _BytesIO(stream.getvalue())
    deserialized = CInv.stream_deserialize(serialized)
    self.assertEqual(deserialized, inv)
def test_serialization(self):
    inv = CInv(type=1)
    stream = _BytesIO()
    inv.stream_serialize(stream)
    serialized = _BytesIO(stream.getvalue())
    deserialized = CInv.stream_deserialize(serialized)
    assert deserialized.typemap[deserialized.type] == "Squeak"
    assert deserialized == inv
def predict_img(contents):
    # convert html object to PIL image
    string = contents.split(';base64,')[-1]
    decoded = base64.b64decode(string)
    buff = _BytesIO(decoded)
    im = Image.open(buff)

    # resize image to 224x224 np array for input
    im = im.resize((224, 224))
    img_array = np.asarray(im)
    img_array = np.expand_dims(img_array, axis=0)

    global CANDY_IDX
    # predict candy type; assign index 10 if max probability is at most 0.98
    pred = MODEL.predict(preprocess_input(img_array))
    if np.amax(pred) > 0.98:
        CANDY_IDX = np.argmax(pred, axis=1)[0]
    else:
        CANDY_IDX = 10
    candy_name = CANDY_LIST[CANDY_IDX]

    return html.Div([
        html.Hr(),
        html.H5(candy_name),
        # HTML images accept base64 encoded strings in the same format
        # that is supplied by the upload
        html.Img(src=contents, width='224', height='224'),
        html.Hr()
    ])
def open(path, mode="r"): if "w" in mode or "a" in mode: raise IOError(_errno.EINVAL, path, "Write access not supported") elif "r+" in mode: raise IOError(_errno.EINVAL, path, "Write access not supported") full_path = path path, rest = _locate(path) if not rest: return _open(path, mode) else: try: zf = _zipfile.ZipFile(path, "r") except _zipfile.error: raise IOError(_errno.ENOENT, full_path, "No such file or directory") try: data = zf.read(rest) except (_zipfile.error, KeyError): zf.close() raise IOError(_errno.ENOENT, full_path, "No such file or directory") zf.close() if mode == "rb": return _BytesIO(data) else: if _sys.version_info[0] == 3: data = data.decode("ascii") return _StringIO(data)
def encrypt(data, key, iv, threads=None, rsakey=None, state=None, padder=None):
    """Creates a MixSlice from plaintext data.

    Args:
        data (bytestr): The data to encrypt (multiple of MACRO_SIZE).
        key (bytestr): The key used for AES encryption (16 bytes long).
        iv (bytestr): The iv used for AES encryption (16 bytes long).
        threads (int): The number of threads used. (default: cpu count).
        rsakey (bytestr): The rsakey used for key derivation.
            If None, a new rsa keypair is generated.
        state (bytestr): The last state for key derivation.
            If None, a random state is generated.
        padder: The padder used to pad data to MACRO_SIZE.
            If None, a default _Padder is created.

    Returns:
        A new MixSlice that holds the encrypted fragments.
    """
    padder = padder or _Padder(blocksize=_lib.MACRO_SIZE)
    padded_data = padder.pad(data)
    fragments = _mix_and_slice(data=padded_data, key=key, iv=iv, threads=threads)
    fragments = [_BytesIO(f) for f in fragments]
    metadata = _MixSliceMetadata(key=key, iv=iv, order=None, rsakey=rsakey, state=state)
    return MixSlice(fragments, metadata)
def stream_deserialize(cls, f, protover=PROTO_VERSION):
    recvbuf = ser_read(f, 4 + 12 + 4 + 4)

    # check magic
    if recvbuf[:4] != crown.params.MESSAGE_START:
        raise ValueError("Invalid message start '%s', expected '%s'" %
                         (b2x(recvbuf[:4]), b2x(crown.params.MESSAGE_START)))

    # remaining header fields: command, msg length, checksum
    command = recvbuf[4:4+12].split(b"\x00", 1)[0]
    msglen = struct.unpack(b"<i", recvbuf[4+12:4+12+4])[0]
    checksum = recvbuf[4+12+4:4+12+4+4]

    # read message body
    recvbuf += ser_read(f, msglen)

    msg = recvbuf[4+12+4+4:4+12+4+4+msglen]
    th = hashlib.sha256(msg).digest()
    h = hashlib.sha256(th).digest()
    if checksum != h[:4]:
        raise ValueError("got bad checksum %s" % repr(recvbuf))
    recvbuf = recvbuf[4+12+4+4+msglen:]

    if command in messagemap:
        cls = messagemap[command]
        # print("Going to deserialize '%s'" % msg)
        return cls.msg_deser(_BytesIO(msg))
    else:
        print("Command '%s' not in messagemap" % repr(command))
        return None
def _read_range(self, start, end=0):
    """
    Read a range of bytes in stream.

    Args:
        start (int): Start stream position.
        end (int): End stream position.
            0 To not specify end.

    Returns:
        bytes: The bytes read (empty if the range is unsatisfiable).
    """
    stream = _BytesIO()
    try:
        with _handle_azure_exception():
            self._get_to_stream(
                stream=stream,
                start_range=start,
                end_range=(end - 1) if end else None,
                **self._client_kwargs,
            )
    except _AzureHttpError as exception:
        # 416: requested range not satisfiable -> return empty bytes
        if exception.status_code == 416:
            return bytes()
        raise
    return stream.getvalue()
def encrypt(data, path, key, iv, threads=None, padder=None):
    """Encrypts plaintext data and writes the MixSlice fragments to a directory.

    Args:
        data (bytestr): The data to encrypt (multiple of MACRO_SIZE).
        path (str): The directory the encrypted fragments are written to
            (created if missing).
        key (bytestr): The key used for AES encryption (16 bytes long).
        iv (bytestr): The iv used for AES encryption (16 bytes long).
        threads (int): The number of threads used. (default: cpu count).
        padder: The padder used to pad data to MACRO_SIZE.
            If None, a default _Padder is created.
    """
    padder = padder or _Padder(blocksize=MixSlice.MACRO_SIZE)
    padded_data = padder.pad(data)
    fragments = _mix_and_slice(data=padded_data, key=key, iv=iv, threads=threads)
    fragments = [_BytesIO(f) for f in fragments]

    if not _os.path.exists(path):
        _os.makedirs(path)

    name = "frag_%%0%dd.dat" % len(str(len(fragments)))
    for fragid in range(len(fragments)):
        fragment = fragments[fragid]
        assert isinstance(fragment, _BytesIO)
        fragment.seek(0)
        destination = _os.path.join(path, name % fragid)
        with open(destination, "wb") as fp:
            _shutil.copyfileobj(fragment, fp)
        fragment.close()
def _read_trace_states(path_or_bytes):
    states = _trace_extract(path_or_bytes, blk_name='States')
    if states is None:
        return None
    df = _pd.read_csv(_BytesIO(states),
                      delimiter='\t',
                      usecols=[1, 2, 3, 4, 5, 6, 7, 8, 9],
                      skipinitialspace=True,
                      dtype={
                          'SAT': PRN_CATEGORY,
                          'TYPE': STATE_TYPES_CATEGORY
                      },
                      keep_default_na=False,
                      comment='#',
                      header=None,
                      names=['TIME', 'TYPE', 'SITE', 'SAT', 'NUM',
                             'EST', 'VAR', 'ADJ', 'BLK'],
                      parse_dates=['TIME'])  # type:ignore

    df.TIME = (df.TIME.values - _J2000_ORIGIN).astype('timedelta64[s]').astype(int)
    empty_mask = df.TYPE.notna().values  # dropping ONE type
    if (~empty_mask).sum() > 0:
        df = df[empty_mask]
    return df.set_index(['TIME', 'SITE', 'TYPE', 'SAT', 'NUM', 'BLK'])
def stream_deserialize(cls, f, protover=PROTO_VERSION):
    recvbuf = ser_read(f, 4 + 12 + 4 + 4)

    # check magic
    if recvbuf[:4] != bitcoin.params.MESSAGE_START:
        raise ValueError("Invalid message start '%s', expected '%s'" %
                         (b2x(recvbuf[:4]), b2x(bitcoin.params.MESSAGE_START)))

    # remaining header fields: command, msg length, checksum
    command = recvbuf[4:4+12].split(b"\x00", 1)[0]
    msglen = struct.unpack(b"<i", recvbuf[4+12:4+12+4])[0]
    checksum = recvbuf[4+12+4:4+12+4+4]

    # read message body
    recvbuf += ser_read(f, msglen)

    msg = recvbuf[4+12+4+4:4+12+4+4+msglen]
    th = hashlib.sha256(msg).digest()
    h = hashlib.sha256(th).digest()
    if checksum != h[:4]:
        raise ValueError("got bad checksum %s" % repr(recvbuf))
    recvbuf = recvbuf[4+12+4+4+msglen:]

    if command in messagemap:
        cls = messagemap[command]
        # print("Going to deserialize '%s'" % msg)
        return cls.msg_deser(_BytesIO(msg))
    else:
        print("Command '%s' not in messagemap" % repr(command))
        return None
def zip_install(url, sha1, install_directory):
    """Download and install a zipped bundle"""
    if not os.path.isdir(install_directory):
        zip_bytes = download(url=url, sha1=sha1)
        zip_io = _BytesIO(zip_bytes)
        zip_file = zipfile.ZipFile(zip_io)
        os.makedirs(install_directory)
        zip_file.extractall(install_directory)
def _encode_batched_write_command(namespace, operation, command, docs, opts, ctx):
    """Encode the next batched insert, update, or delete command."""
    buf = _BytesIO()
    to_send, _ = _batched_write_command_impl(
        namespace, operation, command, docs, opts, ctx, buf)
    return buf.getvalue(), to_send
def SignatureHash(script, txTo, inIdx, hashtype, amount=None, sigversion=SIGVERSION_BASE):
    """Calculate a signature hash

    'Cooked' version that checks if inIdx is out of bounds - this is *not*
    consensus-correct behavior, but is what you probably want for general
    wallet use.
    """
    if sigversion == SIGVERSION_WITNESS_V0:
        hashPrevouts = b'\x00' * 32
        hashSequence = b'\x00' * 32
        hashOutputs = b'\x00' * 32

        if not (hashtype & SIGHASH_ANYONECANPAY):
            serialize_prevouts = bytes()
            for i in txTo.vin:
                serialize_prevouts += i.prevout.serialize()
            hashPrevouts = bitcoin.core.Hash(serialize_prevouts)

        if (not (hashtype & SIGHASH_ANYONECANPAY)
                and (hashtype & 0x1f) != SIGHASH_SINGLE
                and (hashtype & 0x1f) != SIGHASH_NONE):
            serialize_sequence = bytes()
            for i in txTo.vin:
                serialize_sequence += struct.pack("<I", i.nSequence)
            hashSequence = bitcoin.core.Hash(serialize_sequence)

        if ((hashtype & 0x1f) != SIGHASH_SINGLE
                and (hashtype & 0x1f) != SIGHASH_NONE):
            serialize_outputs = bytes()
            for o in txTo.vout:
                serialize_outputs += o.serialize()
            hashOutputs = bitcoin.core.Hash(serialize_outputs)
        elif ((hashtype & 0x1f) == SIGHASH_SINGLE and inIdx < len(txTo.vout)):
            serialize_outputs = txTo.vout[inIdx].serialize()
            hashOutputs = bitcoin.core.Hash(serialize_outputs)

        f = _BytesIO()
        f.write(struct.pack("<i", txTo.nVersion))
        f.write(hashPrevouts)
        f.write(hashSequence)
        txTo.vin[inIdx].prevout.stream_serialize(f)
        BytesSerializer.stream_serialize(script, f)
        f.write(struct.pack("<q", amount))
        f.write(struct.pack("<I", txTo.vin[inIdx].nSequence))
        f.write(hashOutputs)
        f.write(struct.pack("<i", txTo.nLockTime))
        f.write(struct.pack("<i", hashtype))

        return bitcoin.core.Hash(f.getvalue())

    if script.is_witness_scriptpubkey():
        print("WARNING: You seem to be attempting to sign a scriptPubKey from an")
        print("WARNING: output with segregated witness. This is NOT the correct")
        print("WARNING: thing to sign. You should pass SignatureHash the corresponding")
        print("WARNING: P2WPKH or P2WSH script instead.")

    (h, err) = RawSignatureHash(script, txTo, inIdx, hashtype)
    if err is not None:
        raise ValueError(err)
    return h
def stream_deserialize(cls, f):
    assert ser_read(f, 1) == b"\x30"
    rs = BytesSerializer.stream_deserialize(f)
    f = _BytesIO(rs)
    assert ser_read(f, 1) == b"\x02"
    r = BytesSerializer.stream_deserialize(f)
    assert ser_read(f, 1) == b"\x02"
    s = BytesSerializer.stream_deserialize(f)
    return cls(r, s, len(r + s))
def b64_to_pil(string):
    decoded = base64.b64decode(string)
    buffer = _BytesIO(decoded)
    im = Image.open(buffer)
    im = cv2.cvtColor(np.array(im), cv2.COLOR_RGB2BGR)
    return im
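# Hedged round-trip sketch for b64_to_pil() above: encode a tiny PIL image
# as a base64 PNG and decode it back. Imports are spelled out so the sketch
# is self-contained; note that, despite its name, the function returns a
# BGR numpy array (because of cv2.cvtColor), not a PIL image.
def _demo_b64_to_pil():
    import base64
    from io import BytesIO

    from PIL import Image

    src = Image.new('RGB', (4, 4), color=(255, 0, 0))
    buff = BytesIO()
    src.save(buff, format='PNG')
    encoded = base64.b64encode(buff.getvalue()).decode('ascii')

    bgr = b64_to_pil(encoded)
    assert bgr.shape == (4, 4, 3)  # red in RGB becomes (0, 0, 255) in BGR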
def _encode_batched_op_msg(operation, command, docs, ack, opts, ctx):
    """Encode the next batched insert, update, or delete operation
    as OP_MSG.
    """
    buf = _BytesIO()
    to_send, _ = _batched_op_msg_impl(
        operation, command, docs, ack, opts, ctx, buf)
    return buf.getvalue(), to_send
def install_nano(install_directory):
    """Download and install the nano text editor"""
    url = "http://www.nano-editor.org/dist/v2.2/NT/nano-2.2.6.zip"
    r = _urlopen(url)
    nano_zip_content = _BytesIO(r.read())
    nano_zip = zipfile.ZipFile(nano_zip_content)
    nano_files = ['nano.exe', 'cygwin1.dll', 'cygintl-8.dll',
                  'cygiconv-2.dll', 'cyggcc_s-1.dll']
    for file_name in nano_files:
        nano_zip.extract(file_name, install_directory)
def _read_trace_LC(path_or_bytes):
    '''Parses the LC combo block of the trace files, producing a single
    dataframe. WORK-IN-PROGRESS'''
    # regex search string
    if isinstance(path_or_bytes, str):
        trace_content = path2bytes(path_or_bytes)  # will accept .trace.Z also
    else:
        trace_content = path_or_bytes
    trace_LC_list = _RE_TRACE_LC.findall(string=trace_content)
    LC_bytes = b''.join(trace_LC_list)
    LC_bytes = LC_bytes.replace(b'=', b'')  # getting rid of '='

    df_LC = _pd.read_csv(_BytesIO(LC_bytes),
                         delim_whitespace=True,
                         header=None,
                         usecols=[1, 2, 4, 6, 8, 9, 10, 11, 12, 13]).astype({
                             1: _np.int16,
                             2: _np.int32,
                             4: '<U3',
                             6: '<U1',
                             8: '<U4',
                             9: _np.float_,
                             10: '<U4',
                             11: _np.float_,
                             12: '<U4',
                             13: _np.float_
                         })

    df_LC.columns = ['W', 'S', 'PRN', 'LP', 8, 9, 10, 11, 12, 13]
    df_LC['time'] = _gpsweeksec2datetime(gps_week=df_LC.W, tow=df_LC.S, as_j2000=True)
    df_LC.drop(columns=['W', 'S'], inplace=True)

    df1 = df_LC[['time', 'PRN', 'LP', 8, 9]]
    df1.columns = ['time', 'PRN', 'LP', 'combo', 'value']
    df2 = df_LC[['time', 'PRN', 'LP', 10, 11]]
    df2.columns = ['time', 'PRN', 'LP', 'combo', 'value']
    df3 = df_LC[['time', 'PRN', 'LP', 12, 13]]
    df3.columns = ['time', 'PRN', 'LP', 'combo', 'value']

    df_LC = _pd.concat([df1, df2, df3], axis=0)
    return df_LC.set_index(['time'])
def SignatureHash(script, txTo, inIdx, hashtype, amount=None, sigversion=SIGVERSION_BASE):
    """Calculate a signature hash

    'Cooked' version that checks if inIdx is out of bounds - this is *not*
    consensus-correct behavior, but is what you probably want for general
    wallet use.
    """
    if sigversion == SIGVERSION_WITNESS_V0:
        hashPrevouts = b'\x00' * 32
        hashSequence = b'\x00' * 32
        hashOutputs = b'\x00' * 32

        if not (hashtype & SIGHASH_ANYONECANPAY):
            serialize_prevouts = bytes()
            for i in txTo.vin:
                serialize_prevouts += i.prevout.serialize()
            hashPrevouts = bitcoin.core.Hash(serialize_prevouts)

        if (not (hashtype & SIGHASH_ANYONECANPAY)
                and (hashtype & 0x1f) != SIGHASH_SINGLE
                and (hashtype & 0x1f) != SIGHASH_NONE):
            serialize_sequence = bytes()
            for i in txTo.vin:
                serialize_sequence += struct.pack("<I", i.nSequence)
            hashSequence = bitcoin.core.Hash(serialize_sequence)

        if ((hashtype & 0x1f) != SIGHASH_SINGLE
                and (hashtype & 0x1f) != SIGHASH_NONE):
            serialize_outputs = bytes()
            for o in txTo.vout:
                serialize_outputs += o.serialize()
            hashOutputs = bitcoin.core.Hash(serialize_outputs)
        elif ((hashtype & 0x1f) == SIGHASH_SINGLE and inIdx < len(txTo.vout)):
            serialize_outputs = txTo.vout[inIdx].serialize()
            hashOutputs = bitcoin.core.Hash(serialize_outputs)

        f = _BytesIO()
        f.write(struct.pack("<i", txTo.nVersion))
        f.write(hashPrevouts)
        f.write(hashSequence)
        txTo.vin[inIdx].prevout.stream_serialize(f)
        BytesSerializer.stream_serialize(script, f)
        f.write(struct.pack("<q", amount))
        f.write(struct.pack("<I", txTo.vin[inIdx].nSequence))
        f.write(hashOutputs)
        f.write(struct.pack("<i", txTo.nLockTime))
        f.write(struct.pack("<i", hashtype))

        return bitcoin.core.Hash(f.getvalue())

    assert not script.is_witness_scriptpubkey()

    (h, err) = RawSignatureHash(script, txTo, inIdx, hashtype)
    if err is not None:
        raise ValueError(err)
    return h
def _readall(self):
    """
    Read and return all the bytes from the stream until EOF.

    Returns:
        bytes: Object content
    """
    stream = _BytesIO()
    with _handle_azure_exception():
        self._get_to_stream(stream=stream, **self._client_kwargs)
    return stream.getvalue()
def zip_install(url, sha1, install_directory):
    """Download and install a zipped bundle"""
    if not os.path.isdir(install_directory):
        zip_bytes = download(url=url, sha1=sha1)
        zip_io = _BytesIO(zip_bytes)
        zip_file = zipfile.ZipFile(zip_io)
        LOG.info('installing {} into {}'.format(url, install_directory))
        os.makedirs(install_directory)
        zip_file.extractall(install_directory)
    else:
        LOG.info('existing installation at {}'.format(install_directory))
def step_encrypt(self, fragment_id=None):
    fragment_id = (fragment_id if fragment_id is not None
                   else _random.randrange(len(self._fragments)))
    key = self._metadata.add_encryption_step(fragment_id)
    ctr = _Counter.new(128)
    cipher = _AES.new(key[:16], mode=_AES.MODE_CTR, counter=ctr)

    _logging.info("Encrypting fragment #%d" % fragment_id)
    self._fragments[fragment_id] = _BytesIO(
        cipher.encrypt(self._read_fragment(self._fragments[fragment_id])))
    self._changed.add(fragment_id)
    return fragment_id
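# Hedged note on step_encrypt() above: AES-CTR is its own inverse, so a
# step can be undone with the same 16-byte key prefix and a fresh
# _Counter.new(128) (PyCrypto-style API, as imported by this module).
# A minimal sketch, not the class's actual decryption path:
def _undo_step(fragment_bytes, key):
    ctr = _Counter.new(128)
    cipher = _AES.new(key[:16], mode=_AES.MODE_CTR, counter=ctr)
    return cipher.decrypt(fragment_bytes)  # regenerates the same keystream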
def test_serialization(self):
    fake_payment_str = "fakepaymentstr".encode('utf-8')
    fake_host = "foo.com".encode('utf-8')
    port = 5678
    offer = COffer(
        strPaymentInfo=fake_payment_str,
        host=fake_host,
        port=port,
    )
    stream = _BytesIO()
    offer.stream_serialize(stream)
    serialized = _BytesIO(stream.getvalue())
    deserialized = COffer.stream_deserialize(serialized)

    # assert deserialized.typemap[deserialized.type] == "Squeak"
    assert deserialized.strPaymentInfo == fake_payment_str
    assert deserialized == offer
def read_data(path):
    """Reads data from .pickle file.

    Args:
        path: Path to .pickle file to read.

    Returns:
        Python object.
    """
    with _BytesIO(_tf.io.read_file(path).numpy()) as file:
        return _load(file)
def update_graph(selected_dropdown_value):
    image = cv2.imread('data/IMG_20190406_135457.jpg')
    if selected_dropdown_value == 'grayscale':
        img = grayscale(image)
    elif selected_dropdown_value == 'hsv':
        img = hsv(image)
    else:
        img = image  # fall back to the unmodified image

    im_pil = Image.fromarray(img)
    buff = _BytesIO()
    im_pil.save(buff, format='png')
    encoded = base64.b64encode(buff.getvalue()).decode("utf-8")

    return dcc.Graph(
        id='interactive-image',
        figure={
            'data': [],
            'layout': {
                'margin': go.layout.Margin(l=40, b=40, t=26, r=10),
                'xaxis': {
                    'range': (0, 400),
                    'scaleanchor': 'y',
                    'scaleratio': 1,
                    'showgrid': False
                },
                'yaxis': {
                    'range': (0, 400),
                    'showgrid': False
                },
                'images': [{
                    'xref': 'x',
                    'yref': 'y',
                    'x': 0,
                    'y': 0,
                    'yanchor': 'bottom',
                    'sizing': 'stretch',
                    'sizex': 400,
                    'sizey': 400,
                    'layer': 'below',
                    'source': 'data:image/png;base64, ' + encoded,
                }],
            }
        })
def _snx_extract(snx_bytes, stypes, obj_type, verbose=True):
    # obj_type is MATRIX or VECTOR
    if obj_type == 'MATRIX':
        stypes_dict = {
            'EST': 'SOLUTION/MATRIX_ESTIMATE',
            'APR': 'SOLUTION/MATRIX_APRIORI',
            'NEQ': 'SOLUTION/NORMAL_EQUATION_MATRIX'
        }
    elif obj_type == 'VECTOR':
        stypes_dict = {
            'EST': 'SOLUTION/ESTIMATE',
            'APR': 'SOLUTION/APRIORI',
            'NEQ': 'SOLUTION/NORMAL_EQUATION_VECTOR',
            'ID': 'SITE/ID'
        }

    snx_buffer = b''
    stypes_form, stypes_content, stypes_rows = {}, {}, {}
    objects_in_buf = 0

    for stype in stypes:
        if stype in stypes_dict.keys():
            remove_header = objects_in_buf != 0
            if (objects_in_buf == 0) & (obj_type == 'MATRIX'):
                # override matrix header as comments may be present
                snx_buffer += b'*PARA1 PARA2 ____PARA2+0__________ ____PARA2+1__________ ____PARA2+2__________\n'
                remove_header = True
            stype_extr = _snx_extract_blk(snx_bytes=snx_bytes,
                                          blk_name=stypes_dict[stype],
                                          remove_header=remove_header)
            if stype_extr is not None:
                snx_buffer += stype_extr[0]
                stypes_rows[stype] = stype_extr[1]
                stypes_form[stype] = stype_extr[2]  # dict of forms
                stypes_content[stype] = stype_extr[3]  # dict of content
                objects_in_buf += 1
            else:
                _logging.error(f'{stype} ({stypes_dict[stype]}) blk not found')
                # return None
                objects_in_buf += 1
        else:
            if verbose:
                _logging.error(f'{stype} blk not supported')

    stypes = list(stypes_rows.keys())
    n_stypes = len(stypes)  # existing stypes only
    if n_stypes == 0:
        if verbose:
            _logging.error('nothing found')
        return None
    return _BytesIO(snx_buffer), stypes_rows, stypes_form, stypes_content
def deserialize(cls, buf, allow_padding=False):
    """Deserialize bytes, returning an instance

    allow_padding - Allow buf to include extra padding. (default False)

    If allow_padding is False and not all bytes are consumed during
    deserialization DeserializationExtraDataError will be raised.
    """
    fd = _BytesIO(buf)
    r = cls.stream_deserialize(fd)
    if not allow_padding:
        padding = fd.read()
        if len(padding) != 0:
            raise DeserializationExtraDataError(
                'Not all bytes consumed during deserialization',
                r, padding)
    return r
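# Hedged usage sketch for deserialize() above: strict parsing raises on
# trailing bytes, allow_padding=True tolerates them. `serializable_cls`
# stands in for any class exposing this classmethod, and the fallback
# assumes DeserializationExtraDataError is importable from this module.
def _parse_leniently(serializable_cls, buf):
    try:
        return serializable_cls.deserialize(buf)  # default: all bytes consumed
    except DeserializationExtraDataError:
        return serializable_cls.deserialize(buf, allow_padding=True)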
def zip_install(url, sha1, install_directory):
    """Download and install a zipped bundle of compiled software"""
    r = _urlopen(url)
    zip_bytes = r.read()
    download_sha1 = hashlib.sha1(zip_bytes).hexdigest()
    if download_sha1 != sha1:
        raise ValueError(
            'downloaded {!r} has the wrong SHA1 hash: {} != {}'.format(
                url, download_sha1, sha1))
    zip_io = _BytesIO(zip_bytes)
    zip_file = zipfile.ZipFile(zip_io)
    if not os.path.isdir(install_directory):
        os.makedirs(install_directory)
    zip_file.extractall(install_directory)
def read_pea_partials(path):
    partials = path2bytes(path)
    begin = partials.find(b"End_of_Header") + 13
    df = _pd.read_csv(_BytesIO(partials[begin:]),
                      header=None,
                      delim_whitespace=True,
                      usecols=[0, 1, 2, 9, 10, 11],
                      names=[None, 'MJD', 'TOD', 'X', 'Y', 'Z'])
    df_done = df[['X', 'Y', 'Z']].set_index([
        _mjd2j2000(df.MJD.values, df.TOD.values, pea_partials=True),
        df.iloc[:, 0].astype(_PRN_CATEGORY)
    ])
    df_partials = _pd.concat([df_done], keys=['EST'], axis=1) / 1000
    df_partials.attrs['path'] = path
    return df_partials
def read_clk(clk_path):
    content = path2bytes(str(clk_path))
    data_b = content.find(b'END OF HEADER') + 13
    data_b += content[data_b:data_b + 20].find(b'\n') + 1
    data = content[data_b:]

    data_line = _RE_LINE.search(data)
    assert data_line is not None
    # need to get a line and check the length
    len_line = len(data_line.groups()[0])

    clk_cols = [0, 1, 2, 3, 4, 5, 6, 7, 9]
    clk_names = ['A', 'CODE', 'Y', 'M', 'D', 'h', 'm', 's', 'EST']
    if len_line > 59:  # if len over 59 -> expect STD column presence
        clk_cols += [10]
        clk_names += ['STD']

    clk_df = _pd.read_csv(
        _BytesIO(data),
        delim_whitespace=True,
        header=None,
        usecols=clk_cols,
        names=clk_names,  # type:ignore
        dtype={
            'A': CLK_TYPE_CATEGORY,
            'CODE': object,
            'Y': _np.uint16,
            'M': _np.uint16,
            'D': _np.uint16,
            'h': _np.int32,
            'm': _np.int32,
            's': _np.float_,
        })

    date = (((clk_df.Y.values - 1970).astype('datetime64[Y]').astype('datetime64[M]')
             + clk_df.M.values - 1).astype('datetime64[D]')
            + clk_df.D.values - 1)
    time = (clk_df.h.values * 3600 + clk_df.m.values * 60
            + clk_df.s.values).astype('timedelta64[s]')
    j2000time = (date + time - _J2000_ORIGIN).astype(int)

    clk_df.drop(columns=['Y', 'M', 'D', 'h', 'm', 's'], inplace=True)
    clk_df.set_index(['A', j2000time, 'CODE'], inplace=True)
    clk_df.index.names = ['A', 'J2000', 'CODE']
    return clk_df
def zip_install(url, sha1, install_directory, path=None):
    """Download and install a zipped bundle"""
    if path is None:
        path = install_directory
    if not os.path.exists(path):
        zip_bytes = download(url=url, sha1=sha1)
        zip_io = _BytesIO(zip_bytes)
        zip_file = zipfile.ZipFile(zip_io)
        LOG.info('installing {} into {}'.format(url, install_directory))
        try:
            os.makedirs(install_directory)
        except _MakeDirsError:
            pass
        zip_file.extractall(install_directory)
    else:
        LOG.info('existing installation at {}'.format(install_directory))
def to_bytes(self):
    f = _BytesIO()
    self.msg_ser(f)
    body = f.getvalue()
    res = bitcoin.params.MESSAGE_START
    res += self.command
    res += b"\x00" * (12 - len(self.command))
    res += struct.pack(b"<I", len(body))

    # add checksum
    th = hashlib.sha256(body).digest()
    h = hashlib.sha256(th).digest()
    res += h[:4]

    res += body
    return res
def to_bytes(self):
    f = _BytesIO()
    self.msg_ser(f)
    body = f.getvalue()
    res = crown.params.MESSAGE_START
    res += self.command
    res += b"\x00" * (12 - len(self.command))
    res += struct.pack(b"<I", len(body))

    # add checksum
    th = hashlib.sha256(body).digest()
    h = hashlib.sha256(th).digest()
    res += h[:4]

    res += body
    return res
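# Hedged sketch of the wire framing produced by the two to_bytes() methods
# above: magic (4 bytes) + command padded to 12 + body length ("<I") +
# first 4 bytes of double SHA-256 of the body + body. The offsets are read
# straight off the code above; only the stdlib is used.
def _frame_checksum_ok(frame, magic):
    import hashlib
    import struct

    if frame[:4] != magic:
        return False
    msglen = struct.unpack(b"<I", frame[16:20])[0]
    body = frame[24:24 + msglen]
    checksum = hashlib.sha256(hashlib.sha256(body).digest()).digest()[:4]
    return checksum == frame[20:24]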
def tar_install(url, sha1, install_directory, compression='*',
                strip_components=0):
    """Download and install a tar bundle"""
    if not os.path.isdir(install_directory):
        tar_bytes = download(url=url, sha1=sha1)
        tar_io = _BytesIO(tar_bytes)
        filename = os.path.basename(url)
        mode = 'r:{}'.format(compression)
        tar_file = tarfile.open(filename, mode, tar_io)
        os.makedirs(install_directory)
        members = [
            transform(tarinfo=tarinfo, strip_components=strip_components)
            for tarinfo in tar_file]
        tar_file.extractall(
            path=install_directory,
            members=[m for m in members if m is not None])
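# The installers above lean on a download(url=..., sha1=...) helper that is
# not part of this selection. A hedged reconstruction, inferred from the
# zip_install variant that fetches and SHA1-checks the bytes inline; the
# project's real helper may differ.
def _download(url, sha1):
    import hashlib

    data = _urlopen(url).read()
    digest = hashlib.sha1(data).hexdigest()
    if digest != sha1:
        raise ValueError(
            'downloaded {!r} has the wrong SHA1 hash: {} != {}'.format(
                url, digest, sha1))
    return data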
def StringIO(*args):
    if args and isinstance(args[0], bytes):
        args = (args[0].decode("UTF-8"),)
    return _BytesIO(*args)
def BytesIO(*args):
    if args and isinstance(args[0], unicode):
        args = (args[0].encode("UTF-8"),)
    return _BytesIO(*args)
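# Hedged usage note for the two shims above (Python 2 compatibility code;
# `unicode` only exists there): each coerces its first argument so callers
# can pass either text or bytes interchangeably.
#
#   BytesIO(u'abc')   # unicode input is UTF-8 encoded first
#   BytesIO(b'abc')   # bytes pass through unchanged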
def deserialize(cls, buf):
    return cls.stream_deserialize(_BytesIO(buf))
def from_bytes(cls, b, protover=PROTO_VERSION):
    f = _BytesIO(b)
    return MsgSerializable.stream_deserialize(f, protover=protover)
def serialize(self):
    """Serialize, returning bytes"""
    f = _BytesIO()
    self.stream_serialize(f)
    return f.getvalue()
def deserialize(cls, buf):
    if isinstance(buf, (str, bytes)):
        buf = _BytesIO(buf)
    return cls.stream_deserialize(buf)
def serialize(cls, obj):
    f = _BytesIO()
    cls.stream_serialize(obj, f)
    return f.getvalue()
def serialize(self, params={}):
    """Serialize, returning bytes"""
    f = _BytesIO()
    self.stream_serialize(f, **params)
    return f.getvalue()
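# Hedged round-trip sketch tying the serialize()/deserialize() helpers
# above together: any object with matching stream_serialize and
# stream_deserialize should survive a full cycle. `obj` is a stand-in,
# and the check relies on the class defining __eq__.
def _assert_roundtrip(obj):
    raw = obj.serialize()
    assert type(obj).deserialize(raw) == obj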
def __init__(self, *args, **kwargs):
    super(WaveRecord, self).__init__(*args, **kwargs)
    self.wave = _loadibw(_BytesIO(bytes(self.data)))