def load_cbor(m):
    """
    :param m: CBOR Encoded message
    :type m: hex encoded string (each character is a two digit hex value)
    :raise SyntaxError: Malformed CBOR encoded message
    :raise ValueError: Malformed CBOR encoded message
    :raise cbor2.decoder.CBORDecodeError: Malformed CBOR encoded message
    """
    if os.path.isfile(m):
        try:
            with open(m, 'rb') as f:
                rtn = cbor2.load(f)
        except (SyntaxError, ValueError, cbor2.decoder.CBORDecodeError) as e:
            raise e
    elif type(m) in [str, bytes]:
        try:
            # cbor2 decodes from a binary stream, so wrap the raw message in BytesIO
            rtn = cbor2.load(BytesIO(m if isinstance(m, bytes) else m.encode()))
            if not isinstance(rtn, dict):
                # not a CBOR map; treat the message as a hex string instead
                rtn = cbor2.loads(binascii.unhexlify(m))
        except (SyntaxError, ValueError, cbor2.decoder.CBORDecodeError):
            try:
                rtn = cbor2.loads(binascii.unhexlify(m))
            except (SyntaxError, ValueError, cbor2.decoder.CBORDecodeError) as e2:
                raise e2
    else:
        raise Exception('Cannot load cbor, improperly formatted')
    return Utils.defaultEncode(rtn)
def load_txtypes():
    global txtypes, examples
    try:
        with open("txtypes.cbor", "rb") as fp:
            txtypes = cbor2.load(fp)
    except FileNotFoundError:
        txtypes = {}
    try:
        with open("examples.cbor", "rb") as fp:
            examples = cbor2.load(fp)
    except FileNotFoundError:
        examples = {}
def load_cbor(m):
    """
    :param m: CBOR Encoded message
    :type m: hex encoded string (each character is a two digit hex value)
    :raise SyntaxError: Malformed CBOR encoded message
    :raise ValueError: Malformed CBOR encoded message
    :raise cbor2.decoder.CBORDecodeError: Malformed CBOR encoded message
    """
    if os.path.isfile(m):
        try:
            with open(m, 'rb') as f:
                rtn = cbor2.load(f)
        except (SyntaxError, ValueError, cbor2.decoder.CBORDecodeError) as e:
            raise e
    elif type(m) == str:
        try:
            rtn = cbor2.loads(m.encode())
            if type(rtn) is not dict:
                # m is a hex string; decode the hex pairs to raw bytes first
                rtn = cbor2.loads(bytes.fromhex(m))
        except (SyntaxError, ValueError, cbor2.decoder.CBORDecodeError) as e:
            raise e
    else:
        raise Exception('Cannot load cbor, improperly formatted')
    return json.loads(json.dumps(rtn))
def import_cbor(graph, file_name):
    t1 = time.time()
    with open(file_name, 'rb') as fd:
        data = cbor2.load(fd)
    graph.from_dict(data)
    t2 = time.time()
    print('Imported CBOR in `{:.4f}` seconds.'.format(t2 - t1))
    return graph
def parse_file(path):
    with open(path, 'rb') as f:
        data = f.read()
    try:
        # cbor.loads decodes from bytes; cbor.load expects a file object
        o = cbor.loads(data)
    except Exception:
        sys.exit(1)
    return o
def read_next_cbor(f, timeout=None, waiting_for=None):
    """ Raises StopIteration if it is EOF. Raises TimeoutError if over timeout. """
    wait_for_data(f, timeout, waiting_for)
    try:
        j = cbor.load(f)
        return j
    except OSError as e:
        if e.errno == 29:  # errno 29 (ESPIPE) is treated as end of stream here
            raise StopIteration
        raise
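# A possible consumption loop for read_next_cbor() above. wait_for_data and the
# errno-29-as-EOF convention come from that snippet; the generator wrapper
# itself is only an illustrative sketch.
def iter_cbor_messages(f, timeout=None):
    while True:
        try:
            yield read_next_cbor(f, timeout=timeout)
        except StopIteration:
            return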
def _main():
    args = _parse_args()
    try:
        array = cbor2.load(args.cbor)
    except cbor2.CBORDecodeError as de:
        die("CBOR decoding error: " + str(de))

    # translate tags
    for e in array:
        assert len(e) >= 2
        e[1] = TAGS[e[1]] if e[1] in TAGS else "MissingTag"

    pprint.pprint(array, indent=args.indent, depth=args.depth)
def add_custom_certificate(cbor_file, custom_cert_name):
    # Generate EC key pair
    privatekey = ec.generate_private_key(ec.SECP256R1(), backends.default_backend())
    privatebytes = privatekey.private_bytes(
        encoding=serialization.Encoding.DER,
        format=serialization.PrivateFormat.PKCS8,
        encryption_algorithm=serialization.NoEncryption())
    publickey = privatekey.public_key()
    publicbytes = publickey.public_bytes(
        encoding=serialization.Encoding.DER,
        format=serialization.PublicFormat.SubjectPublicKeyInfo)

    # Create X509 self-signed certificate
    subject = issuer = x509.Name([
        x509.NameAttribute(x509.NameOID.COUNTRY_NAME, u"FI"),
        x509.NameAttribute(x509.NameOID.STATE_OR_PROVINCE_NAME, u"Oulu"),
        x509.NameAttribute(x509.NameOID.LOCALITY_NAME, u"Oulu"),
        x509.NameAttribute(x509.NameOID.ORGANIZATION_NAME, u"ARM"),
        x509.NameAttribute(x509.NameOID.COMMON_NAME, text_type(custom_cert_name))
    ])
    cert = (
        x509.CertificateBuilder()
        .subject_name(subject)
        .issuer_name(issuer)
        .public_key(publickey)
        .serial_number(x509.random_serial_number())
        .not_valid_before(datetime.datetime.utcnow())
        # Our certificate will be valid for 1 year
        .not_valid_after(datetime.datetime.utcnow() + datetime.timedelta(days=365))
        .add_extension(
            x509.SubjectAlternativeName([x509.DNSName(u"localhost")]),
            critical=False,
        )
        # Sign our certificate with our private key
        .sign(privatekey, hashes.SHA256(), backends.default_backend())
    )
    certbytes = cert.public_bytes(serialization.Encoding.DER)

    cbor_data = None
    with open(cbor_file, 'rb') as in_file:
        cbor_data = cbor2.load(in_file)

    privatekey_data = Key(privatebytes, 'der', custom_cert_name, 'ECCPrivate')._asdict()
    publickey_data = Key(publicbytes, 'der', custom_cert_name, 'ECCPublic')._asdict()
    cbor_data['Keys'].append(privatekey_data)
    cbor_data['Keys'].append(publickey_data)

    cert_data = Certificate(certbytes, 'der', custom_cert_name)._asdict()
    cbor_data['Certificates'].append(cert_data)

    with open(cbor_file, 'wb') as out_file:
        cbor2.dump(cbor_data, out_file)
def print_cbor(cbor_file):
    cbor_data = None
    with open(cbor_file, 'rb') as in_file:
        cbor_data = cbor2.load(in_file)
    for k in ['Keys', 'Certificates', 'ConfigParams']:
        v = cbor_data.get(k)
        print(v)
        print(k)
        if v is None:
            continue
        for item in v:
            for kk, vv in iteritems(item):
                print("\t" + text_type(kk) + " : " + repr(vv))
            print('\t------------------------------')
        print('\r\n')
def load(input_file_path):
    ext = get_extension(input_file_path).lower()
    if ext == '.json':
        with open(input_file_path, 'r') as input_file:
            return json.load(input_file)
    elif ext == '.cbor':
        with open(input_file_path, 'rb') as input_file:
            return cbor2.load(input_file)
    elif ext == '.xlsx':
        tree = A205XLSXTree()
        return tree.load_workbook(input_file_path).get_content()
    elif ext in ('.yaml', '.yml'):
        with open(input_file_path, 'r') as input_file:
            return yaml.load(input_file, Loader=yaml.FullLoader)
    else:
        raise Exception(f"Unsupported input \"{ext}\".")
def load(self, filename):
    """
    Load font from filename.

    :param filename: the filename of the file containing the font data
    :type filename: str
    :return: a font object
    :rtype: luma.core.bitmap_font
    """
    with open(filename, 'rb') as fp:
        s = fp.readline()
        if s != b'LUMA.CORE.BITMAP_FONT\n':
            raise SyntaxError('Not a luma.core.bitmap_font file')
        fontdata = cbor2.load(fp)
    self._load_fontdata(fontdata)
    return self
def load(path):
    """
    Return data read from file path as dict.
    File may be either JSON, MsgPack, or CBOR, given by extension
    .json, .mgpk, or .cbor respectively. Otherwise raise IOError.
    """
    root, ext = os.path.splitext(path)
    if ext == '.json':
        with ocfn(path, "rb") as f:
            it = json.load(f)
    elif ext == '.mgpk':
        with ocfn(path, "rb") as f:
            it = msgpack.load(f)
    elif ext == '.cbor':
        with ocfn(path, "rb") as f:
            it = cbor.load(f)
    else:
        raise IOError(f"Invalid file path ext '{path}' "
                      f"not '.json', '.mgpk', or '.cbor'.")
    return it
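# A minimal usage sketch for load() above; the file names are hypothetical and
# ocfn is assumed to open files the same way the built-in open() does.
conf = load("settings.json")    # decoded with json.load
state = load("state.mgpk")      # decoded with msgpack.load
keys = load("keys.cbor")        # decoded with cbor.load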
#!/usr/bin/env python3
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as patches
import cbor2

fig = plt.figure()
ax1 = fig.add_subplot(1, 1, 1)

state = cbor2.load(open('state.cbor', 'rb'))

for patch in state['primitive'][:]:
    i0 = patch['rect'][0]['start']
    j0 = patch['rect'][1]['start']
    i1 = patch['rect'][0]['end']
    j1 = patch['rect'][1]['end']
    x, y = np.meshgrid(range(i0, i1 + 1), range(j0, j1 + 1))
    data = np.array(patch['data']).reshape([i1 - i0, j1 - j0, patch['num_fields']])
    cm = ax1.pcolormesh(x, y, data[:, :, 0].T, vmin=0.0, vmax=1.0)
    box = patches.Rectangle((i0, j0), i1 - i0, j1 - j0,
                            linewidth=0.5, edgecolor='k', fill=False)
    ax1.add_patch(box)

ax1.set_aspect('equal')
fig.colorbar(cm)
plt.show()
#!/usr/bin/env python3
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.patches as patches
import cbor2
import sys

fig = plt.figure()
ax1 = fig.add_subplot(1, 1, 1)

for filename in sys.argv[1:]:
    state = cbor2.load(open(filename, 'rb'))

    for patch in state['primitive'][:]:
        i0 = patch['rect'][0]['start']
        j0 = patch['rect'][1]['start']
        i1 = patch['rect'][0]['end']
        j1 = patch['rect'][1]['end']
        x, y = np.meshgrid(range(i0, i1 + 1), range(j0, j1 + 1))
        data = np.array(patch['data']).reshape([i1 - i0, j1 - j0, patch['num_fields']])
        cm = ax1.pcolormesh(x, y, data[:, :, 0].T, vmin=0.0, vmax=1.0)
        box = patches.Rectangle((i0, j0), i1 - i0, j1 - j0,
                                linewidth=0.5, edgecolor='k', fill=False)
        ax1.add_patch(box)

ax1.set_aspect('equal')
fig.colorbar(cm)
plt.show()
import matplotlib.pyplot as plt
import json
import numpy as np
import cbor2

output = None
file_name = "history_separated_pair_cil=60_cal=None_adh=None_coa=24_seed=8743.cbor"
with open(file_name, mode='rb') as sf:
    output = cbor2.load(sf)

tsteps = [o[0] for o in output]
frequency = tsteps[1] - tsteps[0]
state_recs = [o[1] for o in output]
interactions = [rec["interactions"] for rec in state_recs]

# x_cals_0_4 = [inters[0]["x_cals"][4] for inters in interactions]
# x_cals_1_12 = [inters[1]["x_cals"][12] for inters in interactions]
# plt.plot(tsteps, x_cals_0_4, color="black", marker=".")
# plt.plot(tsteps, x_cals_1_12, color="green", marker=".")
def deserialize_file(file_name):
    with open(file_name, 'rb') as fp:
        obj = cbor2.load(fp)
    return obj
def read(self) -> Union[Dict, TreeNode]:
    """
    I return the best representation the source format supports
        pickle: TreeNode
        else  : Dict[inode -> properties]
    """
    fn = self._path()
    if self.filetype == FileType.PICKLE:
        with open(fn, "rb") as f:
            self.treenode = pickle.load(f)
        return self.treenode
    elif self.filetype == FileType.CSV:
        self.id_dict = {}
        with open(fn, "r") as f:
            r = csv.DictReader(f)
            for line in r:
                # type conversion
                for field in [k for k, v in Node._field_types.items() if v != str]:
                    line[field] = int(line[field])
                self.id_dict[int(line['id'])] = Node(**line)
        return self.id_dict
    elif self.filetype == FileType.MSGPACK:
        # TODO: This will fail with larger files - have to adjust max_xxx_len
        with open(fn, "rb") as f:
            self.id_dict = {}
            for item in msgpack.unpack(f, raw=False):
                self.id_dict[item['id']] = Node(**item)
        return self.id_dict
    elif self.filetype == FileType.JSON:
        return self._json_read(fn, json.load)
    elif self.filetype == FileType.UJSON:
        return self._json_read(fn, ujson.load)
    elif self.filetype == FileType.SIMPLEJSON:
        # NOTE: simplejson includes key names when serializing NamedTuples
        with open(fn, "r") as f:
            self.id_dict = {}
            if self.json_dict_list:
                for item in simplejson.load(f):
                    self.id_dict[item['id']] = Node(**item)
            else:
                for v in simplejson.load(f).values():
                    self.id_dict[v['id']] = Node(**v)
        return self.id_dict
    elif self.filetype == FileType.CBOR2:
        with open(fn, "rb") as f:
            self.id_dict = {}
            for item in cbor2.load(f):
                self.id_dict[item['id']] = Node(**item)
        return self.id_dict
    elif self.filetype == FileType.CBOR:
        with open(fn, "rb") as f:
            self.id_dict = {}
            for item in cbor.load(f):
                self.id_dict[item['id']] = Node(**item)
        return self.id_dict
    elif self.filetype == FileType.RAPIDJSON:
        self.id_dict = {}
        with open(fn, "r") as f:
            d = rapidjson.Decoder(number_mode=rapidjson.NM_NATIVE)(f)
            if self.json_dict_list:
                for item in d:
                    # safer cause key names are included, but slower
                    self.id_dict[item['id']] = Node(**item)
            else:
                # list(self.id_dict.values()) - produces a list of lists
                for item in d:
                    self.id_dict[item[0]] = Node._make(item)
        return self.id_dict
    elif self.filetype == FileType.BSON:
        self.id_dict = {}
        with open(fn, "rb") as f:
            for doc in decode_file_iter(f):
                self.id_dict[doc['id']] = Node(**doc)
        return self.id_dict
#!/usr/bin/env python3
import cbor2
import sys

obj = cbor2.load(sys.stdin.buffer)
print(obj)
def parse(self, stream, *args, **kwargs):
    return cbor2.load(stream)
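# A small, self-contained sketch of what the parse() method above receives and
# returns: any binary file-like object holding one CBOR item. The payload here
# is made up purely for illustration.
from io import BytesIO
import cbor2

payload = cbor2.dumps({"id": 1, "name": "example"})
decoded = cbor2.load(BytesIO(payload))  # -> {'id': 1, 'name': 'example'}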
#!/usr/bin/env python3
import cbor2
import json
import sys

source = sys.argv[1]
target = sys.argv[1][:-5] + '.json' if sys.argv[1].endswith('.cbor') else sys.argv[1] + '.json'

with open(source, 'rb') as source_file:
    with open(target, 'w') as target_file:
        obj = cbor2.load(source_file)
        json.dump(obj, target_file, indent=2)
def wrap_direct(agent):
    logger.info('python %s' % ".".join(map(str, sys.version_info)))
    data_in = os.environ.get('AIDONODE_DATA_IN', '/dev/stdin')
    data_out = os.environ.get('AIDONODE_DATA_OUT', '/dev/stdout')
    while not os.path.exists(data_in):
        logger.info('Waiting for %s to be created.' % data_in)
        time.sleep(1)

    if data_in == '/dev/stdin':
        f_in = sys.stdin
    else:
        f_in = open(data_in, 'rb')
    # f_in = io.BufferedReader(io.open(f_in.fileno()))
    # f_in = sys.stdin

    if data_out.startswith('fifo:'):
        data_out = data_out[len('fifo:'):]  # strip the 'fifo:' prefix
        os.mkfifo(data_out)
        logger.info('Opening fifo %s for writing. Will block until reader appears.'
                    % data_out)
    f_out = open(data_out, 'wb')

    logger.info('Starting reading from %s' % data_in)
    try:
        while True:
            # logger.info('Reading...')
            try:
                msg = cbor.load(f_in)
            except IOError as e:
                if e.errno == 29:
                    break
                raise

            if not isinstance(msg, dict) or ((FIELD_CONTROL not in msg) and (FIELD_TOPIC not in msg)):
                # ignore things that we do not understand
                send_control_message(f_out, CTRL_NOT_UNDERSTOOD, "Protocol mismatch")
                send_control_message(f_out, CTRL_OVER)
                continue

            if FIELD_CONTROL in msg:
                c = msg[FIELD_CONTROL]
                if c == CTRL_CAPABILITIES:
                    his = msg[FIELD_DATA]
                    logger.info('His capabilities: %s' % his)
                    capabilities = {'z2': {}}
                    logger.info('My capabilities: %s' % capabilities)
                    send_control_message(f_out, CTRL_UNDERSTOOD)
                    send_control_message(f_out, CTRL_CAPABILITIES, capabilities)
                    send_control_message(f_out, CTRL_OVER)
                else:
                    msg = 'Could not deal with control message "%s".' % c
                    send_control_message(f_out, CTRL_NOT_UNDERSTOOD, msg)
                    send_control_message(f_out, CTRL_OVER)
            elif FIELD_TOPIC in msg:
                topic = msg[FIELD_TOPIC]
                data = msg.get(FIELD_DATA, None)
                fn = 'on_received_%s' % topic
                if not hasattr(agent, fn):
                    msg = 'Could not deal with topic %s' % topic
                    send_control_message(f_out, CTRL_NOT_UNDERSTOOD, msg)
                    send_control_message(f_out, CTRL_OVER)
                else:
                    send_control_message(f_out, CTRL_UNDERSTOOD)
                    context = ConcreteContext(f_out)
                    f = getattr(agent, fn)
                    try:
                        f(context=context, data=data)
                    except BaseException:
                        s = traceback.format_exc()
                        logger.error(s)
                        try:
                            s = s.decode('utf-8')
                        except:
                            pass
                        send_control_message(f_out, CTRL_ABORTED, s)
                        raise
                    finally:
                        send_control_message(f_out, CTRL_OVER)
            else:
                send_control_message(f_out, CTRL_NOT_UNDERSTOOD, "I expect a topic message")
                send_control_message(f_out, CTRL_OVER)

        logger.info('Graceful exit.')
    except BaseException:
        f_out.flush()
        logger.error(traceback.format_exc())
        sys.exit(1)
    finally:
        f_out.flush()
def parse_authenticator_data(auth_data: bytes) -> AuthenticatorData:
    """Parse the raw authenticator data.

    Args:
        auth_data (bytes): The raw authenticator data bytes.

    Returns:
        An instance of `AuthenticatorData`.

    Raises:
        ParserError: Could not correctly parse the authenticator data.
        DecodingError: Could not decode raw CBOR data.

    References:
        * https://w3.org/TR/webauthn/#authenticator-data
    """
    if len(auth_data) < 37:
        raise ParserError('Attestation auth data must be at least 37 bytes')

    rp_id_hash = auth_data[:32]
    flags = auth_data[32]
    signature_counter_bytes = auth_data[33:37]
    signature_counter_uint32, = struct.unpack('>I', signature_counter_bytes)

    attested_credential_data_included = bool(flags & AuthenticatorDataFlag.AT.value)
    extension_data_included = bool(flags & AuthenticatorDataFlag.ED.value)

    remaining_bytes_io = io.BytesIO(auth_data[37:])

    attested_credential_data = None
    aeci = None

    if attested_credential_data_included:
        try:
            aaguid = _read_bytes(remaining_bytes_io, 16)
            credential_id_length_bytes = _read_bytes(remaining_bytes_io, 2)
            credential_id_length_uint16, = struct.unpack('>H', credential_id_length_bytes)
            credential_id = _read_bytes(remaining_bytes_io, credential_id_length_uint16)

            try:
                credential_public_key = cbor2.load(remaining_bytes_io)
            except cbor2.CBORDecodeError:
                raise DecodingError('Could not decode the credential public key CBOR')

            if type(credential_public_key) is not dict:
                raise ParserError('Credential public key must be a dictionary')

            cpk = parse_cose_key(credential_public_key)
            validate(cpk)

            attested_credential_data = AttestedCredentialData(
                aaguid=aaguid,
                credential_id_length=credential_id_length_uint16,
                credential_id=credential_id,
                credential_public_key=cpk,
            )
        except EOFError:
            raise ParserError('Could not read the included attested credential data')

    if extension_data_included:
        try:
            try:
                extensions = cbor2.load(remaining_bytes_io)
            except cbor2.CBORDecodeError:
                raise DecodingError('Could not decode the extensions CBOR')

            if type(extensions) is not dict:
                raise ParserError('Extension data CBOR must be a dictionary')

            aeci = parse_extensions(extensions)
        except EOFError:
            raise ParserError('Could not read the included extension data')

    if remaining_bytes_io.read1(1) != b'':
        raise ParserError('The authenticator data has unexpected leftover bytes')

    return AuthenticatorData(
        rp_id_hash=rp_id_hash,
        flags=flags,
        sign_count=signature_counter_uint32,
        attested_credential_data=attested_credential_data,
        extensions=aeci,
    )
import cbor2

cbor_file_dir = '/data3/private/fengtao/Projects/cqr_t5/data_download/paragraphCorpus/dedup.articles-paragraphs.cbor'
collection_file = '/data3/private/fengtao/Projects/cqr_t5/data_download/collection.tsv'

with open(cbor_file_dir, 'rb') as fp:
    data = cbor2.decoder.load(fp)
    data2 = cbor2.load(fp)
    while True:
        a = fp.readline()
        if not a:  # stop at end of file instead of looping forever
            break
def _recv_datagram(self, sock, data: bytes, conv: Conversation):
    DTLS_FIRST_OCTETS = (
        20,  # change_cipher_spec
        21,  # alert
        22,  # handshake
        23,  # application_data
    )
    timestamp = datetime.now(timezone.utc)

    # Sequential data source
    buf = BufferedReader(BytesIO(data))
    msg_count = 0
    while True:
        first_data = buf.peek(1)
        if not first_data:
            break
        msg_count += 1
        first_octet = first_data[0]
        major_type = first_octet >> 5
        self.__logger.debug('Decoding message with first octet 0x%02x (major type %d)',
                            first_octet, major_type)

        if first_octet == 0x00:
            self.__logger.info('Ignoring padding to end of packet')
            buf.seek(0, os.SEEK_END)
        elif first_octet in DTLS_FIRST_OCTETS:
            self.__logger.error('Unexpected DTLS handshake')
            if sock:
                self._starttls(sock, conv, server_side=True)
            else:
                self.__logger.error('Ignored DTLS message *within* DTLS plaintext')
            buf.seek(0, os.SEEK_END)
        elif first_octet == 0x06:
            if sock and self._config.require_tls:
                self.__logger.error('Rejecting non-secured bundle')
                return
            self.__logger.error('Ignoring BPv6 bundle and remainder of packet')
            buf.seek(0, os.SEEK_END)
        elif major_type == 4:
            if sock and self._config.require_tls:
                self.__logger.error('Rejecting non-secured bundle')
                return
            # Scan the single bundle message
            off_start = buf.tell()
            cbor2.load(buf)
            off_end = buf.tell()
            msg_data = data[off_start:off_end]
            self._add_rx_item(
                BundleItem(
                    address=str(conv.peer_address),
                    port=conv.peer_port,
                    total_length=len(msg_data),
                    file=BytesIO(msg_data)
                )
            )
        elif major_type == 5:
            # Map type
            extmap = cbor2.load(buf)
            self._recv_ext_map(sock, extmap, conv, timestamp)
        else:
            self.__logger.error('Unknown message type with first octet 0x%02x, ignoring remainder of packet',
                                first_octet)
            buf.seek(0, os.SEEK_END)

    self.__logger.debug('Handled %d messages from packet', msg_count)
def parse(self, stream, media_type=None, parser_context=None):
    """Parse the incoming bytestream as CBOR and return the resulting data."""
    return cbor.load(stream)
def load_rust_dat(self, out_dir, file_name):
    self.out_dir = out_dir
    self.file_name = file_name
    self.cbor_file_path = self.file_name + ".cbor"
    self.mp4_file_name_header = self.file_name + "_M=r"
    self.tag = "rust"

    cbor_files = [f for f in os.listdir(self.out_dir)
                  if os.path.isfile(os.path.join(self.out_dir, f))
                  and os.path.splitext(f)[1] == ".cbor"]

    found_wanted = False
    for fn in cbor_files:
        if self.cbor_file_path == fn:
            found_wanted = True
            break
    if not found_wanted:
        raise Exception(
            "Error: could not find requested file {} in dir {} with "
            "contents: {}".format(self.cbor_file_path, out_dir, cbor_files))

    self.cbor_file_path = os.path.join(self.out_dir, self.cbor_file_path)

    snapshots = []
    with open(self.cbor_file_path, mode='rb') as sf:
        world_info = cbor2.load(sf)
        while True:
            try:
                snapshots += cbor2.load(sf)
            except EOFError:
                break

    print("load_rust_dat | file_name: {} | snapshots found: {}".format(
        file_name, len(snapshots)))

    self.world_info = world_info
    self.generate_header_from_world_info()

    inter_params = self.world_info["world_params"]["interactions"]
    self.chem_attr_params = inter_params["chem_attr"]
    if self.chem_attr_params is not None:
        self.chem_center = np.array([
            self.chem_attr_params["center"]["x"],
            self.chem_attr_params["center"]["y"]
        ])
    else:
        self.chem_center = None

    self.char_t = world_info["char_quants"]["t"]
    self.tpoints = [s["cells"][0]["tpoint"] * self.char_t for s in snapshots]
    data = [s["cells"] for s in snapshots]
    self.snap_period = world_info["snap_period"]

    self.poly_per_c_per_s = cb.extract_p2ds_from_data(['core', 'poly'], data)
    self.centroids_per_c_per_s = np.array(
        [[np.average(poly, axis=0) for poly in poly_per_c]
         for poly_per_c in self.poly_per_c_per_s])
    self.uivs_per_c_per_s = cb.extract_p2ds_from_data(['core', 'geom', 'unit_in_vecs'], data)

    self.rac_acts_per_c_per_s = cb.extract_scalars_from_data(['core', 'rac_acts'], data)
    self.rac_inacts_per_c_per_s = cb.extract_scalars_from_data(['core', 'rac_inacts'], data)
    self.rho_acts_per_c_per_s = cb.extract_scalars_from_data(['core', 'rho_acts'], data)
    self.rho_inacts_per_c_per_s = cb.extract_scalars_from_data(['core', 'rho_inacts'], data)

    self.x_cils_per_c_per_s = cb.extract_scalars_from_data(['interactions', 'x_cils'], data)
    self.x_cals_per_c_per_s = cb.extract_scalars_from_data(['interactions', 'x_cals'], data)
    self.x_coas_per_c_per_s = cb.extract_scalars_from_data(['interactions', 'x_coas'], data)
    self.x_adhs_per_c_per_s = cb.extract_p2ds_from_data(['interactions', 'x_adhs'], data)

    self.kgtps_rac_per_c_per_s = cb.extract_scalars_from_data(['chem', 'kgtps_rac'], data)
    self.kdgtps_rac_per_c_per_s = cb.extract_scalars_from_data(['chem', 'kdgtps_rac'], data)
    self.kgtps_rho_per_c_per_s = cb.extract_scalars_from_data(['chem', 'kgtps_rho'], data)
    self.kdgtps_rho_per_c_per_s = cb.extract_scalars_from_data(['chem', 'kdgtps_rho'], data)
    self.rac_act_net_fluxes_per_c_per_s = cb.extract_scalars_from_data(['chem', 'rac_act_net_fluxes'], data)
    self.rac_inact_net_fluxes_per_c_per_s = cb.extract_scalars_from_data(['chem', 'rac_inact_net_fluxes'], data)
    self.rho_act_net_fluxes_per_c_per_s = cb.extract_scalars_from_data(['chem', 'rho_act_net_fluxes'], data)
    self.rho_inact_net_fluxes_per_c_per_s = cb.extract_scalars_from_data(['chem', 'rho_inact_net_fluxes'], data)
    self.x_tens_per_c_per_s = cb.extract_scalars_from_data(['chem', 'x_tens'], data)

    self.edge_strains_per_c_per_s = cb.extract_scalars_from_data(['mech', 'edge_strains'], data)
    self.rgtp_forces_per_c_per_s = cb.extract_p2ds_from_data(['mech', 'rgtp_forces'], data)
    self.edge_forces_per_c_per_s = cb.extract_p2ds_from_data(['mech', 'edge_forces'], data)
    self.cyto_forces_per_c_per_s = cb.extract_p2ds_from_data(['mech', 'cyto_forces'], data)
    self.sum_forces_per_c_per_s = cb.extract_p2ds_from_data(['mech', 'sum_forces'], data)
    self.avg_tens_strain_per_c_per_s = cb.extract_scalars_from_data(['mech', 'avg_tens_strain'], data)

    self.load_animation_arrows()
import cbor2, cv2, binascii
import pem
from cryptography import x509
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.hazmat.primitives import hashes
from cryptography.exceptions import InvalidSignature
import pprint as pp

signatures = []
certificates = []

with open('signatures.cbor', 'rb') as fp:
    cbor_import = cbor2.load(fp)
    cert = cbor_import[-1]
    signatures = cbor_import[:-(1 + cbor_import[-1][0])]
    for i in range(cert[0] + 1):
        certificates.append(cbor_import[-(i + 1)][1])

if len(certificates) > 1:
    print("Warning: Multiple certificates not currently supported")

certificate = certificates[0]
certs = pem.parse(certificate.encode('utf-8'))
cert = x509.load_pem_x509_certificate(str(certs[0]).encode('utf-8'), default_backend())
pubkey = cert.public_key()
pad = padding.PKCS1v15()
hashtype = hashes.SHA256()