def pack(self, data):
    """Flatten a capture dict into a column-oriented row dict.

    Nested request/response structures are CBOR-encoded then
    zlib-compressed; each entry in data['indices'] becomes a numbered
    'f:index.<key>.<n>' column (n counts occurrences of the same key,
    starting at 0).
    """
    req = data.get('request', {})
    resp = data.get('response', {})

    def packed(obj):
        # Serialize then compress one nested structure.
        return zlib.compress(cbor.dumps(obj))

    row = {
        'f:url': data.get('url'),
        'f:timestamp': str(data.get('timestamp')),
        'f:request.method': req.get('method'),
        'f:request.client': packed(req.get('client', {})),
        'f:request.headers': packed(req.get('headers', {})),
        'f:request.body': packed(req.get('body', {})),
        'f:response.status': str(resp.get('status')),
        'f:response.server.hostname': resp.get('server', {}).get('hostname'),
        'f:response.server.address': resp.get('server', {}).get('address'),
        'f:response.headers': packed(resp.get('headers', {})),
        'f:response.body': packed(resp.get('body', {})),
    }

    occurrences = {}  # per-column counter; first occurrence is suffix 0
    for entry in data.get('indices', []):
        column = 'f:index.{}'.format(entry['key'])
        occurrences[column] = occurrences.get(column, -1) + 1
        row['{}.{}'.format(column, str(occurrences[column]))] = entry['value']
    return row
def _create_transactions(self, count, matched_payload=True, valid_signature=True, valid_batcher=True):
    """Build `count` intkey 'set' transactions for validation tests.

    The keyword flags deliberately corrupt each transaction:
    matched_payload=False mutates the payload *after* its hash went into
    the header, valid_signature=False substitutes a bogus signature, and
    valid_batcher=False substitutes a bogus batcher public key.
    """
    # The intkey namespace prefix is constant, so compute it once.
    namespace = hashlib.sha512('intkey'.encode('utf-8')).hexdigest()[0:6]
    transactions = []
    for _ in range(count):
        payload = {
            'Verb': 'set',
            'Name': 'name' + str(random.randint(0, 100)),
            'Value': random.randint(0, 100),
        }
        address = namespace + \
            hashlib.sha512(payload["Name"].encode('utf-8')).hexdigest()
        digest = hashlib.sha512(cbor.dumps(payload)).hexdigest()
        header = TransactionHeader(
            signer_pubkey=self.public_key,
            family_name='intkey',
            family_version='1.0',
            inputs=[address],
            outputs=[address],
            dependencies=[],
            payload_encoding="application/cbor",
            payload_sha512=digest)
        header.batcher_pubkey = \
            self.public_key if valid_batcher else "bad_batcher"
        serialized = header.SerializeToString()
        signature = (signing.sign(serialized, self.private_key)
                     if valid_signature else "bad_signature")
        if not matched_payload:
            # Mutate after hashing so payload_sha512 no longer matches.
            payload['Name'] = 'unmatched_payload'
        transactions.append(Transaction(
            header=serialized,
            payload=cbor.dumps(payload),
            header_signature=signature))
    return transactions
def v1_folder_extract_post(fid, sid):
    """Queue an async 'ingest' work unit for (fid, sid).

    Returns {'state': 'pending'} when a unit for this key is already
    available/blocked/pending, otherwise submits a new work unit and
    returns {'state': 'submitted'}.
    """
    conf = yakonfig.get_global_config('coordinate')
    tm = coordinate.TaskMaster(conf)
    key = cbor.dumps((fid, sid))
    wu_status = tm.get_work_unit_status('ingest', key)
    if wu_status and wu_status['status'] in (AVAILABLE, BLOCKED, PENDING):
        return {'state': 'pending'}
    logger.info('launching async work unit for %r', (fid, sid))
    # Fix: reuse the TaskMaster and serialized key built above; the
    # original re-read the config, rebuilt the TaskMaster, and re-encoded
    # the (fid, sid) tuple for no reason.
    tm.add_work_units('ingest', [(key, {})])
    return {'state': 'submitted'}
def worker_call(self, name, arg, serial, allow_reply):
    """Dispatch the registered method *name* with *arg*.

    When allow_reply is true, returns a CBOR-encoded
    [payload, status, serial] reply where status is 0 (ok, payload is a
    nested CBOR [result]), 2 (exception, payload is str(exc)), or
    4 (unknown method, payload is the name).  Otherwise returns None.
    """
    if name not in self._funcdict:
        logging.debug('method "%s" not found', name)
        if allow_reply:
            return cbor.dumps([name, 4, serial])
        return None
    try:
        result = self._funcdict[name](arg)
        logging.debug('method "%s" result: %s', name, result)
        if allow_reply:
            return cbor.dumps([cbor.dumps([result]), 0, serial])
    except Exception as exc:
        logging.debug('method "%s" throws an exception: %s', name, str(exc))
        if allow_reply:
            return cbor.dumps([str(exc), 2, serial])
def to_kvlayer(item):
    """Serialize *item* into a kvlayer (key, value) pair.

    The key is (utf8 namespace, utf8 owner, inode); the value is a CBOR
    blob of the item's folder flag, metadata, and user data.
    """
    payload = {
        'is_folder': item.is_folder(),
        'meta_data': item.meta_data,
        'user_data': item.data,
    }
    key = (utf8(item.namespace), utf8(item.owner), item.inode)
    return (key, cbor.dumps(payload))
def v1_folder_extract_get(request, response, kvlclient, store, fid, sid):
    """Report the status of the (fid, sid) 'ingest' work unit.

    Returns {'state': 'pending'} while the unit is queued or running,
    the cached openquery result (augmented with 'state': 'done') once
    finished, and {'state': 'failed'} on any other status or on lookup
    failure.
    """
    conf = yakonfig.get_global_config('coordinate')
    tm = coordinate.TaskMaster(conf)
    key = cbor.dumps((fid, sid))
    wu_status = tm.get_work_unit_status('ingest', key)
    status = wu_status['status']
    if status in (AVAILABLE, BLOCKED, PENDING):
        return {'state': 'pending'}
    elif status in (FINISHED,):
        kvlclient.setup_namespace({'openquery': (str,)})
        data = None
        try:
            data = list(kvlclient.get('openquery', (key,)))
            assert len(data) == 1, data
            logger.info('got data of len 1: %r', data)
            assert data[0], data
            assert data[0][1], data
            data = data[0][1]
            data = json.loads(data)
            data['state'] = 'done'
            return data
        # Fix: was a bare `except:`, which also swallowed
        # KeyboardInterrupt/SystemExit; any lookup/parse failure still
        # maps to {'state': 'failed'}.
        except Exception:
            logger.info('kvlclient: %r', kvlclient)
            logger.error('Failed to get openquery data: %r', data,
                         exc_info=True)
            return {'state': 'failed'}
    else:
        return {'state': 'failed'}
def send_cbor(self, obj):
    """Serialize *obj* as CBOR and transmit it with a size prefix.

    The `cbor` module cannot decode incrementally (push parsing), so the
    receiver has no way to know how many bytes one object occupies;
    send_sized therefore length-prefixes every serialized object.  This
    is redundant with CBOR's own framing but unavoidable short of
    rewriting the cbor module.
    """
    encoded = cbor.dumps(obj)
    self.send_sized(encoded)
def _rpc(self, method_name, params): '''Call a method on the server. Calls ``method_name(*params)`` remotely, and returns the results of that function call. Expected return types are primitives, lists, and dictionaries. :raise Exception: if the server response was a failure ''' ## it's really time and file-space consuming to log all the ## rpc data, but there are times when a developer needs to. mlog = None #mlog = logging.getLogger('cborrpc') tryn = 0 delay = self._base_retry_seconds self._message_count += 1 message = { 'id': self._message_count, 'method': method_name, 'params': params } if mlog is not None: mlog.debug('request %r', message) buf = cbor.dumps(message) errormessage = None while True: try: conn = self._conn() conn.send(buf) response = cbor.load(self.rfile) if mlog is not None: mlog.debug('response %r', response) assert response['id'] == message['id'] if 'result' in response: return response['result'] # From here on out we got a response, the server # didn't have some weird intermittent error or # non-connectivity, it gave us an error message. We # don't retry that, we raise it to the user. errormessage = response.get('error') if errormessage and hasattr(errormessage, 'get'): errormessage = errormessage.get('message') if not errormessage: errormessage = repr(response) break except Exception as ex: if tryn < self._retries: tryn += 1 logger.debug('ex in %r (%s), retrying %s in %s sec...', method_name, ex, tryn, delay, exc_info=True) self.close() time.sleep(delay) delay *= 2 continue logger.error('failed in rpc %r %r', method_name, params, exc_info=True) raise raise Exception(errormessage)
def dumps(self):
    '''Create a CBOR byte string from a feature collection.

    The serialized form is [metadata, payload]: metadata always carries
    the format version ('v': 'fc01') and, for read-only collections,
    'ro': 1.
    '''
    header = {'v': 'fc01'}
    if self.read_only:
        header['ro'] = 1
    return cbor.dumps([header, self.to_dict()])
def log(self, request, response):
    """POST a CCA record of this request/response pair to the archive API.

    The wire encoding depends on ``self.format`` ('cbor', 'json_latin1',
    or 'json_base64'); any other value raises ValueError.
    """
    data = response2cca(
        response=response,
        request=request,
        client_info=self.client_info or None,
    )
    headers = {
        'Authorization': 'Token {}'.format(self.api_token),
    }
    if self.format == 'cbor':
        import cbor
        headers['Content-Type'] = 'application/cbor'
        body = cbor.dumps(data)
    elif self.format == 'json_latin1':
        headers['Content-Type'] = 'application/json'
        # NOTE(review): json.dumps(encoding=...) is Python-2-only; this
        # raises TypeError on Python 3 — confirm the target interpreter.
        body = json.dumps(data, encoding='latin1')
    elif self.format == 'json_base64':
        headers['Content-Type'] = 'application/json'
        # NOTE(review): this branch is byte-identical to json_latin1 —
        # looks like a copy-paste bug; no base64 treatment happens here.
        body = json.dumps(data, encoding='latin1')
    else:
        raise ValueError("ArchiveMiddleware is not configured properly.")
    self.request(self.api_url, method='POST', body=body, headers=headers)
def pull_req(self):
    """Pull updates from the parent over a fresh TCP connection.

    Protocol: send a length-prefixed (!L) CBOR Pull request carrying our
    current parent version, read the length-prefixed CBOR reply, ack with
    a packed bool, then advance self.parent_version.

    Returns (payload, new_versions); re-raises any failure after printing
    the traceback.
    """
    try:
        data = cbor.dumps({
            enums.TransferFields.AppName: self.appname,
            enums.TransferFields.RequestType: enums.RequestType.Pull,
            enums.TransferFields.Versions: self.parent_version
        })
        req_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # req_socket.settimeout(2)
        self.logger.debug("Connecting to parent.")
        req_socket.connect(self.parent)
        self.logger.debug(
            "Client Connection successful, sending data (pull req)")
        req_socket.send(pack("!L", len(data)))
        send_all(req_socket, data)
        self.logger.debug("Data sent (pull req)")
        content_length = unpack("!L", req_socket.recv(4))[0]
        data = cbor.loads(receive_data(req_socket, content_length))
        self.logger.debug("Data received (pull req).")
        # Versions
        new_versions = data[enums.TransferFields.Versions]
        # Actual payload
        package = data[enums.TransferFields.Data]
        # Send bool status back.
        req_socket.send(pack("!?", True))
        self.logger.debug("Ack sent (pull req)")
        req_socket.close()
        self.parent_version = self.get_new_version(new_versions)
        return package, new_versions
    except Exception as e:
        print ("PULL", e)
        print(traceback.format_exc())
        raise
def test_sa_proposal_macs(self):
    """A proposal offering sha3_512 must be accepted with mac_len 8."""
    proposal = b'\x01' + cbor.dumps({'macs': ['sha3_512']})
    tag = self.get_mac(proposal, self.R_a)
    reply = self.get_response(proposal + tag).msg
    # Strip the 1-byte message type and the trailing 8-byte MAC.
    sa = cbor.loads(reply[1:-8])
    self.assertEqual(sa['mac'], 'sha3_512')
    self.assertEqual(sa['mac_len'], 8)
def pull_req(self):
    """Pull updates from the parent over the persistent server connection.

    Same wire protocol as the socket-per-call variant: length-prefixed
    (!L) CBOR Pull request out, length-prefixed CBOR reply in, packed
    bool ack back, then self.parent_version advances.

    Returns (payload, new_versions); re-raises any failure after printing
    the traceback.
    """
    try:
        data = cbor.dumps({
            enums.TransferFields.AppName: self.appname,
            enums.TransferFields.RequestType: enums.RequestType.Pull,
            enums.TransferFields.Versions: self.parent_version
        })
        self.logger.debug(
            "Client Connection successful, sending data (pull req)")
        self.server_connection.send(pack("!L", len(data)))
        send_all(self.server_connection, data)
        self.logger.debug("Data sent (pull req)")
        content_length = unpack("!L", self.server_connection.recv(4))[0]
        resp = receive_data(self.server_connection, content_length)
        data = cbor.loads(resp)
        self.logger.debug("Data received (pull req).")
        # Versions
        new_versions = data[enums.TransferFields.Versions]
        # Actual payload
        package = data[enums.TransferFields.Data]
        # Send bool status back.
        self.server_connection.send(pack("!?", True))
        self.logger.debug("Ack sent (pull req)")
        self.parent_version = self.get_new_version(new_versions)
        return package, new_versions
    except Exception as e:
        print ("PULL", e)
        print(traceback.format_exc())
        raise
def push_req(self, diff_data, version):
    """Push *diff_data* at *version* to the parent over a fresh connection.

    Sends a length-prefixed (!L) CBOR Push request, waits for a packed
    bool ack, then advances self.parent_version.  Returns the ack value;
    re-raises any failure after printing the traceback.
    """
    try:
        package = {
            enums.TransferFields.AppName: self.appname,
            enums.TransferFields.RequestType: enums.RequestType.Push,
            enums.TransferFields.Versions: version,
            enums.TransferFields.Data: diff_data
        }
        data = cbor.dumps(package)
        req_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        # req_socket.settimeout(2)
        self.logger.debug("Connecting to parent.")
        req_socket.connect(self.parent)
        self.logger.debug(
            "Client Connection successful, sending data (push req)")
        req_socket.send(pack("!L", len(data)))
        send_all(req_socket, data)
        self.logger.debug("Data sent (push req)")
        succ = unpack("!?", req_socket.recv(1))[0]
        self.logger.debug("Ack recv (push req)")
        req_socket.close()
        self.parent_version = self.get_new_version(version)
        return succ
    except Exception as e:
        print ("PUSH", e)
        print(traceback.format_exc())
        raise
def test_multihashing_cbor():
    """Pin the multihash of a canonical (sort_keys) CBOR encoding."""
    src = dumps({
        'name': 'hello.txt',
        'size': 11
    }, sort_keys=True)
    # Expected value is the base58 multihash of exactly this byte string;
    # sort_keys keeps the encoding deterministic across dict orderings.
    assert multihash(src) == 'QmQtX5JVbRa25LmQ1LHFChkXWW5GaWrp7JpymN4oPuBSmL'
def main():
    """Debugging CLI for the linker/OpenQuery cache.

    Actions: `run` (traverse/extract for the folder pair), `delete`
    (drop the cached openquery row), `cache` (scan and count cached
    queries, printing the matching record).
    """
    p = argparse.ArgumentParser('simple debugging tool for watching the linker and OpenQuery')
    p.add_argument('action', help='either `run` or `cache` or `delete`')
    p.add_argument('folder', help='folder name')
    p.add_argument('subfolder', help='subfolder name')
    args = yakonfig.parse_args(p, [kvlayer, yakonfig])
    config = yakonfig.get_global_config()
    # Folder names are stored with spaces normalized to underscores.
    key = cbor.dumps((args.folder.replace(' ', '_'),
                      args.subfolder.replace(' ', '_')))
    if args.action == 'run':
        web_conf = Config()
        with yakonfig.defaulted_config([kvlayer, dblogger, web_conf],
                                       config=config):
            traverse_extract_fetch(web_conf, key, stop_after_extraction=True)
    elif args.action == 'delete':
        kvlclient = kvlayer.client()
        kvlclient.setup_namespace({'openquery': (str,)})
        kvlclient.delete('openquery', (key,))
        print('deleted %r' % key)
    elif args.action == 'cache':
        kvlclient = kvlayer.client()
        kvlclient.setup_namespace({'openquery': (str,)})
        count = 0
        for rec in kvlclient.scan('openquery'):
            count += 1
            if rec[0][0] == key:
                # Fix: was the Python-2-only statement `print rec`, a
                # SyntaxError on Python 3; the call form works on both.
                print(rec)
        print('%d cached queries' % count)
def test_sa_proposal_unsupported_macs(self):
    """Proposing only an unsupported MAC must fall back to sha3_256."""
    proposal = b'\x01' + cbor.dumps({'macs': ['hmac-md5']})
    tag = self.get_mac(proposal, self.R_a)
    reply = self.get_response(proposal + tag).msg
    # Strip the 1-byte message type and the trailing MAC.
    sa = cbor.loads(reply[1:-self.mac_len])
    self.assertEqual(sa['mac'], 'sha3_256')
    self.assertEqual(sa['mac_len'], 8)
def test_basic_retry():
    """Two connect failures followed by a good socket must still succeed."""
    connectfail = socket.error("connect fail")
    recvdata = cbor.dumps({'id': 1, 'result': 'ok'})
    fs = FakeSocket(recvdata)
    # The client's retry loop should absorb both failures and use `fs`.
    tc = TestClient([connectfail, connectfail, fs])
    xok = tc._rpc('foo', ['ha', 'ha'])
    assert xok == 'ok'
def dgen():
    # Generator over the work units in `they`: yields
    # (work_spec_name, key, priority, cbor-encoded data) for each unit,
    # while tracking the lowest-(key, priority) seen in the shared
    # self._min_prio_key cache for this work spec.
    # NOTE(review): indentation reconstructed from a flattened source —
    # the cache update is taken to be inside the `if`; confirm.
    mpk = self._min_prio_key.get(work_spec_name)
    for wu in they:
        cdata = cbor.dumps(wu.data)
        if lowermpk(mpk, wu.key, wu.priority):
            mpk = (wu.key, wu.priority, wu.data)
            self._min_prio_key[work_spec_name] = mpk
        yield (work_spec_name, wu.key, wu.priority, cdata)
def reply(self, status, headers, body):
    """Send an HTTP reply through the stator C layer and close.

    The reply is CBOR-encoded as [status, headers, body].  Input
    contracts are enforced with asserts: status is (int code 200-599,
    unicode reason), headers a dict, body bytes.
    """
    assert 200 <= status[0] <= 599, status
    assert isinstance(status[1], UNICODE_TYPE), status
    assert isinstance(headers, dict), headers
    assert isinstance(body, bytes), body
    payload = cbor.dumps([status, headers, body])
    dll.stator_http_reply(self.id, payload, len(payload))
    self.close()
def update(self, puts, deletes):
    """Apply *deletes* then *puts* in one LMDB write transaction.

    Keys are UTF-8 encoded; values are CBOR-encoded.  A sync follows the
    commit so the change is durable.
    """
    with self._lmdb.begin(write=True, buffers=True) as txn:
        # Deletions first, so a key in both lists ends up written.
        for key in deletes:
            txn.delete(key.encode())
        for key, value in puts:
            txn.put(key.encode(), cbor.dumps(value), overwrite=True)
    self.sync()
def writeToOutput(ccaDoc, outputDir):
    """Write one CCA document to <outputDir>/<key>, creating the directory
    if needed.

    Preserves the original (odd-looking) double encoding: the document is
    JSON-serialized, then that JSON string is CBOR-encoded.
    """
    if not os.path.exists(outputDir):
        os.makedirs(outputDir)
    print(outputDir)
    # Fix: join paths portably instead of string concatenation.
    outputPath = os.path.join(outputDir, ccaDoc["key"])
    # Fix: cbor.dumps returns bytes, so the file must be opened in binary
    # mode ("w" raises TypeError on Python 3); the context manager also
    # guarantees the handle is closed even if the write fails.
    with open(outputPath, "wb") as f:
        f.write(cbor.dumps(json.dumps(ccaDoc)))
def serialize(self, obj):
    """
    Implements :func:`autobahn.wamp.interfaces.IObjectSerializer.serialize`
    """
    payload = cbor.dumps(obj)
    if not self._batched:
        return payload
    # Batched mode: prefix with a network-order uint32 length.
    return struct.pack("!L", len(payload)) + payload
async def send_data(writer, data):
    """CBOR-encode *data* and write it length-prefixed (!L) to *writer*.

    Any failure is printed and re-raised for the caller to handle.
    """
    try:
        encoded = cbor.dumps(data)
        writer.write(pack("!L", len(encoded)))
        writer.write(encoded)
        await writer.drain()
    except Exception as e:
        print (e)
        raise
def update_chain(self, cell, timeout=TIMEOUT_SECS):
    """Submit *cell* via the transactor's UpdateChain RPC.

    Objects exposing to_cbor_bytes serialize themselves; anything else
    is encoded with cbor.dumps.  Returns the resulting
    MultihashReference.
    """
    try:
        serialized = cell.to_cbor_bytes()
    except AttributeError:
        # Plain values don't know how to serialize themselves.
        serialized = cbor.dumps(cell)
    request = Transactor_pb2.UpdateRequest(chainCellCbor=serialized)
    ref = with_retry(self.client.UpdateChain, request, timeout)
    return MultihashReference.from_base58(ref.reference)
def _set_state_data(name, state, context):
    """CBOR-encode *state* and store it at the intkey address for *name*.

    Raises InternalError when the context reports no addresses were set.
    """
    address = make_intkey_address(name)
    written = context.set_state({address: cbor.dumps(state)})
    if not written:
        raise InternalError('State error')
def set_batch(self, add_pairs, del_keys=None):
    """Delete *del_keys* (if given) then insert *add_pairs* in one LMDB
    write transaction, syncing afterwards.

    Keys are UTF-8 encoded; values are CBOR-encoded.
    """
    with self._lmdb.begin(write=True, buffers=True) as txn:
        # Deletions first, so a key in both lists ends up written.
        if del_keys is not None:
            for key in del_keys:
                txn.delete(key.encode())
        for key, value in add_pairs:
            txn.put(key.encode(), cbor.dumps(value), overwrite=True)
    self.sync()
def insert_canonical(self, record, timeout=TIMEOUT_SECS):
    """Submit *record* via the transactor's InsertCanonical RPC.

    Objects exposing to_cbor_bytes serialize themselves; anything else
    is encoded with cbor.dumps.  Returns the resulting
    MultihashReference.
    """
    try:
        serialized = record.to_cbor_bytes()
    except AttributeError:
        # Plain values don't know how to serialize themselves.
        serialized = cbor.dumps(record)
    request = Transactor_pb2.InsertRequest(canonicalCbor=serialized)
    ref = with_retry(self.client.InsertCanonical, request, timeout)
    return MultihashReference.from_base58(ref.reference)
def _send_transaction(self, verb, name, value, wait=None): payload = cbor.dumps({ 'Verb': verb, 'Name': name, 'Value': value, }) # Construct the address address = self._get_address(name) header = TransactionHeader( signer_public_key=self._signer.get_public_key().as_hex(), family_name="intkey", family_version="1.0", inputs=[address], outputs=[address], dependencies=[], payload_sha512=_sha512(payload), batcher_public_key=self._signer.get_public_key().as_hex(), nonce=hex(random.randint(0, 2**64)) ).SerializeToString() signature = self._signer.sign(header) transaction = Transaction( header=header, payload=payload, header_signature=signature ) batch_list = self._create_batch_list([transaction]) batch_id = batch_list.batches[0].header_signature if wait and wait > 0: wait_time = 0 start_time = time.time() response = self._send_request( "batches", batch_list.SerializeToString(), 'application/octet-stream', ) while wait_time < wait: status = self._get_status( batch_id, wait - int(wait_time), ) wait_time = time.time() - start_time if status != 'PENDING': return response return response return self._send_request( "batches", batch_list.SerializeToString(), 'application/octet-stream', )
def _value_from_label(self, label):
    """Serialize *label* to bytes: one packed header byte followed by the
    CBOR-encoded metadata."""
    # Pack value and rating into a single byte, since both will likely
    # be small integers: value+1 in the low bits, rating shifted high.
    header_byte = struct.pack('B', (label.value.value + 1) | (label.rating << 4))
    return header_byte + cbor.dumps(label.meta)
'Value': 1001254} payload2 = { 'Verb': 'inc', 'Name': 'Tempature', 'Value': 20} payload3 = { 'Verb': 'inc', 'Name': 'Locationlat', 'Value':78 } payload4 = { 'Verb': 'inc', 'Name': 'Locationlong', 'Value':89 } payload_bytes = cbor.dumps(payload) payload_bytes1 = cbor.dumps(payload1) payload_bytes2 = cbor.dumps(payload2) payload_bytes3 = cbor.dumps(payload3) payload_bytes4 = cbor.dumps(payload4) from hashlib import sha512 hash = sha512('intkey'.encode('utf-8')).hexdigest()[0:6] + sha512(payload['Name'].encode('utf-8')).hexdigest()[-64:] from sawtooth_sdk.protobuf.transaction_pb2 import TransactionHeader txn_header_bytes = TransactionHeader( family_name='intkey', family_version='1.0', inputs=[hash], outputs=[hash], signer_public_key=signer.get_public_key().as_hex(),
# Debug script: walk every event of feed #1 and print its decoded parts.
from logStore.transconn.database_connector import DatabaseConnector
import cbor
import event

dc = DatabaseConnector()
feed_ids = dc.get_all_feed_ids()
n = dc.get_current_seq_no(feed_ids[1])
print("seq num:", n, "(feed 1)")
tmp = 0  # last re-encoded event; not used after the loop
for i in range(0, n + 1):
    # Each stored event decodes to an indexable structure; slots 0 and 2
    # are themselves CBOR blobs.
    l = cbor.loads(dc.get_event(feed_ids[1], i))
    tmp = cbor.dumps(l)
    print(cbor.loads(l[0]))  # content bits
    # print(l[1])
    print(cbor.loads(l[2]))  # event
    print(event.get_hash(l[0]))
    print("-------------------")
def send_message(self, message):
    """CBOR-encode *message*, prefix it with the packed header length,
    and transmit the whole frame with sendall."""
    encoded = cbor.dumps(message)
    frame = self.header.pack(len(encoded)) + encoded
    self.socket.sendall(frame)
def get_i_have_list():
    """Return the CBOR-encoded list of files currently under udpDir/."""
    # TODO: Change directory to database
    files = pcap_sync.create_list_of_files('udpDir/')
    return cbor.dumps(files)
def cbor(self):
    # CBOR-encode this object's class name.  Note: inside the body the
    # name `cbor` still resolves to the module (globals), not to this
    # method — method names are not in scope within their own bodies.
    return cbor.dumps(self.__class__.__name__)
def to_cbor(self):
    """Return this metrics object serialized as CBOR bytes (v1 schema)."""
    snapshot = self._v1_metrics()
    return cbor.dumps(snapshot)
def start(self, from_, to, n):
    """Begin a streamed request and yield its results.

    Sends the CBOR frame [0, [VarList(from_), to, n]] on the
    conversation, then delegates to the internal receive-stream
    generator for up to *n* items.
    """
    self._ended = False
    self.conv.send(cbor.dumps([0, [cbor.VarList(from_), to, n]]))
    yield from self._receive_stream(n)
def run(self):
    """Main server loop: poll the control socket and act on CBOR commands.

    Commands are dicts with a 'type' of 'action' (pause/unpause/restart/
    save/load), 'get' (state/settings/leds/mode — replies with CBOR and
    short-circuits the loop), 'call' (invoke a mode_library function by
    name), or 'set' (update LEDs, a parameter, or the active mode).
    NOTE(review): indentation reconstructed from a flattened source;
    branch nesting should be confirmed against history.
    """
    global pause, mode, mode_library
    while True:
        # 300 ms poll so the loop stays responsive without spinning.
        event_count = self.socket.poll(300)
        message = ''
        if event_count != 0:
            data = self.socket.recv()
            data = cbor.loads(data)
            if data['type'] == 'action':
                if data['value'] == 'pause':
                    message = "Paused server"
                    pause = True
                elif data['value'] == 'unpause':
                    message = 'Looping back up again'
                    pause = False
                elif data['value'] == 'restart':
                    # Restart is gated on a sha256-hashed shared key.
                    key = data['key']
                    key = hashlib.sha256(key.encode()).hexdigest()
                    if key == self.hash_key:
                        message = 'Restarting'
                        pause = False
                        mode_library.start()
                    else:
                        message = 'Key not correct!'
                elif data['value'] == 'save':
                    if 'filename' in data.keys():
                        # NOTE(review): data.split('filename') on a dict
                        # looks wrong (dict has no split) — confirm intent.
                        filename = data.split('filename')
                        message = 'Saving variables to' + filename
                        save_variables(filename)
                    else:
                        message = 'Saving variables to demo_vars.pkl'
                        save_variables()
                elif data['value'] == 'load':
                    if ':' in data:
                        _, filename = data.split(':')
                        message = 'Loading variables from' + filename
                        load_variables(filename)
                    else:
                        message = 'Loading variables from demo_vars.pkl'
                        load_variables()
            elif data['type'] == 'get':
                if data['value'] == 'state':
                    paused = 'false'
                    if pause:
                        paused = 'true'
                    message = cbor.dumps({'paused': paused})
                    self.socket.send(message)
                    continue
                elif data['value'] == 'settings':
                    # Collect all public settings on the active mode.
                    variable_names = [
                        variable for variable in dir(mode_library.param)
                        if not (variable.startswith('__')
                                or variable == 'SliderValue')
                    ]
                    variables = {}
                    for variable_name in variable_names:
                        variable = getattr(mode_library.param, variable_name)
                        if type(variable
                                ) == luminolib.Settings.SliderValue:
                            # Sliders serialize as "min:value:max".
                            variables[variable_name] = str(
                                str(variable.min) + ':' +
                                str(variable.value) + ':' +
                                str(variable.max))
                        else:
                            variables[variable_name] = str(variable)
                    print(" * CLIENT : requested settings '", variables, " *")
                    message = cbor.dumps(variables)
                    self.socket.send(message)
                    continue
                elif data['value'] == 'leds':
                    leds_as_dict = []
                    for i in range(self.leds.leds_count):
                        leds_as_dict.append({
                            'led': str(i),
                            'green': str(self.leds.get(i).green),
                            'red': str(self.leds.get(i).red),
                            'blue': str(self.leds.get(i).blue)
                        })
                    message = cbor.dumps(leds_as_dict)
                    self.socket.send(message)
                    continue
                elif data['value'] == 'mode':
                    message = cbor.dumps(mode)
                    self.socket.send(message)
                    continue
            elif data['type'] == 'call':
                function_name = data['value']
                message = 'Calling ' + function_name
                getattr(mode_library, function_name)()
            elif data['type'] == 'set':
                if 'leds' in data.keys():
                    leds_modified = '['
                    for led_data in data["leds"]:
                        led_number = led_data['var'].replace('led', '')
                        led = self.leds.get(int(led_number))
                        led.green = int(led_data['green'])
                        led.red = int(led_data['red'])
                        led.blue = int(led_data['blue'])
                        leds_modified = leds_modified + ' ' + str(
                            led_number)
                    leds_modified = leds_modified + ' ]'
                    # Stream the full LED state to the device over serial.
                    self.port.write(b'#')
                    self.port.write(b'&')  # tell that its to send data
                    for i in range(self.leds.leds_count):
                        led = self.leds.get(i)
                        self.port.write(struct.pack('=B', led.green))
                        self.port.write(struct.pack('=B', led.red))
                        self.port.write(struct.pack('=B', led.blue))
                    self.port.write(b'?')
                elif 'var' in data.keys():
                    var_name = data['var']
                    value = data['value']
                    message = 'Modifying ' + var_name + ' to ' + value
                    if 'cast' in data.keys():
                        try:
                            if data['cast'] == 'int':
                                value = int(value)
                                setattr(mode_library.param, var_name, value)
                            elif data['cast'] == 'float':
                                value = float(value)
                                setattr(mode_library.param, var_name, value)
                            elif data['cast'] == 'str':
                                value = str(value)
                                setattr(mode_library.param, var_name, value)
                            elif data['cast'] == 'bool':
                                value = bool(value)
                                setattr(mode_library.param, var_name, value)
                        except ValueError:
                            message = ' Cannot modify' + var_name + 'to' + value, 'of the type' + data[
                                'cast']
                    else:
                        # No explicit cast: coerce to the current type.
                        var_type = type(
                            getattr(mode_library.param, var_name))
                        if var_type == luminolib.Settings.SliderValue:
                            slider = getattr(mode_library.param, var_name)
                            slider.value = int(value)
                            setattr(mode_library.param, var_name, slider)
                        else:
                            value = var_type(value)
                            setattr(mode_library.param, var_name, value)
                elif 'mode' in data.keys():
                    if mode == 'rainbow':
                        self.port.write(b'#')  # stop looping
                    if data['mode'] == 'rainbow':
                        self.port.write(b'#')
                        self.port.write(b'/')  # rainbow code mode
                    # Modes are modules imported by name.
                    mode = data['mode']
                    mode_library = __import__(mode)
                    mode_library.start()
                    message = ' Mode changed to ' + mode
            else:
                message = 'Command not correct'
            if self.send_debug_to_client:
                self.socket.send(message.encode())
            if self.print_debug and message != '':
                print(' * CLIENT :', message, '*')
if __name__ == '__main__': ep = Transport().endpoint() # Unaddressable transport. #ep = Transport(('127.0.0.1', 3000)).endpoint() print('connect') conn = ep.connect('relays.cardano-mainnet.iohk.io:3000:0') # cardano node handshake. # send peer data. DEFAULT_PEER_DATA = [ 764824073, # protocol magic. [0, 1, 0], # version { 0x04: [0, cbor.Tag(24, cbor.dumps(0x05))], 0x05: [0, cbor.Tag(24, cbor.dumps(0x04))], 0x06: [0, cbor.Tag(24, cbor.dumps(0x07))], 0x22: [0, cbor.Tag(24, cbor.dumps(0x5e))], 0x25: [0, cbor.Tag(24, cbor.dumps(0x5e))], 0x2b: [0, cbor.Tag(24, cbor.dumps(0x5d))], 0x31: [0, cbor.Tag(24, cbor.dumps(0x5c))], 0x37: [0, cbor.Tag(24, cbor.dumps(0x62))], 0x3d: [0, cbor.Tag(24, cbor.dumps(0x61))], 0x43: [0, cbor.Tag(24, cbor.dumps(0x60))], 0x49: [0, cbor.Tag(24, cbor.dumps(0x5f))], 0x53: [0, cbor.Tag(24, cbor.dumps(0x00))], 0x5c: [0, cbor.Tag(24, cbor.dumps(0x31))], 0x5d: [0, cbor.Tag(24, cbor.dumps(0x2b))], 0x5e: [0, cbor.Tag(24, cbor.dumps(0x25))], 0x5f: [0, cbor.Tag(24, cbor.dumps(0x49))],
] for i in range(NUM_BATCHES) ] payload_arr = [ [{ 'Verb': 'set', 'Name': ran_addr[i][j], 'Value': randint(0, 30000) } for j in range(NUM_TX_PER_BATCH) ] for i in range(NUM_BATCHES) ] #Generate the bytes of the payload payload_bytes_arr = [ [ cbor.dumps(payload_arr[i][j]) for j in range(NUM_TX_PER_BATCH) ] for i in range(NUM_BATCHES) ] #Generate an integerkey address holding 'example' tx_addr = [ [ sha512('intkey'.encode('utf-8')).hexdigest()[0:6] + sha512(ran_addr[i][j].encode('utf-8')).hexdigest()[-64:] for j in range(NUM_TX_PER_BATCH) ] for i in range(NUM_BATCHES) ] print("LA DIRECCION ES: {}".format(tx_addr)) #Generate the tx header tx_header_arr = [
def unprotect(self, protected_message, request_id=None):
    """Verify and decrypt an OSCORE-protected CoAP message.

    For responses, *request_id* must carry the identifiers saved when the
    matching request was protected.  Returns (plain message, request_id).
    Raises ProtectionInvalid / ReplayError on any verification failure.
    NOTE(review): indentation reconstructed from a flattened source.
    """
    assert (request_id is not None) == protected_message.code.is_response()
    protected_serialized, protected, unprotected, ciphertext = self._extract_encrypted0(
        protected_message)
    if protected:
        raise ProtectionInvalid("The protected field is not empty")
    # FIXME check for duplicate keys in protected
    if unprotected.pop(COSE_KID, self.recipient_id) != self.recipient_id:
        # for most cases, this is caught by the session ID dispatch, but in
        # responses (where explicit sender IDs are atypical), this is a
        # valid check
        raise ProtectionInvalid("Sender ID does not match")
    if COSE_PIV not in unprotected:
        if request_id is None:
            raise ProtectionInvalid(
                "No sequence number provided in request")
        nonce = request_id.nonce
        seqno = None  # sentinel for not striking out anyting
    else:
        partial_iv_short = unprotected[COSE_PIV]
        seqno = int.from_bytes(partial_iv_short, 'big')
        if not self.recipient_replay_window.is_valid(seqno):
            # If here we ever implement something that accepts memory loss
            # as in 7.5.2 ("Losing Part of the Context State" / "Replay
            # window"), or an optimization that accepts replays to avoid
            # storing responses for EXCHANGE_LIFETIM, can_reuse_nonce a few
            # lines down needs to take that into consideration.
            raise ReplayError("Sequence number was re-used")
        nonce = self._construct_nonce(partial_iv_short, self.recipient_id)
        if request_id is None:
            # ie. we're unprotecting a request
            request_id = RequestIdentifiers(
                self.recipient_id, partial_iv_short, nonce,
                can_reuse_nonce=self.is_unicast)
    # FIXME is it an error for additional data to be present in unprotected?
    if len(
            ciphertext
    ) < self.algorithm.tag_bytes + 1:  # +1 assures access to plaintext[0] (the code)
        raise ProtectionInvalid("Ciphertext too short")
    # AAD is the COSE Encrypt0 structure over the external AAD.
    enc_structure = [
        'Encrypt0', protected_serialized,
        self._extract_external_aad(protected_message, request_id.kid,
                                   request_id.partial_iv)
    ]
    aad = cbor.dumps(enc_structure)
    plaintext = self.algorithm.decrypt(ciphertext, aad, self.recipient_key,
                                       nonce)
    # Only advance the replay window after successful decryption.
    if seqno is not None:
        self.recipient_replay_window.strike_out(seqno)
    # FIXME add options from unprotected
    unprotected_message = Message(code=plaintext[0])
    unprotected_message.payload = unprotected_message.opt.decode(
        plaintext[1:])
    if unprotected_message.code.is_request():
        if protected_message.opt.observe != 0:
            unprotected_message.opt.observe = None
    else:
        if protected_message.opt.observe is not None:
            # -1 ensures that they sort correctly in later reordering
            # detection. Note that neither -1 nor high (>3 byte) sequence
            # numbers can be serialized in the Observe option, but they are
            # in this implementation accepted for passing around.
            unprotected_message.opt.observe = -1 if seqno is None else seqno
    return unprotected_message, request_id
def protect(self, message, request_id=None, *, kid_context=True):
    """Given a plain CoAP message, create a protected message that contains
    message's options in the inner or outer CoAP message as described in
    OSCOAP.

    If the message is a response to a previous message, the additional data
    from unprotecting the request are passed in as request_id. When request
    data is present, its partial IV is reused if possible. The security
    context's ID context is encoded in the resulting message unless
    kid_context is explicitly set to a False; other values for the
    kid_context can be passed in as byte string in the same parameter.

    NOTE(review): indentation reconstructed from a flattened source; the
    kid_context handling is taken to be inside the request branch.
    """
    assert (request_id is None) == message.code.is_request()
    outer_message, inner_message = self._split_message(message)
    protected = {}
    nonce = None
    unprotected = {}
    if request_id is not None:
        # Reuse the request's nonce for the response when allowed.
        nonce = request_id.get_reusable_nonce()
    if nonce is None:
        nonce, partial_iv_short = self._build_new_nonce()
        unprotected[COSE_PIV] = partial_iv_short
        if message.code.is_request():
            unprotected[COSE_KID] = self.sender_id
            request_id = RequestIdentifiers(self.sender_id,
                                            partial_iv_short,
                                            nonce,
                                            can_reuse_nonce=None)
            if kid_context is True:
                if self.id_context is not None:
                    unprotected[COSE_KID_CONTEXT] = self.id_context
            elif kid_context is not False:
                unprotected[COSE_KID_CONTEXT] = kid_context
    assert protected == {}
    protected_serialized = b''  # were it into an empty dict, it'd be the cbor dump
    enc_structure = [
        'Encrypt0', protected_serialized,
        self._extract_external_aad(outer_message, request_id.kid,
                                   request_id.partial_iv)
    ]
    aad = cbor.dumps(enc_structure)
    key = self.sender_key
    # Inner plaintext: code byte, encoded inner options, then the payload
    # separated by the 0xFF marker (as in plain CoAP serialization).
    plaintext = bytes([inner_message.code]) + inner_message.opt.encode()
    if inner_message.payload:
        plaintext += bytes([0xFF])
        plaintext += inner_message.payload
    ciphertext_and_tag = self.algorithm.encrypt(plaintext, aad, key, nonce)
    option_data = self._compress(unprotected, protected)
    outer_message.opt.object_security = option_data
    outer_message.payload = ciphertext_and_tag
    # FIXME go through options section
    # the request_id in the second argument should be discarded by the
    # caller when protecting a response -- is that reason enough for an
    # `if` and returning None?
    return outer_message, request_id
def run(self):
    """Handle one GRASP negotiation for NZD resources.

    Decodes the incoming offer, then either rejects (wrong currency),
    negotiates down in steps when more than half the reserves are asked
    for, or accepts outright.  NOTE(review): indentation reconstructed
    from a flattened source; the bare `except` blocks are kept as-is
    (they deliberately tolerate non-CBOR values).
    """
    answer = self.nobj
    shandle = self.shandle
    try:
        answer.value = cbor.loads(answer.value)
        graspi.tprint("CBOR value decoded")
        _cbor = True
    except:
        _cbor = False
    graspi.ttprint("listened, answer", answer.name, answer.value)
    graspi.tprint("Got request for", answer.value[0], answer.value[1])
    if answer.dry:
        graspi.tprint("Dry run")
    result = True
    reason = None
    if answer.value[0] != "NZD":
        endit(shandle, "Invalid currency")
    elif answer.value[1] > reserves / 2:
        #other end wants too much, we need to negotiate
        proffer = int(reserves / 2)
        step = 1
        concluded = False
        graspi.tprint("Starting negotiation")
        while not concluded:
            #proffer some resource
            graspi.tprint("Offering NZD", proffer)
            answer.value[1] = proffer
            if _cbor:
                answer.value = cbor.dumps(answer.value)
            _r = graspi.negotiate_step(asa_handle, shandle, answer, 1000)
            # Older API returned 3 values (reason folded into answer).
            if _old_API:
                err, temp, answer = _r
                reason = answer
            else:
                err, temp, answer, reason = _r
            graspi.ttprint("Step", step, "gave:", err, temp, answer, reason)
            step += 1
            if (not err) and temp == None:
                concluded = True
                graspi.tprint("Negotiation succeeded")
            elif not err:
                try:
                    answer.value = cbor.loads(answer.value)
                    graspi.tprint("CBOR value decoded")
                except:
                    pass
                graspi.tprint("Loop count", answer.loop_count, "request",
                              answer.value[1])
                #maybe wait (for no particular reason)
                if _prng.randint(1, 10) % 2:
                    err1 = graspi.negotiate_wait(asa_handle, shandle, wt)
                    graspi.tprint("Tried wait:", graspi.etext[err1])
                    time.sleep(
                        10
                    )  # if wt<10000 this tests anomaly handling by the peer
                    graspi.tprint("Woke up")
                if proffer < 0.6 * reserves:
                    proffer += 10
                    if proffer > answer.value[1]:
                        proffer = answer.value[
                            1] - 1  #always be a little mean
                else:
                    #we don't have enough resource, we will reject
                    result = False
                    #randomly choose English or Russian error message
                    if reserves % 2:
                        reason = "Insufficient funds"
                    else:
                        reason = u"Недостаточно средств"
                    endit(shandle, reason)
                    concluded = True
            else:
                #other end rejected or loop count exhausted
                concluded = True
                result = False
                if err == graspi.errors.loopExhausted:
                    # we need to signal the end
                    endit(shandle, graspi.etext[err])
                elif err == graspi.errors.declined and reason != "":
                    graspi.tprint("Declined:", reason)
                else:
                    graspi.tprint("Failed:", graspi.etext[err])
        #end of negotiation loop
        pass  #out of negotiation loop
    else:
        #we can accept the initially requested value
        graspi.tprint("Request accepted")
        err = graspi.end_negotiate(asa_handle, shandle, True)
        if err:
            graspi.tprint("end_negotiate error:", graspi.etext[err])
def render_GET_advanced(self, request, response):
    """Serve GET on /oic/res (OCF resource discovery).

    Builds the discovery collection as a JSON string, then encodes it
    per the requested content type: text/plain and application/json get
    the string verbatim, the two CBOR types get cbor.dumps of the parsed
    structure. When the query is "if=oic.if.baseline" the links array is
    wrapped in the baseline envelope object.

    Uses module globals ocf_piid and ocf_ip_address for anchors and
    endpoint addresses. Returns (self, response).
    """
    print("OICRES: get :", request.accept)
    all_queries = request.uri_query
    print("OICRES: queries:", all_queries)
    # Collect fragments in a list and join once at the end instead of
    # the original repeated `s = s + ...` (quadratic and hard to read).
    # Each literal is kept byte-identical to preserve the wire payload.
    baseline = all_queries == "if=oic.if.baseline"
    parts = []
    if baseline:
        # Baseline envelope: one oic.wk.res object carrying "links".
        parts.append('[{ "rt": ["oic.wk.res"], ')
        parts.append('"if": ["oic.if.ll", "oic.if.baseline"],')
        parts.append('"links":')
    ## oic.if.ll
    parts.append('[ { "anchor": "ocf://' + ocf_piid + '", "href": "/oic/res", "rel": "self",')
    parts.append('"rt": ["oic.wk.res"], "if": ["oic.if.ll", "oic.if.baseline"], "p": {"bm": 3},')
    parts.append(' "eps": [ {"ep": "coap://' + ocf_ip_address + '"},{"ep": "coaps://' + ocf_ip_address + '"} ] }')
    parts.append(',{ "anchor": "ocf://' + ocf_piid + '", "href": "/oic/d",')
    parts.append(' "rt": ["oic.wk.d"], "if": ["oic.if.r", "oic.if.baseline"], "p": {"bm": 3},')
    parts.append(' "eps": [ {"ep": "coap://' + ocf_ip_address + '"},{"ep": "coaps://' + ocf_ip_address + '"} ] }')
    parts.append(',{ "anchor": "ocf://' + ocf_piid + '", "href": "/oic/p",')
    parts.append(' "rt": ["oic.wk.p"], "if": ["oic.if.r", "oic.if.baseline"], "p": {"bm": 3},')
    parts.append(' "eps": [ {"ep": "coap://' + ocf_ip_address + '"},{"ep": "coaps://' + ocf_ip_address + '"} ] }')
    ## introspection
    parts.append(',{ "anchor": "ocf://' + ocf_piid + '", "href": "/introspection",')
    parts.append(' "rt": ["oic.wk.introspection"], "if": ["oic.if.r", "oic.if.baseline"], "p": {"bm": 1},')
    parts.append(' "eps": [ {"ep": "coap://' + ocf_ip_address + '"},{"ep": "coaps://' + ocf_ip_address + '"} ] }')
    # security: /oic/sec/doxm
    parts.append(',{ "anchor": "ocf://' + ocf_piid + '", "href": "/oic/sec/doxm",')
    parts.append(' "rt": ["oic.r.doxm"], "if": ["oic.if.baseline"], "p": {"bm": 3},')
    parts.append(' "eps": [ {"ep": "coap://' + ocf_ip_address + '"}, {"ep": "coaps://' + ocf_ip_address + '"} ] }')
    # /oic/sec/pstat
    parts.append(',{ "anchor": "ocf://' + ocf_piid + '", "href": "/oic/sec/pstat",')
    parts.append(' "rt": ["oic.r.pstat"], "if": ["oic.if.baseline"], "p": {"bm": 3},')
    parts.append(' "eps": [ {"ep": "coap://' + ocf_ip_address + '"},{"ep": "coaps://' + ocf_ip_address + '"} ] }')
    # /oic/sec/cred
    parts.append(',{ "anchor": "ocf://' + ocf_piid + '", "href": "/oic/sec/cred",')
    parts.append(' "rt": ["oic.r.cred"], "if": ["oic.if.baseline"], "p": {"bm": 3},')
    parts.append(' "eps": [ {"ep": "coap://' + ocf_ip_address + '"},{"ep": "coaps://' + ocf_ip_address + '"} ] }')
    # /oic/sec/csr
    parts.append(',{ "anchor": "ocf://' + ocf_piid + '", "href": "/oic/sec/csr",')
    parts.append(' "rt": ["oic.r.csr"], "if": ["oic.if.baseline"], "p": {"bm": 3},')
    parts.append(' "eps": [ {"ep": "coap://' + ocf_ip_address + '"},{"ep": "coaps://' + ocf_ip_address + '"} ] }')
    # application resources
    parts.append(',{ "anchor": "ocf://' + ocf_piid + '", "href": "/activity",')
    parts.append(' "rt": ["oic.r.activity"],"if":["oic.if.a", "oic.if.baseline"],')
    parts.append(' "p": {"bm": 3}, "eps": [ {"ep": "coap://' + ocf_ip_address + '"}, {"ep": "coaps://' + ocf_ip_address + '"}]}')
    parts.append(',{ "anchor": "ocf://' + ocf_piid + '", "href": "/binaryswitch2",')
    parts.append(' "rt": ["oic.r.switch.binary"],"if":["oic.if.a", "oic.if.baseline"],')
    parts.append(' "p": {"bm": 3}, "eps": [ {"ep": "coap://' + ocf_ip_address + '"}, {"ep": "coaps://' + ocf_ip_address + '"}]}')
    parts.append(" ]")
    if baseline:
        parts.append('}]')
    return_json = "".join(parts)
    # Parse once so the CBOR branches encode the structure, not the text.
    json_data = json.loads(return_json)
    self.payload = str(return_json)
    print(" return :")
    print(return_json)
    response.code = defines.Codes.CONTENT.number
    response.content_type = request.accept
    if request.accept == defines.Content_types["text/plain"]:
        print(" content type text/plain")
        response.payload = return_json
    elif request.accept == defines.Content_types["application/json"]:
        print(" content type application/json")
        response.payload = return_json
    elif request.accept == defines.Content_types["application/cbor"]:
        print(" content type application/cbor")
        response.payload = bytes(cbor.dumps(json_data))
    elif request.accept == defines.Content_types["application/vnd.ocf+cbor"]:
        print(" content type application/vnd.ocf+cbor")
        response.payload = bytes(cbor.dumps(json_data))
        # OCF content-format version (2048 == 0x0800, i.e. version 2.0)
        response.ocf_content_format_version = int(2048)
    return self, response
def update(self, n): assert not self._ended self.conv.send(cbor.dumps([1, [n]])) yield from self._receive_stream(n)
import cbor payload = {'Verb': 'set', 'Name': 'vamshi', 'Value': 22} payload_bytes = cbor.dumps(payload)
def get_i_want_list(i_have_list): list_of_extensions = pcap_sync.compare_files(cbor.loads(i_have_list)) return cbor.dumps(list_of_extensions), list_of_extensions
def _serialize(data): return cbor.dumps(data)
def _send_dict(self, data): data2 = cbor.dumps(data) self._send_message(data2)
import cbor with open("int", 'wb') as f: f.write(cbor.dumps(-12345678)) with open("string", 'wb') as f: f.write(cbor.dumps("smeg")) with open("bytes", 'wb') as f: f.write(cbor.dumps(b'bytes')) with open("none", 'wb') as f: f.write(cbor.dumps(None)) with open("float", 'wb') as f: f.write(cbor.dumps(1.1)) with open("array", 'wb') as f: f.write(cbor.dumps([8, 1.1, "pie"])) with open("map", 'wb') as m: m.write(cbor.dumps({"one": 1, "two": "deux"})) with open("array-map", 'wb') as m: m.write(cbor.dumps([{"one": 1, "two": "deux", "nowt": None, "data": b'01234'}, {"three": 3, "four-something": 4.3, "lots": [1, 2, 3]}]))
s.setsockopt(socket.SOL_LORA, socket.SO_DR, 3) #Selecting non-confirmed type of messages s.setsockopt(socket.SOL_LORA, socket.SO_CONFIRMED, False) while True: #Wait for Lora connection print('Waiting for a LoRa connection...') while not lora.has_joined(): pycom.rgbled(0x43142f) #LED Violet print('Connected.') pycom.rgbled(0x000000) #LED OFF #Prepare message msg = 'Hello, world!' #Convert to CBOR msg = cbor.dumps(msg) print('Message content: ' + str(msg) + ' (length is ' + str(len(msg)) + ' bytes).') #Wait until data is sent (max. 10s) s.setblocking(True) s.settimeout(10) print('Sending message...') pycom.rgbled(0x00007f) #LED Blue try: s.send(msg) except: print('Failed to send message!')
import cbor import requests import nacl.signing import nacl.encoding signing_key = nacl.signing.SigningKey.generate() verify_key = signing_key.verify_key data = { 'network_id': '12345', 'key': verify_key.encode(encoder=nacl.encoding.RawEncoder) } data_encoded = cbor.dumps(data) signed_message = signing_key.sign(data_encoded) message = { 'network_sig': signed_message.signature, 'msg': signed_message.message } encoded_message = cbor.dumps(message) res = requests.post(url="http://localhost:5000/register/network", data=encoded_message, headers={'Content-Type': 'application/octet-stream'}) print(res) for line in res.iter_lines():
import cbor import binascii import scrypt import base64 import hmac import ed25519 import base58 words = 'ring crime symptom enough erupt lady behave ramp apart settle citizen junk' # words = Mnemonic('english').generate() print("\nMnemonic:", words) entropy = Mnemonic('english').to_entropy(words) print("Entropy:", entropy.hex()) cborEnt = cbor.dumps(bytes(entropy)) print("Serialised:", cborEnt.hex(), "\n") seed = hashlib.blake2b(cborEnt, digest_size=32) print("Seed:", seed.hexdigest()) cborSeed = cbor.dumps(seed.digest()) print("Serialised:", cborSeed.hex(), "\n") passPhrase = '' print("Spending pass:"******"Serialised:", passPhrase.hex()) seedBuf = cbor.dumps(cborSeed)
def toBytes(self): map = {"type": self.type.value} map.update(self.data) return cbor.dumps(map)
import sensorSim, json, time, os, datetime, pytz, cbor occupants = 0 temperature = 16.5 humidity = 45.5 count = 1 while True: currentTime = datetime.datetime.now(pytz.timezone('US/Eastern')) currentTimeString = currentTime.strftime('%m/%d/%y/%H/%M/%S') occupants = sensorSim.occupancyUp(occupants) occupants = sensorSim.occupancyDown(occupants) temperature = sensorSim.temperature(temperature) humidity = sensorSim.humidity(humidity) Environment = {'sensorData': {'SensorID': "Sensor1", 'Temperature': temperature, 'Humidity': humidity, 'Occupancy': occupants,'Time': currentTime}} jsonReturn = json.dumps(Environment, sort_keys=True, indent=4, default=str) cborReturn = cbor.dumps({'sensorData': {'SensorID': 'Sensor1', 'Temperature': temperature, 'Humidity': humidity, 'Occupancy': occupants,'Time': currentTimeString}}) cborReceive = cbor.loads(cborReturn) print("Sensor Reading", count) print(jsonReturn) print(cborReturn) print(cborReceive) #Putts cbor back to JSON count += 1 time.sleep(2)
def keepalive(self): while True: gevent.sleep(config.SLOT_DURATION / 1000) # keep alive self.conv.send(cbor.dumps(43))
def test_response_to_valid_sa(self): msg = b'\x81' + cbor.dumps({'mac': 'sha3_256', 'mac_len': 8}) mac = handshake_mac(self.session_key, msg) self.session.handle(msg + mac) self.assertEqual(self.session.other_seq, 0) self.assertEqual(self.session.state, ClientState.established)
def __call__(self): # instance Bi MsgSubscribe self.conv.send(cbor.dumps(42))
def to_cbor(self): if self._cbor is None: self._cbor = cbor.dumps(self.to_hash(), sort_keys=True) return self._cbor
def mine(self): print("start mining...") pool = self.blockchain.transactions_pool trans = [] temp_balance = {} # key = user value = balance for transaction in pool: trans.append(transaction) if transaction.sender not in temp_balance: # to do : function to get balance get_balance(transaction.sender) temp_balance[transaction.sender] = user_db[ transaction.sender].get("balance") if transaction.receiver not in temp_balance: temp_balance[transaction.receiver] = user_db[ transaction.receiver].get("balance") for transaction in trans: if temp_balance[transaction.sender] - transaction.amount < 0: print("Transaction invalid: insufficient balance...") trans.remove(transaction) self.blockchain.remove_transaction_from_pool(transaction) continue temp_balance[transaction.sender] -= transaction.amount temp_balance[transaction.receiver] += transaction.amount print("Transaction can be processed: suffice balance...") #get sender and amount # Init block and build tree prev_header = self.blockchain.longest_header if len(trans) == 0: return "All transactions invalid" block = Block(trans, prev_header) #get pow counter = 0 while True: if counter % 1000 == 0: print("Mine attempt:", counter) #drop block if someone else has added to the chain if prev_header != self.blockchain.longest_header: break genNonce = str(random.randint(0, 300000)) block.header['nonce'] = genNonce to_hash = cbor.dumps(block.header) digest = hashlib.sha256(to_hash).digest() if digest < self.blockchain.target: try: self.blockchain.add(block) for transaction in trans: user_db[transaction. sender]["balance"] -= transaction.amount user_db[transaction. receiver]["balance"] += transaction.amount # get miner balance user_db[ self.miner_id]["balance"] = user_db[self.miner_id].get( "balance", 0) + COINS_PER_BLOCK #reward # announce to everyone except Exception as e: print("Mining failed, blockchain reject submitted block:", e) break counter += 1