def dataReceived(self, data):
    """Twisted protocol callback: parse one incoming peer message and dispatch
    on its command field.

    Handles the version/verack handshake, inventory announcements ("inv"),
    data requests ("getdata") and transaction payloads ("tx"); anything else
    is just logged.

    NOTE(review): assumes `data` contains exactly one complete serialized
    message — no buffering/partial-frame handling is visible here; confirm
    the transport guarantees message framing.
    """
    m = deserialize(data)
    if m["message"]["command"] == "verack":
        """Complete the handshake if we received both version and verack"""
        # Peer acknowledged our version; stop the verack timeout.
        self.timeouts["verack"].cancel()
        del self.timeouts["verack"]
        # Handshake is complete only once BOTH timeouts have been cleared.
        if "version" not in self.timeouts:
            self.on_handshake_complete()
    elif m["message"]["command"] == "version":
        """Send the verack back"""
        # TODO: make sure this node uses NODE_NETWORK (and maybe NODE_BLOOM in the future)
        self.timeouts["version"].cancel()
        del self.timeouts["version"]
        # Acknowledge the peer's version message.
        self.transport.write(serialize(message(verack(), self.params)))
        if "verack" not in self.timeouts:
            self.on_handshake_complete()
    elif m["message"]["command"] == "inv":
        """Run through our callbacks to see if we are waiting on any of these inventory items"""
        inventory = deserialize(m["message"]["payload"], "inv")
        # item appears to be a (type, hash) pair — indexed as item[0]/item[1] below.
        for item in inventory["inv"]:
            if item[1] in self.callbacks:
                # We were waiting on this item: fire the callback once, then drop it.
                self.callbacks[item[1]](item[1])
                del self.callbacks[item[1]]
            elif item[0] == "TX":
                # Unsolicited transaction announcement: request the full data.
                self.send_message(message(get_data(item[0], item[1]), self.params))
                print "Peer %s:%s announced new %s %s" % (self.transport.getPeer().host, self.transport.getPeer().port, item[0], item[1])
    elif m["message"]["command"] == "getdata":
        """Serve the data from inventory if we have it"""
        data_request = deserialize(m["message"]["payload"], "getdata")
        for item in data_request["getdata"]:
            # Only transactions we hold in self.inventory are served.
            if item[1] in self.inventory and item[0] == "TX":
                transaction = tx(self.inventory[item[1]])
                self.send_message(message(transaction, self.params))
    elif m["message"]["command"] == "tx":
        """Parse to check the script_pubkey data element against our subscriptions"""
        t = deserialize(m["message"]["payload"], "tx")
        for out in t['tx']['outs']:
            script = bitcoin.deserialize_script(out['script'])
            # Script layout varies: 5-element scripts carry the data at index 2,
            # otherwise at index 1. NOTE(review): confirm against the script
            # types actually subscribed to — other layouts would misindex here.
            data_element = script[2] if len(script) == 5 else script[1]
            if data_element in self.callbacks:
                # Subscribed data element matched: hand the whole parsed tx over.
                self.callbacks[data_element](t)
    else:
        print "Received message %s from %s:%s" % (m["message"]["command"], self.transport.getPeer().host, self.transport.getPeer().port)
def test_deserialize_array_of_kvps(self):
    """Two flat key/value pairs deserialize into two single-entry dicts."""
    tokens = ['name one', '=', 'value one', 'name two', '=', 'value two']
    result = deserializer.deserialize(tokens)
    expected = [{'name one': 'value one'}, {'name two': 'value two'}]
    self.assertEquals(cmp(expected, result), 0)
def deserialize_log(self, path):
    """Load a serialized log from *path* and populate this object's fields."""
    task, action, timestamps, solved = deserialize(path)
    # Keep the raw sections alongside the fields derived from them.
    self.task_info = task
    self.action_info = action
    self.timestamp_info = timestamps
    self.solved_info = solved
    self.log_time = solved["log_time"]
    self.task_id = task["task_id"]
    # Featurized objects are gathered from both the task and action sections.
    objects = self.get_objects_from_json(task)
    objects.extend(self.get_objects_from_json(action))
    self.featurized_objects = objects
def test_deserialize_nested_array(self):
    """A braced block becomes a list of single-entry dicts under its key."""
    tokens = [
        'name', '=', '{',
        'sub_name', '=', 'derp',
        'sub_name_2', '=', 'derp2',
        '}',
    ]
    result = deserializer.deserialize(tokens)
    expected = [{'name': [{'sub_name': 'derp'}, {'sub_name_2': 'derp2'}]}]
    self.assertEquals(cmp(expected, result), 0)
def test_deserialize_doubly_nested_key(self):
    """Braces nested two levels deep produce correspondingly nested lists."""
    tokens = [
        'name', '=', '{',
        'sub_name', '=', 'derp',
        'sub_name_2', '=', '{',
        'more_nesting', '=', 'a thing',
        '}',
        '}',
    ]
    result = deserializer.deserialize(tokens)
    expected = [{
        'name': [
            {'sub_name': 'derp'},
            {'sub_name_2': [{'more_nesting': 'a thing'}]},
        ],
    }]
    self.assertEquals(cmp(expected, result), 0)
def test_deserialize_single_key_value_pair(self):
    """The simplest input — one key, '=', one value — yields one dict."""
    tokens = ['name', '=', 'value']
    result = deserializer.deserialize(tokens)
    expected = [{'name': 'value'}]
    self.assertEquals(cmp(expected, result), 0)
def test_deserialize_single_key_multi_value(self):
    """A braced run of bare values maps the key to a plain list of them."""
    tokens = ['name', '=', '{', 'first', 'second', '}']
    result = deserializer.deserialize(tokens)
    expected = [{'name': ['first', 'second']}]
    self.assertEquals(cmp(expected, result), 0)
def collect():
    # NOTE(review): this bare string sits AFTER `global resultset`, so it is
    # not actually the function docstring — it is a no-op statement. Left
    # in place here; moving it above `global` would make it a real docstring.
    global resultset
    """Method to collect profile data"""
    # Interactive menu: the user picks one of four data sources.
    # (Prompt text reconstructed from a whitespace-mangled triple-quoted
    # literal — confirm exact layout against the original file.)
    method = raw_input("""
    Enter the method for data collection of your choice:
    1: Scrape from public link
    2: Enter individual data
    3: Generate data
    4: Read data from file
    """)
    resultset = dict()
    if method == '1':
        # Option 1: scrape profiles from public LinkedIn links.
        scraper.initiate()
        for profile in scraper.links:
            # TODO: variables musn't be global - functions must return them
            if profile.find('linkedin.com/pub/') > -1:
                # Strip everything up to and including 'linkedin.com/pub/'
                # (17 chars) to get the bare profile slug used as the key.
                profile = profile[(profile.find('linkedin.com/pub/')+17):]
                resultset[profile] = scraper.resumelist[profile]
                # TODO: Devise a method to get the profile's mail id
                # take the username and remove the /xxx/yyy/zzz numbers
    elif method == '2':
        # Option 2: enter a single profile by hand, field by field.
        uname = raw_input('Enter uname: ')
        details = dict()
        details['fname'] = raw_input('First name: ')
        details['lname'] = raw_input('Last name: ')
        details['email'] = raw_input('email: ')
        details['locality'] = raw_input('Locality: ')
        details['industry'] = raw_input('Industry: ')
        details['current'] = raw_input('Current position: ')
        details['past'] = raw_input('Past positions [separator: "|"]: ').split('|')
        details['experience'] = int(raw_input('Total job experience in years: '))
        details['education'] = raw_input('Education [separator: "|"]: ').split('|')
        details['skills'] = raw_input('Skills [separator: "|"]:').split('|')
        details['project-descriptions'] = raw_input('Project descriptions [separator: "|"]: ').split('|')
        resultset[uname] = details
    elif method == '3':
        # Option 3: synthesize profiles with the generator module.
        number = int(raw_input("How many profiles do you want to generate? \n"))
        # If number < 1000, delay negligible
        if number < 1000:
            resultset.update(generator.generate(number))
        else:
            # Use python multiprocessing capabilities to divide work
            start = datetime.now()
            pool = multiprocessing.Pool()
            # Python 2 integer division: each worker generates ~1000 profiles.
            factor = number/1000
            for worker in xrange(0,multiprocessing.cpu_count()*factor):
                # Split the task of generating n numbers to all cpus;
                # each completed batch is merged via the appender callback.
                pool.apply_async(generator.generate, (number/(multiprocessing.cpu_count()*factor),), callback=lambda profiles: appender(profiles))
            pool.close()
            pool.join()
            end = datetime.now()
            # Trailing comma keeps both prints on one output line (Py2).
            print 'Finished generating', number, 'profiles in',
            print (str((end-start).seconds)+'.'+str((end-start).microseconds)), 'seconds'
            # Free the memory held by the worker-result intermediates.
            import gc
            gc.collect()
    elif method == '4':
        # Option 4: load previously serialized profiles from disk.
        resultset = deserializer.deserialize('data/datastore.in')
    return resultset
def deserialize_eu4_text_to_object(text):
    """Tokenize raw EU4 script text, then deserialize the token stream."""
    tokens = tokenize(text)
    return deserialize(tokens)