def file_transfer_init(socket_descriptor, filename):
    """Ask the server whether `filename` exists and, if the user confirms,
    download it via file_transfer().

    NOTE(review): mutates the module-level `file_transfer_payload` and
    `request` dicts rather than building fresh ones per call.
    """
    file_transfer_payload['filename'] = filename
    file_transfer_payload['username'] = client_db['username']
    request['request_type'] = "file_transfer_init"
    request['payload'] = file_transfer_payload
    utils.print_dict("File Init request", request)
    # The request dict is serialized via str(); the server parses it back
    # with get_dict_from_string.
    socket_descriptor.sendall(str(request))
    data = socket_descriptor.recv(DATA_TRANSFER_SIZE)
    response = utils.get_dict_from_string(data)
    utils.print_dict("File Init response", response)
    if response['file_exists']:
        filesize = response['file_size']
        message = raw_input("File Exists, " + str(filesize) +
                            "Bytes, download? (Y/N)? -> ")
        if message.lower() == 'Y'.lower():
            # socket_descriptor.send('OK')
            file_transfer(socket_descriptor, filename, int(filesize))
        else:
            print "File download canceled"
    else:
        print "File doesn't exist"
def main():
    """Parse CLI arguments, validate the dataset name and build the path
    table used by the snapshot-preparation pipeline.

    Expects exactly three user arguments:
        prepare_snapshots.py <dataset_name> <dataset_type> <snapshot_num>
    """
    # -------------------------------
    # PARSE ARGUMENTS
    # -------------------------------
    # argv[0] (the script path) is zipped under the 'command' key.
    arg_names = ['command', 'dataset_name', 'dataset_type', 'snapshot_num']
    if len(sys.argv) != 4:
        print("Please check the arguments.\n")
        print("Example usage:")
        print("python ./.../prepare_snapshots.py Twitter16 sequential 3")
        sys.exit()
    args = dict(zip(arg_names, sys.argv))
    dataset = args['dataset_name']
    dataset_type = args['dataset_type']
    snapshot_num = int(args['snapshot_num'])
    print_dict(args)
    # --------------------------
    # INIT PATHS
    # --------------------------
    paths = {}
    if dataset in ['Twitter15', 'Twitter16']:
        paths['resource_label'] = './resources/{0}/{0}_label_all.txt'.format(dataset)
        paths['resource_tree'] = './resources/{0}/data.TD_RvNN.vol_5000.txt'.format(dataset)
        paths['timestamps_trim'] = './data/timestamps/{}/timestamps_trim.txt'.format(dataset)
        paths['sequential_snapshots'] = './data/timestamps/{}/sequential_snapshots_{:02}.txt'.format(dataset, snapshot_num)
        paths['temporal_snapshots'] = './data/timestamps/{}/temporal_snapshots_{:02}.txt'.format(dataset, snapshot_num)
    elif dataset in ['Weibo']:
        # FIX: previously exited silently, giving the user no explanation.
        print("Dataset '{}' is not supported yet.".format(dataset))
        sys.exit()
    else:
        print("Unknown dataset '{}'. Expected Twitter15, Twitter16 or Weibo.".format(dataset))
        sys.exit()
    print_dict(paths)
def _fini(self, X, y):
    """Finalize the fit: record timing, fold the key outcomes into
    ``self.results_`` and build the one-row performance summary frame."""
    self.end_fit_ = time.time()
    self.fit_time_elapsed_ = self.end_fit_ - self.start_fit_
    # One batched update preserves the same key insertion order as the
    # original sequence of individual update() calls.
    self.results_.update({
        "Best Score": self.best_score_,
        "Best Estimator": self.best_estimator_,
        "Best Parameters": self.best_params_,
        "Selected Features": self.selected_features_,
        "Fit Time": self.fit_time_elapsed_,
    })
    summary_row = {
        "FileSplit": self._split,
        "Group": self._group,
        "Model Id": self.model_id_,
        "Estimator": self.best_estimator_.__class__.__name__,
        "Score": self.best_score_,
        "Fit Time": self.fit_time_elapsed_,
    }
    self.performance_ = pd.DataFrame(data=summary_row, index=[0])
    print(f" Best CV Score: {np.round(self.best_score_,4)}")
    print(f" Best Test Score: {np.round(self.test_score_,4)}")
    print(f" # Features: {X.shape[1]}")
    print(f" Fit Time: {np.round(self.fit_time_elapsed_,4)} seconds")
    print(f" Test Time: {np.round(self.test_time_elapsed_,4)} seconds")
    print(f" Best Parameters:")
    print_dict(self.best_params_)
def file_transfer(socket_descriptor, filename, filesize):
    """Request `filename` from the server and stream `filesize` bytes into
    a local copy named 'new_<filename>'.

    NOTE(review): relies on the module-level `file_transfer_payload`,
    `request` and `client_db` dicts; DATA_TRANSFER_SIZE is the recv chunk size.
    """
    file_transfer_payload['filename'] = filename
    file_transfer_payload['username'] = client_db['username']
    request['request_type'] = "file_transfer"
    request['payload'] = file_transfer_payload
    utils.print_dict("File Transfer Request", request)
    socket_descriptor.sendall(str(request))
    new_filename = 'new_' + filename
    with open(new_filename, 'wb') as f:
        print "new_filename ", new_filename
        # First chunk read outside the loop so progress prints even for
        # files smaller than one chunk.
        data = socket_descriptor.recv(DATA_TRANSFER_SIZE)
        totalRecv = len(data)
        f.write(data)
        print "{0:.2f}".format((totalRecv / float(filesize)) * 100) + "% Done"
        print "Naren Total recv {}, file size {}".format(totalRecv, filesize)
        # Keep receiving until the advertised byte count has arrived.
        while totalRecv < filesize:
            print "before recv "
            data = socket_descriptor.recv(DATA_TRANSFER_SIZE)
            totalRecv += len(data)
            f.write(data)
            print "{0:.2f}".format(
                (totalRecv / float(filesize)) * 100) + "% Done"
            print "Total recv {}, file size {}".format(totalRecv, filesize)
        print "out of while loop"
    return
def main():
    """Debug driver: sample segmented articles from Mongo and print the
    keywords extracted for each one."""
    mongo = MongoSpider(conf.mongo_spider)
    redis_word = redis.Redis(**conf.redis_word)
    keyword = Keyword(redis_word)
    # Only articles that already have a positive segmentation version.
    for article in mongo.article.find({
            'v.seg': {
                '$gt': 0
            }
    }).skip(1000).limit(10):
        if article is None or article['v']['seg'] <= 0:
            continue
        word = mongo.word_file.get(article['_id'])
        if word is not None:
            word = json.loads(word)
            title = article['title']
            words = word['words']
            res = keyword.make(title, words)
            if res is not None:
                print
                print
                print
                print '*' * 100
                print 'http://www.haoku.net/articles/%s.html' % article['_id']
                print_list(res['keys'], title='keys of %s' % title)
                print_dict(res['words'], title='index of %s' % title,
                           cmp_key=lambda x: -x[1])
def SelectProfile(self, p):
    """ Select profile """
    """ POST https://www.mos.ru/pgu/ru/application/dogm/journal/ """
    ps = self._ps
    # Form fields expected by the DogmJournal ajax endpoint; `p` carries
    # the profile credentials (login / password / system).
    params = {
        "ajaxAction": "get_token",
        "ajaxModule": "DogmJournal",
        "data[login]": p.login,
        "data[pass]": p.password,
        "data[system]": p.system
    }
    # Cookie format used by the portal: "<value>|<pgu token>".
    ps.cookies["elk_token"] = "null" + "|" + self._pgu_mos_ru_token
    ps.cookies["elk_id"] = ""
    print("cookies:")
    print_dict(ps.cookies)
    print("params:")
    print_dict(params)
    ps.headers.update(
        {'referer': "https://www.mos.ru/pgu/ru/application/dogm/journal/"})
    r = my_get_post(ps.post,
                    "https://pgu.mos.ru/ru/application/dogm/journal/",
                    data=params)
    print("Diary auth token:")
    print(r.text)
    pass
    """ https://dnevnik.mos.ru/lms/api/sessions """
def client_send_message_1(sd): utils.print_trancsation_message("Client sending message 1") # Fetching values of generator and prime number p, g = get_prime_and_gene() # client_Xa = int(raw_input("Please enter Xa value:")) client_Xa = 17 client_db['client_Xa'] = client_Xa client_eke = encrypt.get_eke(client_db['pwd_digest'], client_Xa, g, p) client_db['client_eke'] = client_eke # Loading message payload message_1_payload['username'] = client_db['username'] print "client_db['pwd_digest']", client_db['pwd_digest'] message_1_payload['client_eke'] = client_eke message_1_payload['prime_num'] = p message_1_payload['generator'] = g request['request_type'] = 'message_1' request['payload'] = message_1_payload utils.print_dict("Payload 1", request) utils.print_dict("Client db", client_db) sd.sendall(str(request))
def main(): #host = '127.0.0.1' #port = 7070 # Taking host name and port number from command line host = sys.argv[1] port = sys.argv[2] port = int(port) if len(sys.argv) != 3: print 'Usage: python %s <HostName> <PortNumber>' % (sys.argv[0]) sys.exit() # Socket creation,binding it a address and listening for client conenction sd = socket.socket() sd.bind((host, port)) sd.listen(5) retry_count = 0 print "\n" print "Socket created and server is running" print "\n" # Accepting a connection from client c, addr = sd.accept() print "Client connected ip:<" + str(port) + ">" request = None while True: print "Entering loop" try: request = utils.get_dict_from_string(c.recv(DATA_TRANSFER_SIZE)) # utils.print_dict( "Request from client", request) if request is None or request == "": print "Data received None" break except Exception as e: break if request != "": if request['request_type'] == "message_1": # Get server nonce server_Ns = encrypt.get_random_number() s_kas = process_message_1(request['payload'], server_Ns, c) if request['request_type'] == "message_3": process_message_3(request['payload'], s_kas, server_Ns, c) if request['request_type'] == "file_transfer_init": response = process_file_transfer_init_request( request['payload'], c) utils.print_dict("File transfer init request from client", response) c.sendall(str(response)) if request['request_type'] == "file_transfer": process_file_transfer_request(request['payload'], c) sd.close()
def get_authenticated(sd):
    """Run the full four-message EKE authentication handshake over `sd`
    and return the server's success flag."""
    utils.print_dict("User Credentials", get_user_cred())
    client_send_message_1(sd)
    client_recv_message_2(sd)
    client_send_message_3(sd)
    return client_recv_message_4(sd)
def client_recv_message_2(sd):
    """Client side of EKE message 2: receive the server's EKE value and
    the nonce it encrypted, and cache both in the module-level client_db."""
    utils.print_trancsation_message("Client receiving message 2")
    msg_2 = sd.recv(DATA_TRANSFER_SIZE)
    message_2_payload = utils.get_dict_from_string(msg_2)
    utils.print_dict("Payload 2", message_2_payload)
    client_db['server_eke'] = message_2_payload['server_eke']
    client_db['server_enc_nonce'] = message_2_payload['server_encrypted_nonce']
    utils.print_dict("Client db", client_db)
def client_recv_message_4(sd):
    """Client side of EKE message 4: receive the server's verdict and its
    encrypted copy of the client nonce; return the success flag."""
    utils.print_trancsation_message("Client receiving message 4")
    msg_4 = sd.recv(DATA_TRANSFER_SIZE)
    message_4_payload = utils.get_dict_from_string(msg_4)
    utils.print_dict("Payload 4", message_4_payload)
    # FIX: the value was previously stored under the empty-string key
    # (client_db[''] = ...), an obvious typo.
    client_db['server_encrypted_Na'] = message_4_payload['server_encrypted_Na']
    utils.print_dict("Client db", client_db)
    return message_4_payload['success']
def diff(local_tree_dict=None, remote_tree_dict=None):
    """Compare a local directory tree against the remote one.

    Returns a dict with three sub-dicts (keyed by the module-level
    constants `remote_only`, `modify`, `local_only`) mapping relative
    paths to tree entries, or None when the remote tree cannot be fetched.

    NOTE(review): mutates the passed-in local tree (entries are popped).
    """
    diff_dict = {
        remote_only: dict(),
        modify: dict(),
        local_only: dict(),
    }
    remote_path = None
    if not remote_tree_dict or not local_tree_dict:
        if not test():
            print("Error: test is error!")
            return None
        ip, remote_path, auth = read_config()
        local_path = "."
        remote_tree_dict = remote_tree(ip, remote_path, auth)
        if remote_tree_dict is None:
            print("Error: server maybe timeout.")
            # FIX: previously execution fell through and crashed below
            # when indexing the None remote tree.
            return None
        local_tree_dict = utils.get_dir_tree(local_path)
    relative_path = "."
    # Breadth-first walk over (path, remote-subtree, local-subtree) triples.
    _d = [(relative_path, list(remote_tree_dict.values())[0],
           list(local_tree_dict.values())[0])]
    while len(_d) > 0:
        _relative_path, _r, _l = _d.pop(0)
        for k in _r:
            _now_relative_path = tree_join_path(_relative_path, k)
            if k in _l:
                if isinstance(_r[k], dict):
                    if isinstance(_l[k], dict):
                        # remote is a directory, local is a directory
                        _d.append((_now_relative_path, _r[k], _l[k]))
                    else:
                        # remote is a directory, local is a file
                        diff_dict[remote_only][_now_relative_path] = _r[k]
                        diff_dict[local_only][_now_relative_path] = _l[k]
                else:
                    if isinstance(_l[k], dict):
                        # remote is a file, local is a directory
                        diff_dict[remote_only][_now_relative_path] = _r[k]
                        diff_dict[local_only][_now_relative_path] = _l[k]
                    else:
                        # remote is a file, local is a file
                        if _r[k] != _l[k]:
                            diff_dict[modify][
                                _now_relative_path] = "{}_{}".format(
                                    _r[k], _l[k])
                _l.pop(k)
            else:
                diff_dict[remote_only][_now_relative_path] = _r[k]
        # Whatever remains in the local subtree exists only locally.
        for k in _l:
            _now_relative_path = tree_join_path(_relative_path, k)
            diff_dict[local_only][_now_relative_path] = _l[k]
    print("Diff:\nRemote root: '{}'".format(remote_path))
    utils.print_dict(diff_dict)
    return diff_dict
def process_message_1(payload, server_nonce, connection):
    """Server side of EKE message 1: derive the shared key and reply with
    message 2 (server EKE + server nonce encrypted under KAS).

    Returns the server KAS as an int for use by process_message_3().
    """
    utils.print_trancsation_message("Server processing message 1")
    utils.print_dict("process_secret_key_request", payload)
    # Getting password digest for user name
    pass_digest = auth.get_password_for_user(payload['username'])
    print "\n"
    print "pass_digest: ", pass_digest
    # Get client dh key from client EKE
    client_dhkey = encrypt.decrypt_eke(payload['client_eke'], pass_digest)
    print "\n"
    print "client_dhkey: ", client_dhkey
    server_Xa = get_sever_secret_key()
    # Generate server dh key
    server_dhkey = encrypt.generate_dh_key(server_Xa,
                                           int(payload['generator']),
                                           int(payload['prime_num']))
    print "server_dhkey: ", server_dhkey
    # Generate server EKE
    server_eke = encrypt.generate_EKE(server_dhkey, pass_digest)
    print "\n"
    print "server_eke: ", server_eke
    # Generate server KAS
    server_kas = encrypt.generate_kas(client_dhkey, server_Xa,
                                      payload['prime_num'])
    print "\n"
    print "server_kas: ", server_kas
    print "\n"
    print "server nonce: ", server_nonce
    # message_2_payload = {"server_eke": None, "server_encrypted_nonce" : None}
    # Encrypt Nonce using KAS
    encrypted_server_nonce = encrypt.encrypt_nonce(int(server_kas),
                                                   (server_nonce))
    print "encrypted_nonce: ", encrypted_server_nonce
    # Send Server EKE and Encrypted Nonce to client
    message_2_payload = {
        "server_eke": server_eke,
        "server_encrypted_nonce": encrypted_server_nonce
    }
    utils.print_dict("Server EKE and Encrypted Nonce", message_2_payload)
    connection.sendall(str(message_2_payload))
    return int(server_kas)
def process_message_3(payload, server_kas, server_Ns, connection):
    """Server side of EKE message 3: verify the client's echoed server
    nonce, then reply with message 4 (success flag + client nonce
    encrypted under KAS). Sends a failure payload and returns early on
    any mismatch or decryption problem."""
    utils.print_trancsation_message("Server processing message 3")
    utils.print_dict("Message 3 request: ", payload)
    ser_final_nonce = encrypt.decrypt_nonce(
        server_kas, payload['client_encrypted_final_nonce'])
    ser_dec_ns = encrypt.get_ns_from_final_nonce(ser_final_nonce)
    ser_side_na = encrypt.get_na_from_final_nonce(ser_final_nonce)
    print "payload['client_encrypted_final_nonce']: ", payload[
        'client_encrypted_final_nonce']
    print "ser_final_nonce: ", ser_final_nonce
    print "ser_dec_ns : ", ser_dec_ns
    print "server_Ns : ", server_Ns
    print "ser_side_na: ", ser_side_na
    try:
        i_ser_dec_ns = int(ser_dec_ns)
    except:
        # Non-numeric nonce => decryption failed (wrong password/key).
        # NOTE(review): "sfasf" is leftover debug output.
        print "sfasf"
        message_4_payload['success'] = False
        message_4_payload['server_encrypted_Na'] = "naren"
        connection.sendall(str(message_4_payload))
        return
    if (server_Ns == i_ser_dec_ns):
        print "Client successfully authenticated to server"
        utils.print_line()
        authorized_users[payload['username']] = True
        message_4_payload['success'] = True
    else:
        print "Hack Alert"
        message_4_payload['success'] = False
        message_4_payload['server_encrypted_Na'] = "naren"
        connection.sendall(str(message_4_payload))
        return
    # Server sending message 4
    utils.print_trancsation_message("Server sending message 4")
    server_enc_na = encrypt.encrypt_nonce(server_kas, ser_side_na)
    print "server_enc_na: ", server_enc_na
    message_4_payload['server_encrypted_Na'] = server_enc_na
    connection.sendall(str(message_4_payload))
    utils.print_dict("message_4_payload", message_4_payload)
    # Server sending message 5
    utils.print_trancsation_message("Server sending message 4")
def run(self):
    """For each SIMIN topic, expand its seed words through load_words()
    twice and collect the resulting tag weights per topic."""
    res = {}
    from simin import SIMIN
    for topic, words in SIMIN.iteritems():
        words = words.split('|')
        # First expansion pass from the seed words.
        tags = self.load_words(words)
        print_dict(tags, cmp_key=lambda x: -x[1])
        words = tags.keys()
        print '|'.join(OrderedDict(sorted(tags.iteritems(), key=lambda x: -x[1])).keys())
        # Second pass: re-expand using the tags found above.
        tags = self.load_words(words)
        print_dict(tags, cmp_key=lambda x: -x[1])
        print '|'.join(OrderedDict(sorted(tags.iteritems(), key=lambda x: -x[1])).keys())
        res[topic] = tags
    return res
def start_crawl(cnt=28, write_file=False, show_output=True):
    """Crawl up to `cnt` (capped at 28) recent PTWC tsunami messages,
    optionally printing each record and/or writing them all to JSON."""
    if cnt > 28:
        cnt = 28
    try:
        if (write_file):
            dict_list = []
        landing_page = utils.crawl_page(URL)
        data_rows = landing_page.findAll('tr', {"class": ["gr", "gr_even"]})
        print('PTWC (Pacific Tsunami Warning Center) (Past 30 days)')
        print('URL:', URL)
        for idx, row in enumerate(data_rows):
            if (idx >= cnt):
                break
            datum_dict = {
                "time": row.findAll('td')[0].text,
                "region": row.findAll('td')[1].text,
                "type": row.findAll('td')[2].text,
                "details_link": URL + row.findAll('td')[4].findAll('a')[1]['href']
            }
            # Pull the free-text EVALUATION paragraph from the detail page.
            details_page = utils.crawl_page(
                datum_dict['details_link']).find('body').text
            evaluation_re = 'EVALUATION(\r\n|\r|\n){2}([ \w.]+(\r\n|\r|\n))+(\r\n|\r|\n)'
            evaluation_match = re.search(evaluation_re, details_page)
            if (evaluation_match):
                # Strip the header and all layout whitespace from the match.
                replace_dict = {"EVALUATION": '', "\r": '', "\n": '', "\t": ''}
                evaluation_match = utils.replace_all(evaluation_match.group(0),
                                                     replace_dict)
                datum_dict['evaluation'] = evaluation_match
            else:
                print('NO EVALUATION FOUND')
            if (show_output):
                utils.print_dict(datum_dict)
            if (write_file):
                dict_list.append(datum_dict)
        if (write_file):
            utils.write_json_file(WEBSITE, dict_list)
    except Exception as e:
        print('err:', str(e))
def get_admins(message):
    """Refresh the persisted admin list for the chat and reply with it."""
    # Gets all human administrators of a chat
    admins_objects = [
        admin for admin in bot.get_chat_administrators(config.chat_id)
        if not admin.user.is_bot
    ]
    # Gets every administrator's id, appends the default admins' ids
    admins_ids = [ad.user.id for ad in admins_objects] + [*config.admins_default]
    # Writes all admins' ids and information to data
    # Removes obsolete admins
    # Sends a message with the output
    with shelve.open(config.data_name, 'c', writeback=True) as data:
        data['admins'] = config.admins_default if not data.get(
            'admins') else data['admins']
        for admin in admins_objects:
            admin_id = admin.user.id
            # Prefer @username; fall back to the first name.
            admin_data = '@' + admin.user.username if admin.user.username else admin.user.first_name
            if admin_id not in data['admins']:
                data['admins'][admin_id] = admin_data
        # Drop stored ids that are no longer admins of the chat.
        for admin_id in list(data['admins']):
            if admin_id not in admins_ids:
                del data['admins'][admin_id]
        # NOTE(review): assumes print_dict returns a formatted string; if
        # it only prints and returns None the reply will contain 'None'.
        bot.reply_to(
            message,
            'List of bot\'s admins:\n{}'.format(print_dict(data['admins'])))
def main(self):
    """Build, per SIMIN topic, a popularity-sorted map of seed words that
    have an icon and more than 100 index entries; print each topic's map."""
    res = {}
    from simin import SIMIN
    for topic, words in SIMIN.iteritems():
        words = words.split('|')
        print topic
        res[topic] = {}
        for word in words:
            index = self.web.index.find(word)
            if index is None:
                continue
            # Keep only well-indexed words that also have an icon.
            if len(index['index']) > 100 and index['icon']:
                res[topic][word] = len(index['index'])
        res[topic] = OrderedDict(sorted(res[topic].iteritems(), key=lambda x: -x[1]))
        print_dict(res[topic], cmp_key=lambda x: -x[1])
        print '|'.join(res[topic].keys())
    return res
def main(self):
    """Build, per SIMIN topic, a popularity-sorted map of seed words that
    have an icon and more than 100 index entries; print each topic's map."""
    res = {}
    from simin import SIMIN
    for topic, words in SIMIN.iteritems():
        words = words.split('|')
        print topic
        res[topic] = {}
        for word in words:
            index = self.web.index.find(word)
            if index is None:
                continue
            # Keep only well-indexed words that also have an icon.
            if len(index['index']) > 100 and index['icon']:
                res[topic][word] = len(index['index'])
        res[topic] = OrderedDict(
            sorted(res[topic].iteritems(), key=lambda x: -x[1]))
        print_dict(res[topic], cmp_key=lambda x: -x[1])
        print '|'.join(res[topic].keys())
    return res
def run(self):
    """For each SIMIN topic, expand its seed words through load_words()
    twice and collect the resulting tag weights per topic."""
    res = {}
    from simin import SIMIN
    for topic, words in SIMIN.iteritems():
        words = words.split('|')
        # First expansion pass from the seed words.
        tags = self.load_words(words)
        print_dict(tags, cmp_key=lambda x: -x[1])
        words = tags.keys()
        print '|'.join(
            OrderedDict(sorted(tags.iteritems(), key=lambda x: -x[1])).keys())
        # Second pass: re-expand using the tags found above.
        tags = self.load_words(words)
        print_dict(tags, cmp_key=lambda x: -x[1])
        print '|'.join(
            OrderedDict(sorted(tags.iteritems(), key=lambda x: -x[1])).keys())
        res[topic] = tags
    return res
def test_article_all(): urls = get_test_urls() res = [] for url in urls: try: html = get_or_cache(url) extractor = Article(html, url) res.append({ 'url':url, 'article':extractor.article, 'selector':extractor.selector, }) from utils import print_dict print_dict(extractor.article) print_dict(extractor.selector) except: print 'error', url print '-' * 80 print len(urls) return json.dumps(res)
def analyze(self, arch, genotype):
    """Analyze one architecture and print its op/width/depth statistics.

    :param arch: name of the analyzed arch
    :param genotype: a standard Genotype in genotypes.py or a genotype list consist of many Genotypes
    :return: None
    """
    self._initialize()
    # A list is treated as a mixed architecture: analyze each member.
    if isinstance(genotype, list):
        for idx, member in enumerate(genotype):
            print('mixed_arch_%d' % idx, member)
            self.geno_analyze(member)
    if isinstance(genotype, Genotype):
        print(arch, genotype)
        self.geno_analyze(genotype)
    # Report normalized op statistics and width/depth info when collected.
    if self.normal_op:
        print_dict(dict_normalize(self.normal_dict), sort=True,
                   info='****** %s normal-op ******' % arch, accuracy=3)
    if self.reduce_op:
        print_dict(dict_normalize(self.reduce_dict), sort=True,
                   info='****** %s reduce-op ******' % arch, accuracy=3)
    if self.width_depth:
        print_dict(self.geno_info,
                   info='****** %s genotype-width & depth ******' % arch)
    print('')
def start_crawl(cnt=10000, write_file=False, show_output=True):
    """Crawl up to `cnt` articles from the ECHO news listing, optionally
    printing each record and/or writing them all to a JSON file."""
    try:
        news_end_points = crawl_news_end_point(cnt, write_file, show_output)
        # news_end_points = ['/echo/news/sahel-crisis-eu-gives-%E2%82%AC142-million-humanitarian-aid-2014_en']
        if(write_file):
            dict_list = []
        cnt_down = len(news_end_points)
        for end_point in news_end_points:
            page = utils.crawl_page(URL+end_point)
            # Publication date appears as dd/mm/yyyy inside a labelled div.
            date_re = '(\d){2}/(\d){2}/(\d){4}'
            publication_date = page.find('div', {
                "class": "row c_left field field-field_news_publication_date last"
            }).text
            publication_date = re.search(date_re, publication_date).group(0)
            image_url = page.find('div', {
                "class": "field-item even"
            }).img
            # The article image is optional.
            if image_url:
                image_url = image_url['src'].strip()
            else:
                image_url = ''
            datum_dict = {
                "title": page.find('h1', {
                    "class": "title"
                }).text.strip(),
                "image": image_url,
                "content": page.find('div', {
                    "class": "row c_left field field-body"
                }).text.strip(),
                "publication_date": publication_date.strip()
            }
            if(show_output):
                utils.print_dict(datum_dict)
            print('cnt left:', cnt_down)
            cnt_down = cnt_down - 1
            if(write_file):
                dict_list.append(datum_dict)
        if(write_file):
            utils.write_json_file(WEBSITE, dict_list)
    except Exception as e:
        print('err:', e)
def analize(file_name):
    """Parse one raw ICO dump and print people/role statistics."""
    # `raw_file` avoids shadowing the builtin name `file`.
    with open('data/raw/' + file_name, 'r') as raw_file:
        icos = json.load(raw_file)
    People.get_people_from_raw_data(icos, file_name)
    People.to_parsed_json(file_name)
    People.uncertain_roles_to_parsed_json(file_name)
    headline_stats = [
        "Count ICOs:\t{}".format(str(len(icos))),
        "Count People:\t{}".format(People.count_people),
        "Count Roles:\t{}".format(People.count_roles),
    ]
    print()
    for line in headline_stats:
        print(line)
    print()
    print("Count people by merging names of roles:")
    print("\tPEOPLE\tROLE")
    print_dict(People.count_people_by_role_name)
    print()
    print("Count people by merging the count of roles by person:")
    print("\tPEOPLE\tROLES")
    print_dict(People.count_people_by_role_count)
    print()
def main():
    """Debug driver: load the cached clustering result and print, for each
    sufficiently frequent word, its strongly co-clustered neighbours."""
    a = cache_key('cluster-test', base_cluster, 100000)
    res, cnt, loo = a['res'], a['cnt'], a['loo']
    b = 0
    q = {}
    for w, v in res.iteritems():
        #v = dict(filter(lambda x: x[1] > 1, v.iteritems()))
        # if b >= 10000:
        # break
        # b += 1
        # Skip rare words entirely.
        if loo[w] < 10:
            continue
        # Keep only neighbours with co-occurrence weight above 5.
        v = dict(filter(lambda x: x[1] > 5, res[w].iteritems()))
        q[w] = cnt[w]
        if v:
            print_dict(v, title='word: %s %d' % (w, cnt[w]),
                       cmp_key=lambda x: x[1])
    print_dict(q, cmp_key=lambda x: x[1])
def test(url):
    """Sample up to 10 shallow same-site URLs from `url`'s page, split each
    page title on the first separator found ('_', '|' or '-') and print
    the five most frequent title tokens."""
    html = get_or_cache(url)
    urls = html2urls(html, url, name=False)
    words = defaultdict(int)
    u = set()
    # Widen the accepted path depth (2 -> 3 -> 4) until enough same-site
    # URLs are collected; thresholds replicate the original cascade
    # (unconditional first pass, then <10 and <20 checks).
    for max_depth, enough in ((2, 0), (3, 10), (4, 20)):
        if enough and len(u) >= enough:
            continue
        for i in urls:
            if i.startswith(url) and len(get_path(i).split('/')) <= max_depth:
                u.add(i)
    urls = list(u)[:10]
    for i in urls:
        res = url2meta(i, get=get_or_cache)
        if res is None:
            continue
        title = res['title']
        # Only the first separator present in the title is used,
        # matching the original if/elif chain.
        for sep in ('_', '|', '-'):
            if sep in title:
                for word in title.split(sep):
                    word = word.strip()
                    if word:
                        words[word] += 1
                break
    print_dict(words, cmp_key=lambda x: -x[1], limit=5)
def main():
    """Debug driver: sample segmented articles from Mongo and print the
    keywords extracted for each one."""
    mongo = MongoSpider(conf.mongo_spider)
    redis_word = redis.Redis(**conf.redis_word)
    keyword = Keyword(redis_word)
    # Only articles that already have a positive segmentation version.
    for article in mongo.article.find({'v.seg':{'$gt':0}}).skip(1000).limit(10):
        if article is None or article['v']['seg'] <= 0:
            continue
        word = mongo.word_file.get(article['_id'])
        if word is not None:
            word = json.loads(word)
            title = article['title']
            words = word['words']
            res = keyword.make(title, words)
            if res is not None:
                print
                print
                print
                print '*' * 100
                print 'http://www.haoku.net/articles/%s.html' % article['_id']
                print_list(res['keys'], title='keys of %s' % title)
                print_dict(res['words'], title='index of %s' % title,
                           cmp_key=lambda x: -x[1])
def test_article(url):
    """HTTP debug endpoint: run the article extractor on `url` and return
    the article plus the selectors used, as a JSON string."""
    debug = True if request.args.get('debug') == 'true' else False
    # Strip fragment and query string before fetching.
    url = url.split('#')[0].split('?')[0]
    if not url.startswith('http://'):
        return 'url is not startswith http://'
    add_test_url(url)
    html = get_or_cache(url)
    extractor = Article(html, url, debug=debug)
    article = extractor.article
    selector = extractor.selector
    from utils import print_dict
    print_dict(article)
    print_dict(selector)
    # if extractor.pages:
    #     article['content'] = ArticleMerger(
    #         url,
    #         extractor.title,
    #         fetch_urls(extractor.pages, handle=get_or_cache),
    #         debug=debug,
    #         **selector
    #     ).content
    return json.dumps({'url':url, 'article':article, 'selector':selector})
def client_send_message_3(sd):
    """Client side of EKE message 3: derive KAS, decrypt the server nonce,
    append the client nonce and send both back encrypted under KAS."""
    utils.print_trancsation_message("Client sending message 3")
    message_3_payload['username'] = client_db['username']
    server_dhkey = encrypt.decrypt_eke(client_db['server_eke'],
                                       client_db['pwd_digest'])
    client_kas = encrypt.generate_kas(server_dhkey, client_db['client_Xa'],
                                      client_db['prime_num'])
    client_db['client_kas'] = client_kas
    client_db['client_Na'] = encrypt.get_random_number()
    print "client_db['server_enc_nonce']", client_db['server_enc_nonce']
    c_server_nonce = encrypt.decrypt_nonce(client_kas,
                                           client_db['server_enc_nonce'])
    print "c_server_nonce:", c_server_nonce
    # Final nonce = server nonce concatenated with the client nonce.
    final_nonce_c = encrypt.concat(c_server_nonce, client_db['client_Na'])
    # Client encypting final nonce with kas
    enc_final_nonce_c = encrypt.encrypt_nonce(client_kas, final_nonce_c)
    message_3_payload['username'] = client_db['username']
    message_3_payload['client_encrypted_final_nonce'] = enc_final_nonce_c
    client_db['client_encrypted_final_nonce'] = enc_final_nonce_c
    request['request_type'] = 'message_3'
    request['payload'] = message_3_payload
    utils.print_dict("Payload 3", request)
    sd.sendall(str(request))
    utils.print_dict("Client db", client_db)
    utils.print_trancsation_message(
        "With this send Server will know if the user is valid or not")
def __init__(self, root_path, campaigns, channel, norm_array, sig_sum,
             bkg_sum, bkg_list, sig_list, data_list, selected_features,
             reset_feature, reset_feature_name, rm_negative_weight_events,
             cut_features, cut_values, cut_types, test_rate, val_split,
             batch_size, id_dict, missing_train, missing_sig, use_PCA,
             pca_components):
    """Store the dataset/configuration options on the instance.

    NOTE(review): parameters are stored as-is; `missing_train` causes
    signals listed in `missing_sig` to be dropped from `sig_list`.
    """
    super().__init__()
    self.root_path = root_path
    self.campaigns = campaigns
    self.channel = channel
    self.norm_array = norm_array
    self.sig_sum = sig_sum
    self.bkg_sum = bkg_sum
    self.bkg_list = bkg_list
    self.sig_list = sig_list
    self.data_list = data_list
    self.selected_features = selected_features
    self.reset_feature = reset_feature
    self.reset_feature_name = reset_feature_name
    self.rm_negative_weight_events = rm_negative_weight_events
    self.cut_features = cut_features
    self.cut_values = cut_values
    self.cut_types = cut_types
    self.test_rate = test_rate
    self.val_split = val_split
    self.batch_size = batch_size
    self.id_dict = id_dict
    # Map each selected feature name to its column index.
    self.features_dict = dict(
        zip(self.selected_features, range(len(self.selected_features))))
    print_dict(self.features_dict, "Features")
    self.missing_sig = missing_sig
    self.missing_train = missing_train
    if self.missing_train:
        # Exclude signal samples that are unavailable for training.
        self.sig_list = [i for i in self.sig_list if i not in missing_sig]
    self.use_PCA = use_PCA
    self.pca_components = pca_components
def run(self):
    """Iteratively expand a hard-coded seed list of tech-brand words via
    load_words(), printing the tag weights after each of three passes."""
    words = u'小米|安卓|魅族|华为荣耀|HTC|iPhone|iPad|智能手机|智能硬件|ios|苹果|谷歌|三星|锤子|联想'.split('|')
    tags = self.load_words(words)
    print_dict(tags, cmp_key=lambda x: -x[1])
    # Feed the discovered tags back in as the next pass's seed words.
    words = tags.keys()
    tags = self.load_words(words)
    print_dict(tags, cmp_key=lambda x: -x[1])
    words = tags.keys()
    tags = self.load_words(words)
    print_dict(tags, cmp_key=lambda x: -x[1])
def run(self):
    """Iteratively expand a hard-coded seed list of tech-brand words via
    load_words(), printing the tag weights after each of three passes."""
    words = u'小米|安卓|魅族|华为荣耀|HTC|iPhone|iPad|智能手机|智能硬件|ios|苹果|谷歌|三星|锤子|联想'.split(
        '|')
    tags = self.load_words(words)
    print_dict(tags, cmp_key=lambda x: -x[1])
    # Feed the discovered tags back in as the next pass's seed words.
    words = tags.keys()
    tags = self.load_words(words)
    print_dict(tags, cmp_key=lambda x: -x[1])
    words = tags.keys()
    tags = self.load_words(words)
    print_dict(tags, cmp_key=lambda x: -x[1])
def test_word(self, word):
    """Expand `word` into related tags over three load passes and return
    the final tag->weight dict.

    First pass keeps only tags with weight >= 5; the later passes call
    load_words with increasing depth arguments (3, then 4).
    """
    tags = self.load_word(word)
    tags = dict(filter(lambda x: x[1] >= 5, tags.iteritems()))
    print_dict(tags, cmp_key=lambda x: -x[1])
    words = tags.keys()
    tags = self.load_words(words, 3)
    print_dict(tags, cmp_key=lambda x: -x[1])
    words = tags.keys()
    tags = self.load_words(words, 4)
    print_dict(tags, cmp_key=lambda x: -x[1])
    return tags
import os
import sys

# Make the package root importable when running this demo directly.
SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__))
sys.path = [os.path.join(SCRIPT_DIR + '/../')] + sys.path

from mongojoin import MongoJoin
from mongojoin import MongoCollection
from utils import print_dict, print_list

if __name__ == "__main__":
    # Demo: self-join the supplier collection on supplier_id and print
    # the result of each join flavour.
    m_1 = MongoCollection("test", "supplier", ["supplier_id", "name"], {})
    c_1 = m_1.get_mongo_cursor()
    print "************************ COLLECTION : SUPPLIER ************************"
    c = c_1.find({})
    print_list(c)
    aggregator = MongoJoin(m_1, m_1, ["supplier_id"])
    print "\n************************ INNER JOIN **********************"
    print_dict(aggregator.inner())
    print "\n********************** LEFT OUTER JOIN **********************"
    print_dict(aggregator.left_outer())
    print "\n********************** RIGHT OUTER JOIN *********************"
    print_dict(aggregator.right_outer())
    print "\n********************** FULL OUTER JOIN *********************"
    print_dict(aggregator.full_outer())
def main(args):
    """Run one olfactory-bulb network simulation (glomeruli, mitral and
    granule cells) under Brian and return ``(record, results)`` where
    ``record`` holds the parameter set plus connection-matrix arrays and
    ``results`` holds data arrays and synchrony indexes.

    NOTE(review): relies on Brian names (clear, defaultclock, Network,
    network_operation, Equations, diag, ones, dot, where, show) being in
    the module namespace, presumably via a star import outside this
    view — confirm.

    :param args: parsed CLI namespace; uses psfile, no_brian_output,
                 full_ps, no_summary, no_plot.
    """
    import model_utils as mutils
    # Set the parameters from the specified file BEFORE any model.* import
    import model
    mutils.set_model_ps(args.psfile)
    import numpy as np
    import analysis
    import plotting
    from utils import print_dict, pairs
    from scipy.signal import resample
    from model.glomerule import Glomerule
    from model.mitral_cells import MitralCells
    from model.synapse import Synapse
    from model.granule_cells import GranuleCells

    # Reset old stuff from Brian memory
    clear(erase=True, all=True)
    defaultclock.reinit()
    # Initialize random generator (necessary mainly for parallel simulations)
    np.random.seed()

    """
    Parameters
    ----------
    Get the parameter values from the `ps` module, which in turn gets the values
    from the file specified in parameters.py. Set some aliases for the different
    cell population sizes. Also check that there is an even number of cells for
    each column. Finally set some simulation parameters.
    """
    psmt = model.PARAMETERS['Mitral']
    psgr = model.PARAMETERS['Granule']
    pscommon = model.PARAMETERS['Common']
    n_mitral = pscommon['N_mitral']
    # One glomerulus and one granule (population) per sub-population.
    n_glomeruli = n_granule = n_subpop = pscommon['N_subpop']
    # check to have an even number of mitral in each sub-population
    assert n_mitral % n_subpop == 0, \
        "N_mitral is not a multiple of the number of sub-populations N_subpop."
    n_mitral_per_subpop = n_mitral/n_subpop
    defaultclock.dt = pscommon['simu_dt']
    simu_length = pscommon['simu_length']

    """
    Population Initialization
    -------------------------
    1. glomeruli
    *. synapses between granule and mitral cells
    3. mitral cells
    4. granule cells
    """
    # Glomeruli
    glom = Glomerule()
    glom.add_eqs()
    glom.make_pop(n_glomeruli*n_mitral_per_subpop)
    # Synapses (granule -- mitral)
    synexc = Synapse(synapse_type='exc')  # excitatory synapse
    synexc.set_eqs_model()
    syninhib = Synapse(synapse_type='inhib')  # inhibitory synapse
    syninhib.set_eqs_model()
    # Mitral cells
    mt = MitralCells()
    mt_supp_eqs = {'var': ['- I_syn', '- g_input*V'],
                   'eqs': [synexc.get_eqs_model(),
                           Equations("g_input : siemens*meter**-2")]}
    mt.add_eqs(supp_eqs=mt_supp_eqs)
    mt.make_pop(n_mitral)
    # Random initial membrane potentials, uniform in [V_r, V_t).
    mt.pop.V = (psmt['V_t'] - psmt['V_r'])*np.random.random_sample(np.shape(mt.pop.V)) \
               + psmt['V_r']
    # Granule Cells
    gr = GranuleCells()
    gr_supp_eqs = {'var': ['-I_syn'],
                   'eqs': [syninhib.get_eqs_model()]}
    gr.add_eqs(supp_eqs=gr_supp_eqs)
    gr.make_pop(n_granule)
    gr.pop.V_D = psgr['E_L']
    gr.pop.V_S = psgr['E_L']

    """
    Connecting Populations
    ----------------------
    1. Glomeruli and mitral cells
    2. Mitral cells and granule cells
    """
    # Connecting mitral cells to glomeruli (one-to-one: identity matrix)
    glmt_connections = diag(ones(n_mitral))

    # Glomeruli--Mitral interactions
    @network_operation(when='start')
    def mt_input():
        mt.pop.g_input = dot(glom.pop.g, glmt_connections)

    # Connecting sub-population of mitral cells to granule cells
    mtgr_connections = mutils.intrapop_connections(n_mitral, n_granule,
                                                   n_subpop, n_mitral_per_subpop)
    # Inter subpopulation connectivities
    inter_conn_rate = pscommon['inter_conn_rate']
    inter_conn_strength = pscommon['inter_conn_strength']
    homeostasy = pscommon['homeostasy']
    mtgr_connections, grmt_connections = mutils.interpop_connections(mtgr_connections, n_mitral,
                                                                     n_subpop, n_mitral_per_subpop,
                                                                     inter_conn_rate, inter_conn_strength,
                                                                     homeostasy)

    # Mitral--Granule interactions
    @network_operation(when='start')
    def graded_synapse():
        """Computes granule and mitral s_syn"""
        # T marks refractory mitral cells (1.) vs active ones (0.).
        mt.pop.state('T')[:] = 0.
        mt.pop.state('T')[mt.pop.get_refractory_indices()] = 1.
        gr.pop.s_syn = dot(mt.pop.s, mtgr_connections)
        mt.pop.s_syn = dot(gr.pop.s, grmt_connections)

    @network_operation(when='start')
    def sum_s():
        """Computes granule self s_syn (for its glomerular column only)"""
        for subpop in xrange(n_subpop):
            start = subpop*n_mitral_per_subpop
            stop = start + n_mitral_per_subpop
            gr.pop.s_syn_self[subpop] = sum(mt.pop.state('s')[start:stop])

    @network_operation(when='after_groups')
    def keep_reset():
        # Clamp refractory mitral cells at the reset potential.
        mt.pop.state('V')[mt.pop.get_refractory_indices()] = psmt['V_r']

    """
    Simulation Monitoring
    ---------------------
    Monitor state variables for the different populations.
    """
    # NOTE(review): ('g') is a plain string, not a 1-tuple — mutils.monit
    # presumably accepts both forms; confirm.
    glom_ps = ('g')
    mt_ps = ('s', 's_syn', 'V')
    gr_ps = ('V_D', 's_syn', 's', 's_syn_self')
    # Simulation monitors
    rec_neurons = True  # Must be set to True if we want accurate MPS and STS
    timestep = int(pscommon['resample_dt']/pscommon['simu_dt'])
    monit_glom = mutils.monit(glom.pop, glom_ps, timestep, reclist=rec_neurons)
    monit_mt = mutils.monit(mt.pop, mt_ps, timestep, reclist=rec_neurons, spikes=True)
    monit_gr = mutils.monit(gr.pop, gr_ps, timestep)

    """
    Running Simulation
    ------------------
    Create Network object and put everything simulation related in it.
    Then run this network.
    """
    # Gathering simulation objects
    netw = Network(glom.pop, mt.pop, gr.pop,
                   mt_input, graded_synapse, keep_reset, sum_s,
                   [m for m in monit_glom.values()],
                   [m for m in monit_mt.values()],
                   [m for m in monit_gr.values()])
    # Simulation run
    if args.no_brian_output:
        report_output = None
    else:
        report_output = "text"
    netw.run(simu_length, report=report_output)

    """
    Information Output
    ------------------
    """
    if args.full_ps:
        print 'Full set of parameters:'
        print_dict(model.PARAMETERS)
    # Analyse only the signal after the burn-in period.
    burnin = pscommon['burnin']
    times = monit_gr['s'].times
    sig_start = where(times > burnin)[0][0]
    sts_indexes = {}
    mps_indexes = {}
    fftmax = {}
    mps_indexes['whole'] = analysis.mps(monit_mt['V'], 0, n_mitral, sig_start)
    gr_s_syn_self_whole = np.zeros(monit_gr['s_syn_self'][0].shape)
    # MPS and STS computation for subpopulation
    for subpop in xrange(n_subpop):
        start = subpop*n_mitral_per_subpop
        stop = start + n_mitral_per_subpop
        sts = analysis.sts(monit_gr['s_syn_self'][subpop], monit_mt['spikes'],
                           start, stop, sig_start, burnin)
        sts_indexes[subpop] = sts
        gr_s_syn_self_whole += monit_gr['s_syn_self'][subpop]
        mps = analysis.mps(monit_mt['V'], start, stop, sig_start)
        mps_indexes[subpop] = mps
    # STS for the whole population
    sts_indexes['whole'] = analysis.sts(gr_s_syn_self_whole, monit_mt['spikes'],
                                        0, n_mitral, sig_start, burnin)
    # FFT Max index
    fftmax = analysis.fftmax(monit_gr['s_syn_self'], n_subpop,
                             pscommon['resample_dt'], sig_start)
    # Peak distances index (only meaningful with >1 sub-population)
    peak_distances = {}
    if n_subpop > 1:
        for sub_i, sub_j in pairs(n_subpop):
            sig1 = monit_gr['s_syn_self'][sub_i]
            sig2 = monit_gr['s_syn_self'][sub_j]
            if not peak_distances.has_key(sub_i):
                peak_distances[sub_i] = {}
            pd_index = analysis.peak_dist_circ_index(sig1, sig2)
            peak_distances[sub_i][sub_j] = {}
            peak_distances[sub_i][sub_j]['mean'] = pd_index[0]
            peak_distances[sub_i][sub_j]['disp'] = pd_index[1]
    if not args.no_summary:
        print '\nParameters: using', args.psfile
        print 'Populations:', n_subpop, 'glomerular columns;',
        print n_mitral, 'mitral cells;', n_granule, 'granule cells.'
        print 'Times:', simu_length, 'of simulation; dt =', defaultclock.dt, '.'
        print 'Indexes: STS =', sts_indexes, '\nMPS =', mps_indexes
        print 'FFT peaks (Hz):', fftmax
        print 'Peak distances index:', peak_distances

    """
    Plotting
    --------
    Plot monitored variables and a scatter plot.
    """
    if not args.no_plot:
        # Raster plot
        spikes_it = monit_mt['spikes'].it
        plotting.raster_plot(spikes_it[0], spikes_it[1], mtgr_connections)
        # Membrane potentials
        if not rec_neurons:  # if we only have a couple of recorded neurons
            plotting.memb_plot_figure(monit_mt, monit_gr, rec_neurons, n_granule)
        # Granule synapses
        plotting.granule_figure(monit_gr, pscommon)
        show()

    """
    Simulation records
    ------------------
    Put numpy arrays in var `results` to save them into the simulation record.
    Note: the variable must be monitored by Brian.
    """
    # Add parameters
    ps_arrays = {'mtgr_connections': (mtgr_connections,
                                      "Connection matrix from mitral (rows) to granules (columns)")}
    # Add results
    array_spikes_it = np.array((monit_mt['spikes'].it[0],
                                monit_mt['spikes'].it[1]))
    results = {}
    # Mean inputs
    # NOTE(review): the loop variable `glom` below shadows the Glomerule
    # object created above; harmless here since glom is no longer used,
    # but worth renaming.
    mean_inputs = np.ndarray((n_glomeruli, monit_glom['g'].values.shape[1]))
    for glom in xrange(n_glomeruli):
        start_subpop = glom*n_mitral_per_subpop
        stop_subpop = start_subpop + n_mitral_per_subpop
        mean_inputs[glom] = np.mean(monit_glom['g'].values[start_subpop:stop_subpop], axis=0)
    # Mean membrane potentials
    mean_memb_pot = np.ndarray((n_glomeruli*2, monit_mt['V'].values.shape[1]))
    bin_interco_matrix = (mtgr_connections > 0.)
    # Neurons connected to more than one granule are "interconnected".
    interco_neurons = (bin_interco_matrix.sum(axis=1) > 1)
    for glom in xrange(n_glomeruli):
        start_subpop = glom*n_mitral_per_subpop
        stop_subpop = start_subpop + n_mitral_per_subpop
        # Get subpopulation membrane potentials and interconnected neurons
        subpop_memb_pot = monit_mt['V'].values[start_subpop:stop_subpop]
        subpop_interco_neurons = interco_neurons[start_subpop:stop_subpop]
        # Compute one mean for interconnected neurons and another for the other neurons
        mean_pop = np.mean(subpop_memb_pot[~subpop_interco_neurons], axis=0)
        mean_pop_interco = np.mean(subpop_memb_pot[subpop_interco_neurons], axis=0)
        mean_memb_pot[glom*2] = mean_pop
        mean_memb_pot[glom*2 + 1] = mean_pop_interco
    results['data'] = {'spikes_it': [array_spikes_it, "Spikes: one array for the neuron number, another one for the spike times."],
                       'input': [mean_inputs, "Mean network input conductance value for each glomerule."],
                       's_granule': [monit_gr['s'].values, "Variable 's' of the granules."],
                       's_syn_self': [monit_gr['s_syn_self'].values, "Variable 's_syn' for the granule, without integrating the mitral 's' from other subpopulations."],
                       'mean_memb_pot': [mean_memb_pot, "Mean membrane potential. For each subpop: one mean for the interconnected neurons and one mean for the non-interconnected neurons."]}
    results['indexes'] = {'MPS': mps_indexes, 'STS': sts_indexes, 'FFTMAX': fftmax, 'peak_distances': peak_distances}
    return {'set': model.PARAMETERS, 'arrays': ps_arrays}, results
def retrace_elf(elfname, jname, tinfo, interesting_blocks, verbosity = 0):
    """Re-run *elfname* under a timing-enabled emulator and compare the
    resulting diff against the recorded hardware trace stored in *jname*.

    Blocks whose instruction stream matches the recording but whose
    timing difference differs are appended to *interesting_blocks* as
    ``(addr, old_block, old_difference)`` tuples.

    Returns True when the emulator diff equals the recorded one (or
    when *jname* does not exist and the file is skipped).
    """
    if not os.path.isfile(jname):
        print('skipping {:s}, no trace {:s}'.format(elfname, jname))
        return True
    # Two cosimulated emulators: one timed (tinfo) and one untimed.
    timulator = Emulator(verbosity=verbosity, tracing=True, tinfo=tinfo)
    mulator = Emulator(verbosity=verbosity, tracing=True)
    mmap = [(model.ram_start, model.ram_size), (model.fram_start, model.fram_size)]
    cosim = Cosim([timulator, mulator], [False, False], mmap)
    master_idx = 0
    cosim_repl.prog_and_sync(cosim, master_idx, elfname)
    cosim.run(max_steps=run_max_steps, interval=run_interval, passes=run_passes)
    # Round-trip through JSON so the in-memory trace has exactly the
    # same representation as the one loaded from disk below.
    tmp_jstr = json.dumps({'diff':cosim.diff(), 'trace':mulator.trace, 'iotrace':mulator.iotrace2})
    tmp_jobj = json.loads(tmp_jstr)
    diff = tmp_jobj['diff']
    trace = tmp_jobj['trace']
    iotrace = tmp_jobj['iotrace']
    old_diff, old_trace, old_iotrace = load_trace(jname)
    same = diff == old_diff
    if verbosity >= 0:
        print(' timed emulated {:s} against {:s}. Same? {:s}'
              .format(elfname, jname, repr(same)))
        # print('---ORIGINAL---')
        # utils.explain_diff(old_diff)
        # print('---EMULATOR---')
        # utils.explain_diff(diff)
    if not same:
        # Reduce both diffs to per-block mismatch summaries and compare
        # block-by-block.
        old_blocks = []
        old_mismatches = compute_mismatches(old_diff, verbosity=verbosity)
        old_err = mismatches_to_blocks(old_trace, old_mismatches, old_blocks)
        blocks = []
        mismatches = compute_mismatches(diff, verbosity=verbosity)
        err = mismatches_to_blocks(trace, mismatches, blocks)
        if old_err and err:
            print(' failures in both traces: {:s}'.format(elfname))
        elif old_err:
            print(' BAD: failures in hardware trace: {:s}'.format(elfname))
        elif err:
            print(' BAD: failures in emulator trace: {:s}'.format(elfname))
        else:
            print(' successful trace: {:s}'.format(elfname))
        # Index the recorded (hardware) blocks by address; entries are
        # popped as they are matched so leftovers are hardware-only.
        old_blocks_index = {addr: (x, y) for (addr, x, y) in old_blocks}
        trace_errors = 0
        uncovered = 0
        for (addr, block, difference) in blocks:
            if addr in old_blocks_index:
                old_block, old_difference = old_blocks_index.pop(addr)
                if block != old_block:
                    # Same address but different instruction stream.
                    print(' BAD: trace difference at {:05x}'.format(addr))
                    trace_errors += 1
                elif difference != old_difference:
                    # Same instructions, different timing: interesting.
                    interesting_blocks.append((addr, old_block, old_difference))
                    if verbosity >= 0:
                        print('timing difference for block at {:05x} of {:s}'.format(addr, elfname))
                        # Disassemble and pretty-print each instruction
                        # of the mismatching block.
                        for fields in block:
                            ins = isa.decode(fields['words'][0])
                            fmt, name, smode, dmode = isa.instr_to_modes(ins)
                            if fmt == 'fmt1':
                                rsrc = fields['rsrc']
                                rdst = fields['rdst']
                                if 'isrc' in fields:
                                    sval = ', {:#x}'.format(fields['isrc'])
                                else:
                                    sval = ''
                                print('{:s}\t{:s} (R{:d}{:s}), {:s} (R{:d})'
                                      .format(name, smode, rsrc, sval, dmode, rdst))
                            elif fmt == 'fmt2':
                                rsrc = fields['rsrc']
                                if 'isrc' in fields:
                                    sval = ', {:#x}'.format(fields['isrc'])
                                else:
                                    sval = ''
                                print('{:s}\t{:s} (R{:d}{:s})'
                                      .format(name, smode, rsrc, sval))
                            elif fmt == 'jump':
                                print('{:s}\t{:d}, taken={:s}'
                                      .format(name, fields['jump_offset'], str(fields['jump_taken'])))
                            else:
                                # Unknown format: dump the raw fields.
                                print('{:s}, {:s}, {:s}, {:s}'.format(fmt, name, smode, dmode))
                                utils.print_dict(fields)
                        print('hardware: {:s}, emulator: {:s}'
                              .format(repr(old_difference), repr(difference)))
                        print('')
            else:
                # Emulator produced a block the hardware trace lacks.
                uncovered += 1
        if trace_errors > 0:
            print(' BAD: {:d} trace differences'.format(trace_errors))
        if uncovered > 0 or len(old_blocks_index) > 0:
            print(' BAD: {:d} blocks unique to hardware, {:d} to emulator'
                  .format(len(old_blocks_index), uncovered))
    return same
def test_seg_with_name(name): article = utils.load_json(name) words = segmentor.seg(article['title'], article['content']) print '+' * 120 print 'article:', article['title'] # print 'content:', article['content'] utils.print_dict(words['all'], 'all words', cmp_key=lambda x: x[1]) utils.print_dict(words['src'], 'src words', cmp_key=lambda x: x[1]) utils.print_dict(words['nr'], 'nr words', cmp_key=lambda x: x[1]) utils.print_dict(words['ns'], 'ns words', cmp_key=lambda x: x[1]) utils.print_dict(words['nt'], 'nt words', cmp_key=lambda x: x[1]) utils.print_dict(words['eng'], 'eng words', cmp_key=lambda x: x[1]) utils.print_dict(words['compos'], 'compos words', cmp_key=lambda x: x[1]) utils.print_dict(words['blackets'], 'blackets words', cmp_key=lambda x: x[1]) print ' '.join(words['title']) print len(json.dumps(words))
import os import sys SCRIPT_DIR = os.path.abspath(os.path.dirname(__file__)) sys.path = [os.path.join(SCRIPT_DIR + '/../')] + sys.path from mongojoin import MongoJoin from mongojoin import MongoCollection from utils import print_dict, print_list if __name__ == "__main__": m_1 = MongoCollection("test", "supplier", [], {}) c_1 = m_1.get_mongo_cursor() aggregator = MongoJoin(m_1, m_1, ["supplier_id"]) print "\n************************ INNER JOIN **********************" print_dict(aggregator.inner())
return (m, M) with open('files/day{day}.txt'.format(day=DAY), 'r') as f: lines = [l.strip() for l in f.readlines()] tiles = {} for l in lines: tile = get_final_tile(l) if tile.get_tuple() in tiles: tiles[tile.get_tuple()] = not tiles[tile.get_tuple()] else: tiles[tile.get_tuple()] = True print_dict(tiles) print(sum([1 if tiles[tile] else 0 for tile in tiles])) new_tiles = {} for tile in tiles: if tiles[tile]: new_tiles[tile] = True tiles = new_tiles print_dict(tiles) for i in range(100): og_tiles = copy.deepcopy(tiles)
def step(self):
    """Fetch, decode, and execute a single instruction.

    Reads PC from register 0, decodes the 16-bit word there, runs the
    instruction's readfields/execute/writefields cycle, and maintains
    the trace/iotrace bookkeeping and the optional timer.

    Returns False when the halt word (0x3fff) was executed, True
    otherwise.

    :raises base.ExecuteError: if the fetched word does not decode.
    """
    pc = self.state.readreg(0)
    word = model.mk_read16(self.state.read8)(pc)
    ins = isa.decode(word)
    if ins is None:
        raise base.ExecuteError('failed to decode {:#04x} ( PC: {:05x})'.format(word, pc))
    # TODO: iotrace should probably work in a reasonable way
    # right now we have two lists of io traces, one which includes all io, even not
    # from instruction execution, and a second one in self.iotrace2 which is only
    # io events from actually executing instructions
    if self.tracing:
        model.iotrace_next(self.iotrace)
    fields = ins.readfields(self.state)
    if self.tracing:
        self.trace.append(fields)
    if self.verbosity >= 2:
        # Verbose mode: dump registers and the decoded instruction.
        print(utils.describe_regs(self.regs()))
        ins.describe()
        utils.print_dict(fields)
    ins.execute(fields)
    ins.writefields(self.state, fields)
    # remember the thing we just added to our iotrace, and make a dummy to intercept
    # non-execution IO before the next instruction
    if self.tracing:
        self.iotrace2.append(self.iotrace[-1])
        model.iotrace_next(self.iotrace)
    # # manual breakpoints / watchpoints
    # step_io = self.iotrace2[-1]
    # for addr, value in step_io['w']['mem']:
    #     if addr >= 0xfffe:
    #         print(hex(pc))
    #         utils.print_dict(step_io)
    #         raise base.Breakpoint('manual')
    # if pc == 0xfe84:
    #     print(hex(pc))
    #     raise base.Breakpoint('manual')
    # # end
    # update the timer if we're doing that
    if self.timing:
        cycles = self._timer_update(ins, fields)
        self.timer_A.elapse(cycles)
    if self.tracing and self.verbosity >= 2:
        print('----')
    if word == 0x3fff:  # halt
        return False
    else:
        return True