def setUpClass(cls):
    """Make sure the mediator key pair exists, then boot a local mediator.

    Generates keys only when the public key file is missing, starts the
    mediator on port 5100, and waits briefly so it is up before tests run.
    """
    public_key_path = "cryp/publicmediator.pem"
    if not os.path.isfile(public_key_path):
        util.get_keys("mediator")
    create_local_mediator(5100)
    # Give the spawned mediator process time to finish booting.
    time.sleep(3)
def send_extra_data_to_mediator(data: dict, complaint, my_name):
    """POST a complaint together with supporting data to the mediator.

    Args:
        data: extra evidence to attach (serialized via util.vote_to_string).
        complaint: the complaint object (serialized the same way).
        my_name: this party's identifier; its port suffix names the key pair.
    """
    # Key name is the trailing ":<port>" component of my_name.
    util.get_keys(my_name.split(":")[-1])
    payload = {
        "complaint": util.vote_to_string(complaint),
        "data": util.vote_to_string(data),
        "server": my_name,
        "sender": my_name,
    }
    util.post_url(payload, util.mediator + "/extra_data")
def complain_consistency(complaint: util.Complaint, servers, mediator, my_name):
    """Broadcast a message-inconsistency complaint to all servers and the mediator.

    A fresh payload dict is built per recipient (post_url may decorate it,
    e.g. with a signature), exactly as in the original call pattern.
    """
    util.get_keys(my_name.split(":")[-1])
    for recipient in servers + [mediator]:
        util.post_url(
            {
                "complaint": util.vote_to_string(complaint),
                "server": my_name,
                "sender": my_name,
            },
            recipient + "/messageinconsistency",
        )
def setUpClass(cls):
    """Create any missing test keys, then launch four servers and a mediator."""
    for name in test_keys_necessary:
        # Generate a key pair only when its public half is absent on disk.
        if not os.path.isfile("cryp/public{}.pem".format(name)):
            util.get_keys(name)
    for offset in range(4):
        create_local_server(5000 + offset)
    create_local_mediator(5100)
    # Let the spawned processes finish booting before tests start.
    time.sleep(3)
def create_local(port):
    """Run the Flask app on `port` in testing mode with a /shutdown route."""
    global my_name, testing

    # Load this party's keys and flip the module-level testing flag first;
    # both are independent of route registration.
    util.get_keys(my_name)
    testing = True

    @app.route("/shutdown")
    def stop_server():
        # Lets the test harness terminate this server remotely.
        shutdown_server()
        return 'Server shutting down...'

    app.run(port=int(port), debug=False, use_reloader=False, threaded=True)
def is_email_available(cls, email, self_key=None):
    """Return True when `email` is free, or used only by `self_key`'s record.

    When uniqueness checking is disabled in config, every email is available.
    Queries at most two verified matches to detect duplicates cheaply.
    """
    if not config.CONFIG_DB.check_unique_email:
        return True
    matches, _ = util.get_keys(
        cls.query(),
        email=email,
        verified=True,
        limit=2,
    )
    if not matches:
        return True
    # Available only if the single existing match is our own record.
    return self_key in matches and len(matches) == 1
def main():
    # Entry point of this Python 2 weibo bot: loads config, logs in with
    # retries, initializes the info writer, restores the last-seen status id,
    # then hands off to run().
    # Module-level state shared with run() and other helpers.
    global latest_status_id
    global comment_txt_file_path
    global keys_obj
    global city_position
    # Credentials and paths (client id/secret, usernames, file names) come
    # from the key file.
    keys_obj = util.get_keys(KEY_FILE_PATH)
    net_is_ok = False
    Client.init(keys_obj['client_id'], keys_obj['client_secret'])
    # Retry the login until the network is reachable.
    while not net_is_ok:
        try:
            city_position = Client(keys_obj['username1'], keys_obj['password1'])
        except:  # NOTE(review): bare except swallows all errors, not just network ones — narrow if possible
            wait_second = 2
            os.system('clear')
            print 'net is not ok~,will try after ' + str(
                wait_second) + ' second...'
            print 'please check the internet connection!'
            sleep(wait_second)
        else:
            net_is_ok = True
    InfoWriter.init(keys_obj['info_dir_path'], keys_obj['weibo_file_name'],
                    keys_obj['friends_ids_file_name'],
                    keys_obj['lock_file_name'],
                    keys_obj['user_name_file_name'])
    comment_txt_file_path = keys_obj['info_dir_path'] + keys_obj[
        'comment_txt_file_name']
    # Resume from the last processed weibo id persisted on disk.
    # NOTE(review): file handle is never closed; py2 `file()` builtin.
    latest_status_id = file(keys_obj['info_dir_path'] +
                            keys_obj['last_weibo_id_file_name']).readline()
    print latest_status_id
    run()
def main():
    # Entry point (Python 2). NOTE(review): this appears to be a duplicate of
    # another main() in this codebase, differing only in whitespace — consider
    # consolidating.
    # Module-level state shared with run() and other helpers.
    global latest_status_id
    global comment_txt_file_path
    global keys_obj
    global city_position
    # Credentials and paths come from the key file.
    keys_obj = util.get_keys(KEY_FILE_PATH)
    net_is_ok = False
    Client.init(keys_obj['client_id'],keys_obj['client_secret'])
    # Retry the login until the network is reachable.
    while not net_is_ok:
        try:
            city_position = Client(keys_obj['username1'],keys_obj['password1'])
        except:  # NOTE(review): bare except swallows all errors — narrow if possible
            wait_second = 2
            os.system('clear')
            print 'net is not ok~,will try after '+str(wait_second)+' second...'
            print 'please check the internet connection!'
            sleep(wait_second)
        else:
            net_is_ok = True
    InfoWriter.init(keys_obj['info_dir_path'],keys_obj['weibo_file_name'],keys_obj['friends_ids_file_name'],keys_obj['lock_file_name'],keys_obj['user_name_file_name'])
    comment_txt_file_path = keys_obj['info_dir_path']+keys_obj['comment_txt_file_name']
    # Resume from the last processed weibo id persisted on disk.
    latest_status_id = file(keys_obj['info_dir_path']+keys_obj['last_weibo_id_file_name']).readline()
    print latest_status_id
    run()
def test_pkcs(self):
    """Round-trip: a PKCS#1 v1.5 signature must verify against its message."""
    priv_key, pub_key, key_len_bytes = util.get_keys(
        'test/fixtures/e3_test_key')
    message = b'test string'
    signature = pkcs15.sign(priv_key, key_len_bytes, message)
    # verify_unsafe raises / fails internally on a bad signature.
    pkcs15.verify_unsafe(pub_key, key_len_bytes, signature, message)
def get_newly_created_asic_db_key(self):
    """Return the ASIC DB key added since the snapshot in self._original_entries.

    Fix: initialize asic_db_key to None so that the function returns None
    when no new entry exists, instead of raising UnboundLocalError. This also
    matches the sibling get_newly_created_* helpers, which all pre-initialize
    their result to None.
    """
    asic_db_key = None
    route_entries = util.get_keys(self.asic_db, self.ASIC_DB_TBL_NAME)
    # Snapshot of the table taken before the operation under test.
    original = self._original_entries["%s:%s" % (self.asic_db,
                                                 self.ASIC_DB_TBL_NAME)]
    for key in route_entries:
        if key not in original:
            asic_db_key = key
            break
    return asic_db_key
def get_newly_created_nexthop_oid(self):
    """Return the nexthop OID added since the snapshot, or None if absent."""
    snapshot = self._original_entries["{}:{}".format(
        self.asic_db, self.ASIC_DB_TBL_NAME)]
    for candidate in util.get_keys(self.asic_db, self.ASIC_DB_TBL_NAME):
        if candidate not in snapshot:
            # First unseen key is the newly created one.
            return candidate
    return None
def get_newly_created_wcmp_group_member_asic_db_key(self):
    """Return the WCMP group-member key added since the snapshot, or None."""
    snapshot = self._original_entries["{}:{}".format(
        self.asic_db, self.ASIC_DB_GROUP_MEMBER_TBL_NAME)]
    current = util.get_keys(self.asic_db, self.ASIC_DB_GROUP_MEMBER_TBL_NAME)
    for candidate in current:
        if candidate not in snapshot:
            # First unseen key is the newly created one.
            return candidate
    return None
def get_newly_created_wcmp_group_oid(self):
    """Return the WCMP group OID added since the snapshot, or None if absent."""
    snapshot = self._original_entries["{}:{}".format(
        self.asic_db, self.ASIC_DB_GROUP_TBL_NAME)]
    for candidate in util.get_keys(self.asic_db, self.ASIC_DB_GROUP_TBL_NAME):
        if candidate not in snapshot:
            # First unseen key is the newly created one.
            return candidate
    return None
def create_local(port, cheat=False, cheating_ns=None, cheatid=0):
    """Run a local test server on `port`, optionally in cheating mode.

    Args:
        port: TCP port to listen on; also names this server's key pair.
        cheat: whether this server misbehaves during the protocol.
        cheating_ns: list of cheat parameters (default: empty list).
        cheatid: identifier of the cheating strategy.

    Fix: `cheating_ns=[]` was a mutable default argument (one shared list
    across all calls). Replaced with a None sentinel; behavior for callers
    is unchanged.
    """
    global my_name, testing, server_nr, cheating, cheating_nums, cheat_id
    cheating = cheat
    cheating_nums = [] if cheating_ns is None else cheating_ns
    cheat_id = cheatid

    @app.route("/shutdown")
    def stop_server():
        # Lets the test harness terminate this server remotely.
        print("stopping", port)
        shutdown_server()
        return 'Server shutting down...'

    util.get_keys(str(port))
    testing = True
    my_name = "http://127.0.0.1:" + str(port)
    server_nr = int(port)
    # NOTE(review): this assigns a *local* that is never read — it was likely
    # intended to reset a module-level counter; confirm before removing.
    communication_number = 0
    print("starting ", port)
    app.run(port=int(port), debug=False, use_reloader=False, threaded=True)
def postvote(client_name: str, vote: list, servers: list):
    """Distribute secret shares of a vote to the servers.

    Args:
        client_name: unique identifier for the client.
        vote: list of matrices, one secret-shared r_i element per share.
        servers: servers the shares should be distributed to.

    Returns:
        None. Side effect: one POST per server, each carrying every share
        except the one indexed by that server's own position.
    """
    for idx in range(len(vote)):
        # All shares except the idx-th, with their original indices.
        other_shares = [s for j, s in enumerate(vote) if j != idx]
        share_ids = [j for j in range(4) if j != idx]
        serialized = [util.vote_to_string(part) for part in other_shares]
        message = dict(client=client_name, ids=share_ids, server=servers[idx],
                       votes=serialized, sender=client_name)
        # Keys are (re)loaded per iteration, mirroring the original call order.
        util.get_keys(client_name)
        util.post_url(message, servers[idx] + '/vote')
def test_s6c42(self):
    """Set 6 challenge 42: an e=3 forged signature differs from the genuine
    one yet still passes the broken (unsafe) verifier."""
    priv_key, pub_key, key_len_bytes = util.get_keys(
        'test/fixtures/e3_test_key')
    message = b'hi mom'
    forged = c42.forge_signature(pub_key, message)
    genuine = pkcs15.sign(priv_key, key_len_bytes, message)
    self.assertNotEqual(forged, genuine)
    # Both the genuine and the forged signature must verify.
    for signature in (genuine, forged):
        ok, _ = pkcs15.verify_unsafe(pub_key, key_len_bytes, signature,
                                     message)
        self.assertTrue(ok)
def get_transaction_aggregate():
    """Union the per-minute aggregate sets of the last ~3 hours and return
    the ranked address/value pairs as JSON."""
    now = datetime.utcnow()
    window_start = now - timedelta(hours=3, minutes=1)
    keys = get_keys(window_start, now, timedelta(minutes=1),
                    REDIS_AGGREGATE_KEY_NS)
    redis_client.zunionstore(REDIS_AGGREGATE_SCORE_KEY_NS, len(keys), *keys)
    records = redis_client.zrevrange(REDIS_AGGREGATE_SCORE_KEY_NS, 0, -1,
                                     'WITHSCORES')
    # WITHSCORES flattens the reply: [member, score, member, score, ...]
    transaction_list = [
        {'address': records[i].decode("utf-8"), 'value': int(records[i + 1])}
        for i in range(0, len(records), 2)
    ]
    return jsonify(transaction_list)
def get_transactions_per_min(min_value):
    """Return JSON per-minute transaction counts for the hour ending at
    `min_value`.

    Example: http://0.0.0.0:5000/transactions_count_per_minute/2019-10-01 14:57
    """
    end_dt = datetime.strptime(min_value + ':00', '%Y-%m-%d %H:%M:%S')
    start_dt = end_dt - timedelta(hours=1, minutes=1)
    dts = get_keys(start_dt, end_dt, timedelta(minutes=1),
                   REDIS_TX_COUNTER_KEY_NS)
    records = redis_client.mget(*dts)
    transactions_count = []
    for key, raw in zip(dts, records):
        # Key format is "<ns>:<HHMM>"; reinsert the colon for display.
        stamp = key.split(':')[1]
        minute = '%s:%s' % (stamp[:2], stamp[2:])
        # A missing counter key means zero transactions that minute.
        count = int(raw.decode('utf8')) if raw else 0
        transactions_count.append({'minute': minute, 'count': count})
    return jsonify(transactions_count)
def create_local_mediator(port):
    """Spawn the mediator Flask app on `port` in a separate process."""
    util.get_keys("mediator")
    worker = mp.Process(target=Mediator.mediator.create_local,
                        args=(str(port),))
    worker.start()
def is_username_available(cls, username, self_key=None):
    """Return True when `username` is free, or used only by `self_key`'s record.

    Without a self_key a plain existence check suffices; otherwise query at
    most two matches to detect duplicates cheaply.
    """
    if self_key is None:
        return cls.get_by('username', username) is None
    matches, _ = util.get_keys(cls.query(), username=username, limit=2)
    if not matches:
        return True
    # Available only if the single existing match is our own record.
    return self_key in matches and len(matches) == 1
def __init__(self, key_name):
    """Load the key pair named `key_name` and start with an empty data set."""
    # get_keys returns exactly (private, public, key_length); the length is
    # not needed here but the 3-tuple shape is enforced by the unpack.
    private_key, public_key, _unused_len = util.get_keys(key_name)
    self._priv_key = private_key
    self._pub_key = public_key
    self.data = set()
#!/usr/bin/python #-*- coding:utf8 -*- import util import json from urllib import urlretrieve keys_obj = util.get_keys("keys.json") util.Client.init(keys_obj['client_id'], keys_obj['client_secret']) client = util.Client(keys_obj['username1'], keys_obj['password1']) ids = client.get_somebody_follower_ids(keys_obj['who_follower_avatar']) pic_url_list = [] for follower_id in ids: print 'id:' + str(follower_id) info = client.get_somebody_info(follower_id) try: pic_url = info['profile_image_url'] pic_url_list.append(pic_url) urlretrieve(pic_url, './heads/' + str(follower_id) + '.jpeg') except: print 'err:' + str(follower_id) print 'msg:' + json.dumps(info) print '\n' continue print 'picNum:' + str(len(pic_url_list))
def timer(t, protocol, complaint):
    """Run handle_complaint(t, protocol, complaint) in a background process."""
    util.get_keys("mediator")
    worker = mp.Process(target=handle_complaint,
                        args=(t, protocol, complaint))
    worker.start()
def test_signature_neg(self):
    """A signature with its final byte truncated must fail verification."""
    util.get_keys("")
    signed = util.make_post_signature(dict(test="1234"))
    tampered = bytes(signed["signature"][:-1])
    self.assertFalse(
        util.verify(tampered, signed["data"], signed["pub"].decode()))
def test_MirrorSessionAddModifyAndDelete(self, dvs, testlog):
    # End-to-end lifecycle test for a P4RT mirror session: create it via the
    # application DB, verify propagation to the application-state and ASIC
    # DBs, modify one attribute, then delete it and verify all three DBs
    # return to their original state.

    # Initialize database connectors
    self._set_up(dvs)

    # Maintain list of original Application and ASIC DB entries before adding
    # new mirror session
    original_appl_mirror_entries = util.get_keys(
        self._p4rt_mirror_session_wrapper.appl_db,
        self._p4rt_mirror_session_wrapper.APP_DB_TBL_NAME + ":" +
        self._p4rt_mirror_session_wrapper.TBL_NAME)
    original_appl_state_mirror_entries = util.get_keys(
        self._p4rt_mirror_session_wrapper.appl_state_db,
        self._p4rt_mirror_session_wrapper.APP_DB_TBL_NAME + ":" +
        self._p4rt_mirror_session_wrapper.TBL_NAME)
    original_asic_mirror_entries = util.get_keys(
        self._p4rt_mirror_session_wrapper.asic_db,
        self._p4rt_mirror_session_wrapper.ASIC_DB_TBL_NAME)

    # 1. Create mirror session
    mirror_session_id = "mirror_session1"
    action = "mirror_as_ipv4_erspan"
    port = "Ethernet8"
    src_ip = "10.206.196.31"
    dst_ip = "172.20.0.203"
    src_mac = "00:02:03:04:05:06"
    dst_mac = "00:1A:11:17:5F:80"
    ttl = "0x40"
    tos = "0x00"

    attr_list_in_app_db = [
        (self._p4rt_mirror_session_wrapper.ACTION, action),
        (util.prepend_param_field(self._p4rt_mirror_session_wrapper.PORT),
         port),
        (util.prepend_param_field(
            self._p4rt_mirror_session_wrapper.SRC_IP), src_ip),
        (util.prepend_param_field(
            self._p4rt_mirror_session_wrapper.DST_IP), dst_ip),
        (util.prepend_param_field(
            self._p4rt_mirror_session_wrapper.SRC_MAC), src_mac),
        (util.prepend_param_field(
            self._p4rt_mirror_session_wrapper.DST_MAC), dst_mac),
        (util.prepend_param_field(self._p4rt_mirror_session_wrapper.TTL),
         ttl),
        (util.prepend_param_field(self._p4rt_mirror_session_wrapper.TOS),
         tos)
    ]
    mirror_session_key = self._p4rt_mirror_session_wrapper.generate_app_db_key(
        mirror_session_id)
    self._p4rt_mirror_session_wrapper.set_app_db_entry(
        mirror_session_key, attr_list_in_app_db)
    util.verify_response(self._response_consumer, mirror_session_key,
                         attr_list_in_app_db, "SWSS_RC_SUCCESS")

    # Query application database for mirror entries
    appl_mirror_entries = util.get_keys(
        self._p4rt_mirror_session_wrapper.appl_db,
        self._p4rt_mirror_session_wrapper.APP_DB_TBL_NAME + ":" +
        self._p4rt_mirror_session_wrapper.TBL_NAME)
    assert len(
        appl_mirror_entries) == len(original_appl_mirror_entries) + 1

    # Query application database for newly created mirror key
    (status, fvs) = util.get_key(self._p4rt_mirror_session_wrapper.appl_db,
                                 self._p4rt_mirror_session_wrapper.APP_DB_TBL_NAME,
                                 mirror_session_key)
    assert status == True
    util.verify_attr(fvs, attr_list_in_app_db)

    # Query application state database for mirror entries
    appl_state_mirror_entries = util.get_keys(
        self._p4rt_mirror_session_wrapper.appl_state_db,
        self._p4rt_mirror_session_wrapper.APP_DB_TBL_NAME + ":" +
        self._p4rt_mirror_session_wrapper.TBL_NAME)
    assert len(appl_state_mirror_entries
               ) == len(original_appl_state_mirror_entries) + 1

    # Query application state database for newly created mirror key
    (status, fvs) = util.get_key(self._p4rt_mirror_session_wrapper.appl_state_db,
                                 self._p4rt_mirror_session_wrapper.APP_DB_TBL_NAME,
                                 mirror_session_key)
    assert status == True
    util.verify_attr(fvs, attr_list_in_app_db)

    # Query ASIC database for mirror entries
    asic_mirror_entries = util.get_keys(
        self._p4rt_mirror_session_wrapper.asic_db,
        self._p4rt_mirror_session_wrapper.ASIC_DB_TBL_NAME)
    assert len(
        asic_mirror_entries) == len(original_asic_mirror_entries) + 1

    # Query ASIC database for newly created mirror key
    asic_db_key = None
    for key in asic_mirror_entries:
        # Get newly created entry
        if key not in original_asic_mirror_entries:
            asic_db_key = key
            break
    assert asic_db_key is not None
    (status, fvs) = util.get_key(
        self._p4rt_mirror_session_wrapper.asic_db,
        self._p4rt_mirror_session_wrapper.ASIC_DB_TBL_NAME,
        asic_db_key)
    assert status == True

    # Get oid of Ethernet8
    port_oid = util.get_port_oid_by_name(dvs, port)
    assert port_oid != None

    # Expected SAI attributes in the ASIC DB; note the numeric TTL/TOS and
    # GRE protocol values are the decimal forms of the hex inputs above.
    expected_attr_list_in_asic_db = [
        (self._p4rt_mirror_session_wrapper.
         SAI_MIRROR_SESSION_ATTR_MONITOR_PORT, port_oid),
        (self._p4rt_mirror_session_wrapper.SAI_MIRROR_SESSION_ATTR_TYPE,
         "SAI_MIRROR_SESSION_TYPE_ENHANCED_REMOTE"),
        (self._p4rt_mirror_session_wrapper.
         SAI_MIRROR_SESSION_ATTR_ERSPAN_ENCAPSULATION_TYPE,
         "SAI_ERSPAN_ENCAPSULATION_TYPE_MIRROR_L3_GRE_TUNNEL"),
        (self._p4rt_mirror_session_wrapper.
         SAI_MIRROR_SESSION_ATTR_IPHDR_VERSION,
         "4"),  # MIRROR_SESSION_DEFAULT_IP_HDR_VER
        (self._p4rt_mirror_session_wrapper.SAI_MIRROR_SESSION_ATTR_TOS,
         "0"),
        (self._p4rt_mirror_session_wrapper.SAI_MIRROR_SESSION_ATTR_TTL,
         "64"),
        (self._p4rt_mirror_session_wrapper.
         SAI_MIRROR_SESSION_ATTR_SRC_IP_ADDRESS, src_ip),
        (self._p4rt_mirror_session_wrapper.
         SAI_MIRROR_SESSION_ATTR_DST_IP_ADDRESS, dst_ip),
        (self._p4rt_mirror_session_wrapper.
         SAI_MIRROR_SESSION_ATTR_SRC_MAC_ADDRESS, src_mac),
        (self._p4rt_mirror_session_wrapper.
         SAI_MIRROR_SESSION_ATTR_DST_MAC_ADDRESS, dst_mac),
        (self._p4rt_mirror_session_wrapper.
         SAI_MIRROR_SESSION_ATTR_GRE_PROTOCOL_TYPE,
         "35006")  # GRE_PROTOCOL_ERSPAN 0x88be
    ]
    util.verify_attr(fvs, expected_attr_list_in_asic_db)

    # 2. Modify the existing mirror session.
    new_dst_mac = "00:1A:11:17:5F:FF"
    # Index 5 is the DST_MAC tuple in attr_list_in_app_db above.
    attr_list_in_app_db[5] = (util.prepend_param_field(
        self._p4rt_mirror_session_wrapper.DST_MAC), new_dst_mac)
    self._p4rt_mirror_session_wrapper.set_app_db_entry(
        mirror_session_key, attr_list_in_app_db)
    util.verify_response(self._response_consumer, mirror_session_key,
                         attr_list_in_app_db, "SWSS_RC_SUCCESS")

    # Query application database for the modified mirror key
    (status, fvs) = util.get_key(self._p4rt_mirror_session_wrapper.appl_db,
                                 self._p4rt_mirror_session_wrapper.APP_DB_TBL_NAME,
                                 mirror_session_key)
    assert status == True
    util.verify_attr(fvs, attr_list_in_app_db)

    # Query application state database for the modified mirror key
    (status, fvs) = util.get_key(self._p4rt_mirror_session_wrapper.appl_state_db,
                                 self._p4rt_mirror_session_wrapper.APP_DB_TBL_NAME,
                                 mirror_session_key)
    assert status == True
    util.verify_attr(fvs, attr_list_in_app_db)

    # Query ASIC DB about the modified mirror session.
    # Index 9 is the DST_MAC tuple in expected_attr_list_in_asic_db above.
    expected_attr_list_in_asic_db[9] = (
        self._p4rt_mirror_session_wrapper.
        SAI_MIRROR_SESSION_ATTR_DST_MAC_ADDRESS, new_dst_mac)
    (status, fvs) = util.get_key(
        self._p4rt_mirror_session_wrapper.asic_db,
        self._p4rt_mirror_session_wrapper.ASIC_DB_TBL_NAME,
        asic_db_key)
    assert status == True
    util.verify_attr(fvs, expected_attr_list_in_asic_db)

    # 3. Delete the mirror session.
    self._p4rt_mirror_session_wrapper.remove_app_db_entry(
        mirror_session_key)
    util.verify_response(self._response_consumer, mirror_session_key, [],
                         "SWSS_RC_SUCCESS")

    # Query application database for mirror entries
    appl_mirror_entries = util.get_keys(
        self._p4rt_mirror_session_wrapper.appl_db,
        self._p4rt_mirror_session_wrapper.APP_DB_TBL_NAME + ":" +
        self._p4rt_mirror_session_wrapper.TBL_NAME)
    assert len(appl_mirror_entries) == len(original_appl_mirror_entries)

    # Query application database for the deleted mirror key
    (status, fvs) = util.get_key(self._p4rt_mirror_session_wrapper.appl_db,
                                 self._p4rt_mirror_session_wrapper.APP_DB_TBL_NAME,
                                 mirror_session_key)
    assert status == False

    # Query application state database for mirror entries
    appl_state_mirror_entries = util.get_keys(
        self._p4rt_mirror_session_wrapper.appl_state_db,
        self._p4rt_mirror_session_wrapper.APP_DB_TBL_NAME + ":" +
        self._p4rt_mirror_session_wrapper.TBL_NAME)
    assert len(appl_state_mirror_entries) == len(
        original_appl_state_mirror_entries)

    # Query application state database for the deleted mirror key
    (status, fvs) = util.get_key(self._p4rt_mirror_session_wrapper.appl_state_db,
                                 self._p4rt_mirror_session_wrapper.APP_DB_TBL_NAME,
                                 mirror_session_key)
    assert status == False

    # Query ASIC database for mirror entries
    asic_mirror_entries = util.get_keys(
        self._p4rt_mirror_session_wrapper.asic_db,
        self._p4rt_mirror_session_wrapper.ASIC_DB_TBL_NAME)
    assert len(asic_mirror_entries) == len(original_asic_mirror_entries)

    # Query ASIC state database for the deleted mirror key
    (status, fvs) = util.get_key(
        self._p4rt_mirror_session_wrapper.asic_db,
        self._p4rt_mirror_session_wrapper.ASIC_DB_TBL_NAME,
        asic_db_key)
    assert status == False
#!/usr/bin/python #-*- coding:utf8 -*- import util import json from urllib import urlretrieve keys_obj = util.get_keys("keys.json") util.Client.init(keys_obj['client_id'],keys_obj['client_secret']) client = util.Client(keys_obj['username1'],keys_obj['password1']) ids = client.get_somebody_follower_ids(keys_obj['who_follower_avatar']) pic_url_list = [] for follower_id in ids: print 'id:'+str(follower_id) info = client.get_somebody_info(follower_id) try: pic_url = info['profile_image_url'] pic_url_list.append(pic_url) urlretrieve(pic_url,'./heads/'+str(follower_id)+'.jpeg') except: print 'err:'+str(follower_id) print 'msg:'+json.dumps(info) print '\n' continue print 'picNum:'+str(len(pic_url_list))