def go(bits):
    # pick p in roughly the first half of the requested key size
    p_len = bits // 2
    random.seed()
    p = random.getrandbits(p_len)
    # random.randint(pow(2, p_len - 1), pow(2, p_len) - 1)
    if p % 2 == 0:
        p += 1
    while prime(p) == 1:
        p += 2
    # pick q so that p*q has about `bits` bits
    q_len = bits - math.ceil(math.log(p, 2))
    q = random.getrandbits(int(q_len))
    # random.randint(pow(2, q_len - 1), pow(2, q_len) - 1)
    if q % 2 == 0:
        q += 1
    while prime(q) == 1:
        q += 2
    phi = (p - 1) * (q - 1)
    n = p * q
    # pick an odd e until gcd(e, phi) == 1 (e_e is extended Euclid)
    e = random.randint(2, phi)
    if e % 2 == 0:
        e += 1
    while e_e(e, phi)[0] != 1:
        e += 2
    l = e_e(e, phi)
    d = l[1]
    return [d, e, n]
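
# A minimal round-trip check for the [d, e, n] triple returned by go(),
# assuming its helpers behave as the code suggests: prime() as the
# primality test and e_e() as extended Euclid returning (gcd, inverse),
# with d already reduced mod phi. This is a sketch, not part of the
# snippet itself.
d, e, n = go(64)
m = 42 % n
c = pow(m, e, n)          # "encrypt" with the public exponent
assert pow(c, d, n) == m  # the private exponent recovers the message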
def excitement(team1, team2):
    # This will take PIE into consideration and give the weaker team a chance
    # to stay relevant, because the team with the higher PIE ratio would
    # otherwise always win. The weaker team (by PIE) wins with roughly 12%
    # probability. In the event of a tie, we roll a 50/50 chance.
    chance = bool(random.getrandbits(3))  # False only when all 3 bits are 0 (1 in 8, ~12%)
    if team1.teampie > team2.teampie and not chance:
        return team2
    elif team1.teampie < team2.teampie and not chance:
        return team1
    elif team1.teampie > team2.teampie:
        return team1
    elif team1.teampie < team2.teampie:
        return team2
    elif team1.teampie == team2.teampie:
        # note: this returns on the first coin flip, so the loop runs at most once
        for x in range(0, 7):
            chance = bool(random.getrandbits(1))
            if chance:
                return team1
            else:
                return team2
def __init__(self, keylen=256, template="pytoken.template.py"):
    self.keylen = keylen
    self.template = template
    # [2:-1] drops the '0x' prefix and the trailing Python 2 'L' suffix
    self.serial = hex(random.getrandbits(8 * 4))
    self.serial = "pT" + self.serial[2:-1]
    self.hmackey = hex(random.getrandbits(self.keylen))
    self.hmackey = self.hmackey[2:-1]
def main(args):
    random.seed()
    if len(args) < 3:
        print('Please provide a server and a port')
        return
    cacheClient = CacheClient(args[1], int(args[2]))
    cnt = 0
    numFailedPuts = 0
    numFailedGets = 0
    key = "asdf"
    val = ""
    cacheClient.put(key, val)
    while True:
        key = str(random.getrandbits(9))
        if bool(random.getrandbits(1)):
            value = str(random.getrandbits(10))
            try:
                cacheClient.put(key, value)
            except Exception:
                numFailedPuts += 1
        else:
            try:
                cacheClient.get(key)
            except Exception:
                numFailedGets += 1
def copyWithOffset(cls, entity, copyOffset, regenerateUUID=False):
    # Need to check the content of the copy to regenerate the possible sub entities UUIDs.
    # A simple fix for the 1.9+ minecarts is proposed.
    eTag = deepcopy(entity)
    positionTags = map(lambda p, co: nbt.TAG_Double(p.value + co), eTag["Pos"], copyOffset)
    eTag["Pos"] = nbt.TAG_List(positionTags)
    if eTag["id"].value in ("Painting", "ItemFrame"):
        eTag["TileX"].value += copyOffset[0]
        eTag["TileY"].value += copyOffset[1]
        eTag["TileZ"].value += copyOffset[2]
    if "Riding" in eTag:
        eTag["Riding"] = Entity.copyWithOffset(eTag["Riding"], copyOffset)
    # Fix for 1.9+ minecarts
    if "Passengers" in eTag:
        passengers = nbt.TAG_List()
        for passenger in eTag["Passengers"]:
            passengers.append(Entity.copyWithOffset(passenger, copyOffset, regenerateUUID))
        eTag["Passengers"] = passengers
    if regenerateUUID:
        # Courtesy of SethBling
        eTag["UUIDMost"] = nbt.TAG_Long((random.getrandbits(47) << 16) | (1 << 12) | random.getrandbits(12))
        eTag["UUIDLeast"] = nbt.TAG_Long(-((7 << 60) | random.getrandbits(60)))
    return eTag
def __init__(self, name, created):
    """Setup the class"""
    self.name = name
    self.folder = md5.new("%032x" % random.getrandbits(128)).hexdigest()
    self.share_code = md5.new("%032x" % random.getrandbits(128)).hexdigest()
    self.modified = datetime.now(pytz.utc)
    self.created = created
def generateRandoms(self, filterCount, traffic, seed):
    """Generate random IPs and port numbers."""
    dictionary = {"sourcIP": [], "destIP": [], "sourcePortNo": [], "destPortNo": []}
    sourceIP = ""
    destIP = ""
    random.seed(seed)
    for i in range(0, filterCount):
        if "IPV4" in traffic[i]:
            sourceIP = socket.inet_ntoa(struct.pack('>I', random.randint(1, 0xffffffff)))
            destIP = socket.inet_ntoa(struct.pack('>I', random.randint(1, 0xffffffff)))
        else:
            sourceIP = str(IPAddress('0::0:0') + random.getrandbits(16))
            destIP = str(IPAddress('0::0:0') + random.getrandbits(16))
        sourcePort = random.randint(0, 0xffff)
        destPort = random.randint(0, 0xffff)
        dictionary["sourcIP"].append(sourceIP)
        dictionary["destIP"].append(destIP)
        dictionary["sourcePortNo"].append(sourcePort)
        dictionary["destPortNo"].append(destPort)
    return dictionary
def send_auth_resp(event_source):
    if ser.isOpen():
        try:
            key = int(random.getrandbits(32))
            wait_for_firmware_update()
            byte0 = (key & 0xFF000000) >> 24
            byte1 = (key & 0xFF0000) >> 16
            byte2 = (key & 0xFF00) >> 8
            byte3 = key & 0xFF
            # re-roll the key until no byte collides with the ',' delimiter
            while ord(',') in (byte0, byte1, byte2, byte3):
                key = int(random.getrandbits(32))
                byte0 = (key & 0xFF000000) >> 24
                byte1 = (key & 0xFF0000) >> 16
                byte2 = (key & 0xFF00) >> 8
                byte3 = key & 0xFF
            ser.write(("c" + str(event_source) + "0>").encode())
            ser.write([(key & 0xFF000000) >> 24, (key & 0xFF0000) >> 16,
                       (key & 0xFF00) >> 8, key & 0xFF])
            ser.write(",".encode())
            time.sleep(0.5)  # give the serial port some time to receive the data
        except Exception as e1:
            tb = traceback.format_exc()
            logger.error("error communicating...: " + str(e1))
            logger.error(str(tb))
    else:
        logger.error("cannot open serial port.")
    return key  # note: undefined if the port was never open
def createRequest(self, m, username=None, auth=None, cid=None, cseq=1,
                  fromaddr=None, toaddr=None, contact=None):
    from base64 import b64encode
    from libs.svhelper import makeRequest
    from libs.svhelper import createTag
    if cid is None:
        cid = '%s' % str(random.getrandbits(32))
    branchunique = '%s' % random.getrandbits(32)
    localtag = createTag(username)
    if not contact:
        contact = 'sip:%s@%s' % (username, self.domain)
    if not fromaddr:
        fromaddr = '"%s"<sip:%s@%s>' % (username, username, self.domain)
    if not toaddr:
        toaddr = '"%s"<sip:%s@%s>' % (username, username, self.domain)
    request = makeRequest(m, fromaddr, toaddr, self.domain, self.dstport, cid,
                          self.externalip, branchunique, cseq, auth, localtag,
                          self.compact, contact=contact,
                          localport=self.localport, extension=username)
    return request
def End2End(cur):
    """
    Generate a BIP key using random entropy each time.
    Decrypt the BIP key to get a compressed WIF key.
    Encrypt the WIF key to test that the same BIP key is generated.
    Also check the generated public address at each stage.
    """
    # build the entropy
    entropy = []
    count = 0
    while count <= 52:
        count += 1
        entropy.append((int(random.getrandbits(52)), int(random.getrandbits(52))))
    # generate a BIP key and address using the entropy above
    BIP_1, publicAddress_1 = gen.genBIPKey(cur, 'End2EndTest', entropy)
    # decrypt the BIP key to give a WIF key and address
    WIF, publicAddress_2 = gen.decBIPKey(BIP_1, 'End2EndTest', cur)
    # encrypt the WIF key to give a BIP key and address
    BIP_2, publicAddress_3 = gen.encBIPKey(WIF, cur, 'End2EndTest')
    # check that the BIP keys are the same
    if BIP_1 != BIP_2:
        print(cur + ' BIP Keys do not match\n' + BIP_1 + ' != ' + BIP_2)
        return False
    # check that all three public addresses are the same (a chained != would
    # miss the case where only the first and third differ)
    if not (publicAddress_1 == publicAddress_2 == publicAddress_3):
        print(cur + ' public addresses do not match\n' + publicAddress_1 + ' != '
              + publicAddress_2 + ' != ' + publicAddress_3)
        return False
    return True
def test_numeric(self):
    """Test data insertion and retrieval to numeric columns."""
    conn = self.database.connection()
    cursor = conn.cursor()
    dialect = self.database.dialect()
    dbapi = self.database.dbapi()
    query = dialect.translate('DROP TABLE test_numeric')
    try:
        cursor.execute(query)
    except dbapi.Error:
        conn.rollback()
    query = dialect.translate('CREATE TABLE test_numeric '
                              '( value NUMERIC(100,50) NOT NULL )')
    cursor.execute(query)
    data = []
    query = 'INSERT INTO test_numeric VALUES (%s)'
    for i in range(100):
        # int_part/frac_part avoid shadowing the int builtin
        int_part = random.getrandbits(150)
        frac_part = random.getrandbits(150)
        item = decimal.Decimal('%d.%s' % (int_part, frac_part))
        data.append(item)
        cursor.execute(query, (item,))
    query = 'SELECT * FROM test_numeric'
    cursor.execute(query)
    result = cursor.fetchall()
    for row in result:
        item = row[0]
        assert isinstance(item, decimal.Decimal)
        assert item in data
        data.remove(item)
    query = dialect.translate('DELETE FROM test_numeric')
    cursor.execute(query)
    query = dialect.translate('DROP TABLE test_numeric')
    cursor.execute(query)
    conn.commit()
def replace_symlink(source, target, logger=None):
    """!Do not call this routine directly: you want make_symlink instead.

    This routine creates a new symbolic link and renames that link to
    "target." That always replaces target with a symbolic link to source,
    even if target did not already exist.

    @param source the file to link from
    @param target the file to link to
    @param logger a logging.Logger for messages"""
    tempname = os.path.join(os.path.dirname(target),
                            'tmp.%s.%06x.%06x.tmp' % (
                                os.path.basename(target),
                                random.getrandbits(32), random.getrandbits(32)))
    try:
        if logger is not None:
            logger.info('link %s -> %s' % (tempname, source))
        os.symlink(source, tempname)
        if logger is not None:
            logger.info('rename %s to %s' % (tempname, target))
        os.rename(tempname, target)
    except Exception as e:
        try:
            if logger is not None:
                logger.info('failed: delete %s' % (tempname,))
            os.remove(tempname)
        except EnvironmentError:
            pass
        raise
def generate_random_prime(n):
    def isProbablePrime(n, t=7):
        """Miller-Rabin primality test with t random rounds."""
        def isComposite(a):
            if pow(a, d, n) == 1:
                return False
            for i in range(s):
                if pow(a, 2 ** i * d, n) == n - 1:
                    return False
            return True
        assert n > 0
        if n < 3:
            return [False, False, True][n]
        elif not n & 1:
            return False
        else:
            # write n - 1 as 2**s * d with d odd
            s, d = 0, n - 1
            while not d & 1:
                s += 1
                d >>= 1
        for _ in itertools.repeat(None, t):
            if isComposite(random.randrange(2, n)):
                return False
        return True

    p = random.getrandbits(n)
    while not isProbablePrime(p):
        p = random.getrandbits(n)
    return p
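
# Usage note: the argument of generate_random_prime() is a bit length, not
# an upper bound. getrandbits(n) can return values with fewer than n
# significant bits (even 0, which would trip the assert in isProbablePrime),
# so hardened versions usually force the top bit first, e.g. p |= 1 << (n - 1).
p = generate_random_prime(256)
print(p.bit_length())  # typically 256, occasionally a little less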
def printmatrix(n):
    # Print an n x n matrix of random bits, one row per line.
    # (Building each row with a comprehension avoids the aliasing that
    # [[bit]] * n creates, where every cell shares the same inner list.)
    for _ in range(n):
        row = [random.getrandbits(1) for _ in range(n)]
        print(row)
def save(self, *args, **kwargs):
    if self.uid is None or self.uid == "":
        # 32 random bits rendered as a bare hex string (no '0x' prefix)
        key = '%x' % random.getrandbits(32)
        while Group.objects.filter(uid=key).count() > 0:
            key = '%x' % random.getrandbits(32)
        self.uid = key
    super(Group, self).save(*args, **kwargs)
def test_csv_endpoint(self):
    self.login('admin')
    sql = """
        SELECT first_name, last_name
        FROM ab_user
        WHERE first_name='admin'
    """
    client_id = '{}'.format(random.getrandbits(64))[:10]
    self.run_sql(sql, client_id, raise_on_error=True)
    resp = self.get_resp('/superset/csv/{}'.format(client_id))
    data = csv.reader(io.StringIO(resp))
    expected_data = csv.reader(io.StringIO('first_name,last_name\nadmin, user\n'))
    self.assertEqual(list(expected_data), list(data))

    sql = "SELECT first_name FROM ab_user WHERE first_name LIKE '%admin%'"
    client_id = '{}'.format(random.getrandbits(64))[:10]
    self.run_sql(sql, client_id, raise_on_error=True)
    resp = self.get_resp('/superset/csv/{}'.format(client_id))
    data = csv.reader(io.StringIO(resp))
    expected_data = csv.reader(io.StringIO('first_name\nadmin\n'))
    self.assertEqual(list(expected_data), list(data))
    self.logout()
def generate_primes():
    # default to 10-bit keys if no bit size was entered
    if bit_number is None:
        bits = 10
    else:
        bits = int(bit_number.get())
    feedback_label = Label(gui, text='Generating {0}bit Keys . . . . . . . '.format(bits)).place(x=10, y=40)
    prime1 = random.getrandbits(bits)
    prime2 = random.getrandbits(bits)
    while not check_prime(prime1):
        prime1 = random.getrandbits(bits)
    while not check_prime(prime2):
        prime2 = random.getrandbits(bits)
    fie_function, N, decryption_key, encryption_key = generating_keys(prime1, prime2)
    print(prime1, prime2)
    feedback_label = Label(gui, text='Keys Generated').place(x=10, y=40)
    return [fie_function, N, decryption_key, encryption_key]
def buildSymmetricKey(block_encryption_algorithm=BLOCK_ENCRYPTION_AES128_CBC):
    sym_key = Object()
    block_encryption_props = blockEncryptionProperties[block_encryption_algorithm]
    sym_key.sym_key = ''.join([chr(random.getrandbits(8))
                               for i in range(0, block_encryption_props['key_size'])])
    sym_key.iv = ''.join([chr(random.getrandbits(8))
                          for i in range(0, block_encryption_props['iv_size'])])
    sym_key.block_encryption_algorithm = block_encryption_algorithm
    return sym_key
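
# random.getrandbits() comes from a non-cryptographic PRNG, so key and IV
# material built this way is predictable. A sketch of the same shape using
# OS-level randomness instead (assuming 16-byte AES-128-CBC key/IV sizes):
import os
key_bytes = os.urandom(16)  # key_size for AES-128
iv_bytes = os.urandom(16)   # iv_size for CBC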
def turn_evil(self, evilPort):
    old_ping = self.rpc_ping
    old_find_node = self.rpc_find_node
    old_find_value = self.rpc_find_value
    self.router.node.port = evilPort
    if self.evilType == "poison":
        self.rpc_find_node = self.poison_rpc_find_node
        self.rpc_find_value = self.poison_rpc_find_value
        self.false_neighbour_list = []
        for i in range(0, 30):
            fakeid = hashlib.sha1(str(random.getrandbits(255))).digest()
            fake_neighbour = [fakeid, '10.0.0.9', self.router.node.port]
            self.false_neighbour_list.append(fake_neighbour)
        _log.debug("Node with port {} prepared to execute "
                   "poisoning attack".format(self.router.node.port))
    elif self.evilType == "insert":
        self.rpc_find_node = self.sybil_rpc_find_node
        self.rpc_find_value = self.poison_rpc_find_value
        ends = bytearray([0x01, 0x02, 0x03])
        self.false_neighbour_list = []
        for i in range(0, 9):
            if i < 3:
                key = digest("APA")
            elif i > 5:
                key = digest("KANIN")
            else:
                key = digest("KOALA")
            key = key[:-1] + bytes(ends[i % 3])
            self.false_neighbour_list.append((key, '10.0.0.9', self.router.node.port))
        _log.debug("Node with port {} prepared to execute node "
                   "insertion attack".format(self.router.node.port))
    elif self.evilType == "eclipse":
        self.rpc_find_node = self.eclipse_rpc_find_node
        self.rpc_find_value = self.eclipse_rpc_find_value
        self.closest_neighbour = map(list, self.router.findNeighbors((self.router.node)))
        self.false_neighbour_list = []
        for i in range(0, 10):
            fakeid = hashlib.sha1(str(random.getrandbits(255))).digest()
            self.false_neighbour_list.append((fakeid, '10.0.0.9', self.router.node.port))
        _log.debug("Node with port {} prepared to execute eclipse "
                   "attack on {}".format(self.router.node.port, self.closest_neighbour[0][2]))
    elif self.evilType == "sybil":
        self.rpc_find_node = self.sybil_rpc_find_node
        self.rpc_find_value = self.poison_rpc_find_value
        self.false_neighbour_list = []
        for i in range(0, 30):
            fakeid = [hashlib.sha1(str(random.getrandbits(255))).digest()]
            fake_neighbour = [fakeid, '10.0.0.9', self.router.node.port]
            self.false_neighbour_list.append(fake_neighbour)
        _log.debug("Node with port {} prepared to execute "
                   "Sybil attack".format(self.router.node.port))
def test_nested_insert(self):
    squares = np.arange(NUM_TEST_RECORDS) ** 2
    squares = np.ma.masked_array(squares, np.zeros(NUM_TEST_RECORDS), dtype="float64")
    rand = np.random.uniform(0, 5, NUM_TEST_RECORDS)
    rand = np.ma.masked_array(rand, np.zeros(NUM_TEST_RECORDS), dtype="float64")
    rand_bools = [bool(random.getrandbits(1)) for _ in range(NUM_TEST_RECORDS)]
    unmasked = np.ma.masked_array(rand_bools, np.zeros(NUM_TEST_RECORDS), dtype="bool")
    rand_bools = [bool(random.getrandbits(1)) for _ in range(NUM_TEST_RECORDS)]
    masked = np.ma.masked_array(rand_bools, np.ones(NUM_TEST_RECORDS), dtype="bool")
    with monary.Monary() as m:
        m.insert(
            "monary_test", "data",
            monary.MonaryParam.from_lists(
                [squares, rand, self.seq, unmasked, masked],
                ["data.sqr", "data.rand", "sequence", "x.y.real", "x.y.fake"]))
    with pymongo.MongoClient() as c:
        col = c.monary_test.data
        for i, doc in enumerate(col.find().sort([("sequence", pymongo.ASCENDING)])):
            assert doc["sequence"] == i
            assert rand[i] == doc["data"]["rand"]
            assert squares[i] == doc["data"]["sqr"]
            assert "fake" not in doc["x"]["y"]
            assert unmasked[i] == doc["x"]["y"]["real"]
    with pymongo.MongoClient() as c:
        c.drop_database("monary_test")
def setService(username, isAdmin=False):
    '''adds a new user'''
    conn = Connection()
    try:
        import random
        import hashlib
        access = hashlib.sha1()
        secret = hashlib.sha1()
        access.update(username + str(random.getrandbits(16)))
        secret.update(str(random.getrandbits(16)) + username)
        accessHexDigest = access.hexdigest()
        secretHexDigest = secret.hexdigest()
        success = False
        for i in range(3):
            try:
                conn.executeStatement(
                    'insert into user(username, accesskey, secretkey, isAdmin) values(%s, %s, %s, %s)',
                    (username, accessHexDigest, secretHexDigest, bool(isAdmin)))
            except InternalErrorException.DatabaseIntegrityErrorException:
                access.update(str(random.getrandbits(16)))
                secret.update(str(random.getrandbits(16)))
                accessHexDigest = access.hexdigest()
                secretHexDigest = secret.hexdigest()
            else:
                success = True
                break
        if not success:
            raise InternalErrorException.KeyCollisionErrorException()
    except:
        conn.cancelAndClose()
        raise
    else:
        conn.close()
    return (accessHexDigest, secretHexDigest)
def updateTest(self, deltaTime):
    # getrandbits is around 5x faster than using randint
    bgcolors = [(random.getrandbits(7), random.getrandbits(7), random.getrandbits(7))
                for _ in range(self.total)]
    chars = [random.getrandbits(8) for _ in range(self.total)]
    fgcolor = (255, 255, 255)
    for (x, y), bgcolor, char in zip(self.cells, bgcolors, chars):
        self.console.draw_char(x, y, char, fgcolor, bgcolor)
def test_leave_unbinding_failure(self, GivenException):
    fake_docker_network_id = hashlib.sha256(str(random.getrandbits(256))).hexdigest()
    fake_docker_endpoint_id = hashlib.sha256(str(random.getrandbits(256))).hexdigest()
    fake_neutron_network_id = str(uuid.uuid4())
    self._mock_out_network(fake_neutron_network_id, fake_docker_network_id)
    fake_neutron_port_id = str(uuid.uuid4())
    self.mox.StubOutWithMock(app.neutron, "list_ports")
    neutron_port_name = utils.get_neutron_port_name(fake_docker_endpoint_id)
    fake_neutron_v4_subnet_id = str(uuid.uuid4())
    fake_neutron_v6_subnet_id = str(uuid.uuid4())
    fake_neutron_ports_response = self._get_fake_ports(
        fake_docker_endpoint_id, fake_neutron_network_id,
        fake_neutron_port_id, fake_neutron_v4_subnet_id,
        fake_neutron_v6_subnet_id)
    app.neutron.list_ports(name=neutron_port_name).AndReturn(fake_neutron_ports_response)
    fake_neutron_port = fake_neutron_ports_response["ports"][0]
    fake_message = "fake message"
    fake_exception = GivenException(fake_message)
    self._port_unbind_with_exception(fake_docker_endpoint_id, fake_neutron_port, fake_exception)
    response = self._invoke_leave_request(fake_docker_network_id, fake_docker_endpoint_id)
    self.assertEqual(w_exceptions.InternalServerError.code, response.status_code)
    decoded_json = jsonutils.loads(response.data)
    self.assertTrue("Err" in decoded_json)
    self.assertTrue(fake_message in decoded_json["Err"])
def test_delete_endpoint_port_failures(self, GivenException):
    fake_docker_network_id = hashlib.sha256(str(random.getrandbits(256))).hexdigest()
    fake_docker_endpoint_id = hashlib.sha256(str(random.getrandbits(256))).hexdigest()
    fake_neutron_network_id = str(uuid.uuid4())
    fake_neutron_subnet_v4_id = str(uuid.uuid4())
    fake_neutron_subnet_v6_id = str(uuid.uuid4())
    fake_neutron_port_id = str(uuid.uuid4())
    self._mock_out_network(fake_neutron_network_id, fake_docker_network_id)
    self.mox.StubOutWithMock(app.neutron, 'list_ports')
    fake_ports = self._get_fake_ports(
        fake_docker_endpoint_id, fake_neutron_network_id,
        fake_neutron_port_id, fake_neutron_subnet_v4_id,
        fake_neutron_subnet_v6_id)
    app.neutron.list_ports(network_id=fake_neutron_network_id).AndReturn(fake_ports)
    self._delete_port_with_exception(fake_neutron_port_id, GivenException)
    response = self._invoke_delete_request(fake_docker_network_id, fake_docker_endpoint_id)
    self.assertEqual(GivenException.status_code, response.status_code)
    decoded_json = jsonutils.loads(response.data)
    self.assertTrue('Err' in decoded_json)
    self.assertEqual({'Err': GivenException.message}, decoded_json)
def mate_random_cross(chrom1, chrom2):
    """Mating function with random gene exchange.

    Parameters
    ----------
    chrom1 : array-like (list, tuple)
        First chromosome
    chrom2 : array-like (list, tuple)
        Second chromosome

    Returns
    -------
    offspring1 : tuple
        First offspring
    offspring2 : tuple
        Second offspring
    """
    l = len(chrom1)
    offspring1 = [True] * l
    offspring2 = [True] * l
    for i in range(l):
        if random.getrandbits(1) == 0:
            offspring1[i] = chrom1[i]
        else:
            offspring1[i] = chrom2[i]
        if random.getrandbits(1) == 0:
            offspring2[i] = chrom1[i]
        else:
            offspring2[i] = chrom2[i]
    return tuple(offspring1), tuple(offspring2)
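
# Example cross of two 6-gene boolean chromosomes: each offspring gene is
# drawn independently from one parent or the other, so the two offspring
# are not complements of each other.
parent1 = (True, True, True, True, True, True)
parent2 = (False, False, False, False, False, False)
child1, child2 = mate_random_cross(parent1, parent2)
print(child1, child2)  # e.g. (True, False, True, True, False, True) ...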
def comment(userID, theArticleID):
    connect("sns", host="")
    token = request.headers.get("Token")
    authStatus = authenticateUser(token)
    if authStatus == 412:
        return Response("Must supply an authentication token", status=authStatus)
    elif authStatus == 401:
        return Response("Authorization required", status=authStatus)
    elif authStatus == 200:
        user = User.objects(emailAddress=unicode(userID)).first()
        article = Article.objects(articleID=unicode(theArticleID)).first()
        if user is not None and article is not None:
            if request.method == "POST":
                print "Current comments:", article.comments
                print "Comment to add =", str(request.data)
                # draw comment IDs until one is unused
                theCommentID = str(random.getrandbits(64))
                while Comment.objects(commentID=theCommentID).first() is not None:
                    theCommentID = str(random.getrandbits(64))
                comment = Comment(commentID=theCommentID, authorID=userID,
                                  articleID=theArticleID, comment=request.data)
                comment.save()
                article.comments.append(theCommentID)
                article.save()
                print "Comment saved"
                return Response("Saved Comment", status=authStatus, mimetype="application/json")
            elif request.method == "GET":
                result = []
                comments = Comment.objects(articleID=theArticleID)
                for comment in comments:
                    result.append({key: comment[key] for key in ("authorID", "comment")})
                result = {"comments": result}
                jsonString = json.dumps(result)
                return Response(jsonString, status=authStatus)
    else:
        return Response(status=500)
def test_create_endpoint_subnet_failures(self, GivenException):
    fake_docker_network_id = hashlib.sha256(str(random.getrandbits(256))).hexdigest()
    fake_docker_endpoint_id = hashlib.sha256(str(random.getrandbits(256))).hexdigest()
    fake_neutron_network_id = str(uuid.uuid4())
    self.mox.StubOutWithMock(app.neutron, 'list_subnets')
    app.neutron.list_subnets(
        network_id=fake_neutron_network_id,
        cidr='192.168.1.0/24').AndReturn({'subnets': []})
    app.neutron.list_subnets(
        network_id=fake_neutron_network_id,
        cidr='fe80::/64').AndReturn({'subnets': []})
    self._create_subnet_with_exception(
        fake_neutron_network_id, fake_docker_endpoint_id, GivenException())
    self._mock_out_network(fake_neutron_network_id, fake_docker_network_id)
    response = self._invoke_create_request(fake_docker_network_id, fake_docker_endpoint_id)
    self.assertEqual(GivenException.status_code, response.status_code)
    decoded_json = jsonutils.loads(response.data)
    self.assertTrue('Err' in decoded_json)
    self.assertEqual({'Err': GivenException.message}, decoded_json)
def __init__(self, mmC, rloc):
    self.accessLock = rloc
    self.tid = str(random.getrandbits(128))
    # System vars
    self.userID = "1"
    self.authenticated = False
    self.disconnect = False
    self.randomHash = str(random.getrandbits(64))
    # MM vars
    self.searchingClash = False
    self.searchingClashFound = False
    # Game vars
    self.questionsRequested = False
    # db connection
    self.dbM = ServiceDBMinor()
    # MM
    self.mmControl = mmC
    debugElo = 1000  # player Elo used for debugging
    self.mmClient = MMGameClient(debugElo, self.tid)
def test_read_and_write(self):
    # word space
    self.assertEqual(self.client.read_holding_registers(0), [0],
                     'Default value is 0 when server start')
    self.assertEqual(self.client.read_input_registers(0), [0],
                     'Default value is 0 when server start')
    # single read/write
    self.assertEqual(self.client.write_single_register(0, 0xffff), True)
    self.assertEqual(self.client.read_input_registers(0), [0xffff])
    # multi-write at max size
    words_l = [randint(0, 0xffff)] * 0x7b
    self.assertEqual(self.client.write_multiple_registers(0, words_l), True)
    self.assertEqual(self.client.read_holding_registers(0, len(words_l)), words_l)
    self.assertEqual(self.client.read_input_registers(0, len(words_l)), words_l)
    # write over sized
    words_l = [randint(0, 0xffff)] * 0x7c
    self.assertEqual(self.client.write_multiple_registers(0, words_l), None)
    # bit space
    self.assertEqual(self.client.read_coils(0), [False],
                     'Default value is False when server start')
    self.assertEqual(self.client.read_discrete_inputs(0), [False],
                     'Default value is False when server start')
    # single read/write
    self.assertEqual(self.client.write_single_coil(0, True), True)
    self.assertEqual(self.client.read_coils(0), [True])
    self.assertEqual(self.client.read_discrete_inputs(0), [True])
    # multi-write at min size
    bits_l = [getrandbits(1)] * 0x1
    self.assertEqual(self.client.write_multiple_coils(0, bits_l), True)
    self.assertEqual(self.client.read_coils(0, len(bits_l)), bits_l)
    self.assertEqual(self.client.read_discrete_inputs(0, len(bits_l)), bits_l)
    # multi-write at max size
    bits_l = [getrandbits(1)] * 0x7b0
    self.assertEqual(self.client.write_multiple_coils(0, bits_l), True)
    self.assertEqual(self.client.read_coils(0, len(bits_l)), bits_l)
    self.assertEqual(self.client.read_discrete_inputs(0, len(bits_l)), bits_l)
    # multi-write over sized
    bits_l = [getrandbits(1)] * 0x7b1
    self.assertEqual(self.client.write_multiple_coils(0, bits_l), None)
def dhtest(label, dh):
    # Test that DH(m, P) == [392*m]P
    P = (Gx, Gy)
    failed = 0
    for i in range(TEST_LOOPS):
        m = getrandbits(256)
        Q1 = dh(m, P)
        Q2 = R1toAffine(MUL_windowed(392 * m, AffineToR1(P[0], P[1])))
        if Q1 != Q2:
            failed += 1
        P = Q1
    if failed == 0:
        print "[PASS] DH-{}-392".format(label)
    else:
        print "[FAIL] DH-{}-392".format(label)

    # Test that DH has the symmetry property
    G = (Gx, Gy)
    failed = 0
    for i in range(TEST_LOOPS):
        a = getrandbits(256)
        b = getrandbits(256)
        abG = dh(a, dh(b, G))
        baG = dh(b, dh(a, G))
        if abG != baG:
            failed += 1
    if failed == 0:
        print "[PASS] DH-{}-symm".format(label)
    else:
        print "[FAIL] DH-{}-symm {}".format(label, failed)
                                           MEASUREMENT_TIME + GATE_TIME)
    # Fourth step: odd-indexed rows' qubits to their bottom neighbours
    # Apply CZ
    noisy_file_strings[i] += entangle_bottom(False, False)
    # Apply two-qubit CZ noise
    noisy_file_strings[i] += entangle_bottom(False, True)
    # Apply time-based noise
    # Time needed is 1 link + 1 Bell measurement to teleport + 1 apply gate
    noisy_file_strings[i] += time_based_noise(LINKING_TIME + MEASUREMENT_TIME + GATE_TIME)

# Generate random bit string for random T-gate application
# Must be identical across noisy files, as well as with the perfect file
t_gates_bit_string = [bool(random.getrandbits(1)) for x in range(num_qubits)]

# Apply T gates to perfect file
for i in range(num_qubits):
    if t_gates_bit_string[i]:
        perfect_file_string += get_single_qubit_gate_line(i, 'T')

# Apply T gates to noisy files
for i in range(noisy_files_count):
    for j in range(num_qubits):
        if t_gates_bit_string[j]:
            # Apply T-gate
            noisy_file_strings[i] += get_single_qubit_gate_line(j, 'T')
            # Apply single-qubit noise
            noisy_file_strings[i] += single_qubit_noise(j)
def random_flip_leftright(x):
    if bool(random.getrandbits(1)):
        return np.fliplr(x)
    else:
        return x
import random

testm = [[True, True, False], [True, False, False], [False, False, False]]
w = 20
temp = [[True for x in range(w)] for y in range(w)]
Matrix = [[bool(random.getrandbits(1)) for x in range(w)] for y in range(w)]
# Matrix = testm
on = "⬤ "
off = "○ "

def main():
    run(5)
    # setupRand()

def disp():
    for i in range(w):
        for j in range(w):
            if Matrix[i][j]:
                print(on, end=' ')
            else:
                print(off, end=' ')
        print()
    print()

def run(num):
    for x in range(num):
        gen()
        disp()
def apply(self, img, scale, **params):  # pylint: disable=arguments-differ
    rand = np.random.RandomState(random.getrandbits(32))
    return tk.ndimage.gaussian_noise(img, rand, scale)
def apply(self, img, scale, **params):  # pylint: disable=arguments-differ
    rand = np.random.RandomState(random.getrandbits(32))
    noise = rand.randn(*img.shape) * scale
    noise = scipy.ndimage.gaussian_filter(noise, 1)
    return np.uint8(np.clip(img + noise, 0, 255))
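
# Seeding a fresh np.random.RandomState from the global random module, as
# both apply() methods above do, keeps the noise reproducible whenever the
# caller fixed random.seed(...) upstream:
import random
import numpy as np
random.seed(0)
rs = np.random.RandomState(random.getrandbits(32))
print(rs.randn(3))  # the same three values on every run with seed 0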
def _random_flip_leftright(batch):
    for i in range(len(batch)):
        if bool(random.getrandbits(1)):
            batch[i] = np.fliplr(batch[i])
    return batch
def gen_boundary():
    """Returns a random string to use as the boundary for a message"""
    bits = random.getrandbits(160)
    return sha.new(str(bits)).hexdigest()
def start(self):
    from libs.svhelper import makeRequest, createTag
    from libs.svhelper import mysendto
    import socket
    # bind to 5060 - the reason is to maximize compatibility with
    # devices that disregard the source port and send replies back
    # to port 5060
    self.log.debug("binding to %s:%s" % (self.bindingip, self.localport))
    while 1:
        if self.localport > 65535:
            self.log.critical("Could not bind to any port")
            return
        try:
            self.sock.bind((self.bindingip, self.localport))
            break
        except socket.error:
            self.log.debug("could not bind to %s" % self.localport)
            self.localport += 1
    if self.originallocalport != self.localport:
        self.log.warn("could not bind to %s:%s - some process might already be listening on this port. "
                      "Listening on port %s instead" % (self.bindingip, self.originallocalport, self.localport))
        self.log.info("Make use of the -P option to specify a port to bind to yourself")
    while 1:
        r, w, e = select.select(self.rlist, self.wlist, self.xlist, self.selecttime)
        if r:
            # we got stuff to read off the socket
            try:
                buff, srcaddr = self.sock.recvfrom(8192)
                self.log.debug('got data from %s:%s' % srcaddr)
                self.log.debug('data: %s' % repr(buff))
                if self.printdebug:
                    print srcaddr
                    print buff
            except socket.error:
                continue
            self.getResponse(buff, srcaddr)
        else:
            # no stuff to read .. its our turn to send back something
            if self.nomoretoscan:
                try:
                    # having the final sip
                    self.log.debug("Making sure that no packets get lost")
                    self.log.debug("Come to daddy")
                    while 1:
                        buff, srcaddr = self.sock.recvfrom(8192)
                        if self.printdebug:
                            print srcaddr
                            print buff
                        self.getResponse(buff, srcaddr)
                except socket.error:
                    break
            try:
                nextscan = self.scaniter.next()
            except StopIteration:
                self.log.debug('no more hosts to scan')
                self.nomoretoscan = True
                continue
            dstip, dstport, method = nextscan
            self.nextip = dstip
            dsthost = (dstip, dstport)
            branchunique = '%s' % random.getrandbits(32)
            localtag = createTag('%s%s' % (''.join(map(lambda x: '%02x' % int(x), dsthost[0].split('.'))),
                                           '%04x' % dsthost[1]))
            cseq = 1
            fromaddr = '"%s"<%s>' % (self.fromname, self.fromaddr)
            toaddr = fromaddr
            callid = '%s' % random.getrandbits(80)
            contact = None
            if method != 'REGISTER':
                contact = 'sip:%s@%s:%s' % (self.extension, self.externalip, self.localport)
            data = makeRequest(method, fromaddr, toaddr, dsthost[0], dsthost[1], callid,
                               self.externalip, branchunique, compact=self.compact,
                               localtag=localtag, contact=contact, accept='application/sdp',
                               localport=self.localport, extension=self.extension)
            try:
                self.log.debug("sending packet to %s:%s" % dsthost)
                self.log.debug("packet: %s" % repr(data))
                mysendto(self.sock, data, dsthost)
                self.sentpackets += 1
                # self.sock.sendto(data, dsthost)
                if self.sessionpath is not None:
                    if self.packetcount.next():
                        try:
                            f = open(os.path.join(self.sessionpath, 'lastip.pkl'), 'w')
                            pickle.dump(self.nextip, f)
                            f.close()
                            self.log.debug('logged last ip %s' % self.nextip)
                        except IOError:
                            self.log.warn('could not log the last ip scanned')
                if self.first is not None:
                    if self.sentpackets >= self.first:
                        self.log.info('Reached the limit to scan the first %s packets' % self.first)
                        self.nomoretoscan = True
            except socket.error, err:
                self.log.error("socket error while sending to %s:%s -> %s" % (dsthost[0], dsthost[1], err))
                pass
def get_random_value_hex(nbytes):
    pseudo_random_value = random.getrandbits(8 * nbytes)
    prv_hex = "{:x}".format(pseudo_random_value)
    return prv_hex.zfill(2 * nbytes)
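
# zfill pads on the left, so short draws keep a fixed width:
print(get_random_value_hex(16))  # always 32 hex characters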
def ones_and_zeros(digits):
    # bin() returns e.g. '0b101'; slice off the prefix and left-pad with
    # zeros to the requested width
    return bin(random.getrandbits(digits))[2:].zfill(digits)
def _writeSeed(stream, bits, length):
    seed = random.getrandbits(bits)
    while len(str(seed)) != length:
        seed = random.getrandbits(bits)
    stream.write('\nset seed {}'.format(seed))
proc_type = MProcessingType.multiprocessing
if sys.platform != "linux":
    logger.warning("Manticore is only supported on Linux. Proceed at your own risk!")
    proc_type = MProcessingType.threading

consts.add(
    "mprocessing",
    default=proc_type,
    description="single: No multiprocessing at all. Single process.\n threading: use threads\n multiprocessing: use forked processes",
)
consts.add(
    "seed",
    default=random.getrandbits(32),
    description="The seed to use when randomly selecting states",
)


class ManticoreBase(Eventful):
    def __new__(cls, *args, **kwargs):
        if cls in (ManticoreBase, ManticoreSingle, ManticoreThreading, ManticoreMultiprocessing):
            raise ManticoreError("Should not instantiate this")

        cl = consts.mprocessing.to_class()
        # change ManticoreBase for the more specific class
        bases = {
            cl if issubclass(base, ManticoreBase) else base
            for base in cls.__bases__
from pwn import *
import binascii
import random

labels = []
hashes = []
key = int('6374ef84e1382b0b2913b81d3c73ba00', 0x10)
for _ in range(3):
    r = []
    test = []
    for i in range(50):
        b = bytearray([random.getrandbits(8) for _ in range(16)])
        labels.append(binascii.hexlify(b))
        _r = remote('crypto.sect.ctf.rocks', 3333)
        _r.send('\n')
        _r.send(b)
        _r.send('\n\n')
        r.append(_r)
        _r = remote('crypto.sect.ctf.rocks', 3333)
        _r.send('\n')
        _r.send(b + b)
        _r.send('\n\n')
        test.append(_r)
    for i in range(50):
        _r = r[i]
        res = _r.recvuntil('/etc/shadow.')
        h = hex(int(res.split()[-3].split(':')[-1], 0x10)
def test_unknownRPC(self):
    self.assertFalse(self.handler.receive_message(str(random.getrandbits(1400))))
def insert_data(self):
    threading.Timer(10.0, self.insert_data).start()
    readings = []
    for x in range(1, 4):
        temp_sensor_value = random.randint(15, 35)
        hum_sensor_value = random.randint(75, 95)
        pow_sensor_value = random.randint(200, 250)
        s1_sensor_value = random.randint(25, 35)
        s2_sensor_value = random.randint(25, 35)
        st_sensor_value = random.randint(50, 70)
        movement_alarm_value = random.getrandbits(1)
        smoke_alarm_value = random.getrandbits(1)
        readings.append({"rack": x, "sensor": 1, "sensor_type": "temp", "sensor_value": temp_sensor_value})
        readings.append({"rack": x, "sensor": 1, "sensor_type": "hum", "sensor_value": hum_sensor_value})
        readings.append({"rack": x, "sensor": 1, "sensor_type": "pduPower", "sensor_value": pow_sensor_value})
        readings.append({"rack": x, "sensor": 1, "sensor_type": "pduStatus1", "sensor_value": s1_sensor_value})
        readings.append({"rack": x, "sensor": 1, "sensor_type": "pduStatus2", "sensor_value": s2_sensor_value})
        readings.append({"rack": x, "sensor": 1, "sensor_type": "pduStatusT", "sensor_value": st_sensor_value})
        readings.append({"rack": x, "sensor": 1, "sensor_type": "smoke", "sensor_value": smoke_alarm_value})
        readings.append({"rack": x, "sensor": 1, "sensor_type": "movement", "sensor_value": movement_alarm_value})
    myobj = {"readings": readings}
    # myobj2 = {
    #     "readings": [
    #         {"rack": 1, "sensor": 1, "sensor_type": "temp", "sensor_value": temp_sensor_value},
    #         {"rack": 1, "sensor": 1, "sensor_type": "hum", "sensor_value": hum_sensor_value}
    #     ]
    # }
    # print(myobj)
    # print(myobj2)
    # if alarm_value != self.alarm:
    #     print('value: ' + str(alarm_value))
    #     session = requests.Session()
    #     session.trust_env = False
    #     r = session.post('http://localhost:8000/dashboard/alarm/' + str(self.alarm_int))
    #     print('b_alarm: ' + str(self.alarm) + ', b_alarm_int: ' + str(self.alarm_int))
    #     self.alarm = not self.alarm
    #     self.alarm_int = 1 - self.alarm_int
    #     print('a_alarm: ' + str(self.alarm) + ', a_alarm_int: ' + str(self.alarm_int))
    self.write_data(myobj)
def generate_salt():
    return str(random.getrandbits(32))
__author__ = 'Edgar'
import os
import sys
import random

if len(sys.argv) != 3:
    print("Usage: BaseFormConverter.py input_file output_file")
else:
    input_file = open(sys.argv[1], "r")
    output_file = open(sys.argv[2], "w")
    # random temporary file name for the mystem output
    temp_name = str(random.getrandbits(128))
    mystem_output_file = open(temp_name, "w+")
    print(temp_name)
    command = 'mystem -n -e utf-8 ' + sys.argv[1] + " " + temp_name
    print(command)
    os.system(command)
    lines = mystem_output_file.readlines()
    for line in lines:
        temp = line.replace('{', ' ').replace('}', ' ').replace('|', ' ').replace('\n', ' ')
        output_file.write(temp.split()[-1] + "\n")
    input_file.close()
    output_file.close()
    mystem_output_file.close()
def populate_single_restaurant(BasicUser):
    created_at = fakegen.date_time_this_year()
    company_name = fakegen.company()
    main_contact = BasicUser
    phone_number = random.randint(3000000, 9000000)
    schedule = populate_single_restaurant_Schedule()
    role = random.choice(['ADMIN', 'SUPER', 'MNGR', 'DEV', 'STAFF'])
    meals = random.randint(1, 10000)
    uber_eats = bool(random.getrandbits(1))
    delivery_capacity = bool(random.getrandbits(1))
    packaging = bool(random.getrandbits(1))
    health_certificate = fakegen.address()
    address = fakegen.address()
    coordinates = fakegen.name()
    latitude = 43.656560
    longitude = -79.435408
    # create application review
    app_review = ApplicationReview.objects.create(
        created_at=created_at, type=BasicUser.user_object.type,
        model_id=BasicUser, status='P')
    if random.randint(1, 1000) % 2:
        # Approve
        review = None
        app_review.status = 'A'
        app_review.admin_by_id = random.choice(UserClass.objects.filter(user_type='ADM'))
        app_review.save()
    else:
        if random.randint(1, 1000) % 2:
            # Reject
            app_review.status = 'R'
            app_review.comments = fakegen.sentence()
            app_review.admin_by_id = random.choice(UserClass.objects.filter(user_type='ADM'))
            app_review.save()
        review = app_review
    # adding a notification for the application review
    notification = Notification.objects.get_or_create(
        created_at=created_at, notification_type='A',
        is_dismissed=False, application=app_review)[0]
    notification.save()
    restaurant = Restaurant.objects.get_or_create(
        created_at=created_at,
        company_name=company_name,
        main_contact=main_contact,
        phone_number=phone_number,
        schedule=schedule,
        meals=meals,
        uber_eats=uber_eats,
        delivery_capacity=delivery_capacity,
        packaging=packaging,
        health_certificate=health_certificate,
        address=address,
        coordinates=coordinates,
        latitude=latitude,
        longitude=longitude,
        review=review)[0]
    restaurant.save()
    return restaurant
def execute(self, args, show_output=True, admin_password=None, needs_admin_created_flag=False):
    # stdlib
    import os, json
    from random import getrandbits
    from uuid import uuid4

    # Django
    from django.core.management import call_command

    # Python 2/3 compatibility
    from past.builtins import unicode

    # Zato
    # TODO: There really shouldn't be any direct dependency between zato-cli and zato-web-admin
    from zato.admin.zato_settings import update_globals
    from zato.cli import common_logging_conf_contents, is_arg_given
    from zato.common.crypto.api import WebAdminCryptoManager
    from zato.common.crypto.const import well_known_data
    from zato.common.defaults import web_admin_host, web_admin_port

    os.chdir(self.target_dir)

    repo_dir = os.path.join(self.target_dir, 'config', 'repo')
    web_admin_conf_path = os.path.join(repo_dir, 'web-admin.conf')
    initial_data_json_path = os.path.join(repo_dir, 'initial-data.json')

    os.mkdir(os.path.join(self.target_dir, 'logs'))
    os.mkdir(os.path.join(self.target_dir, 'config'))
    os.mkdir(repo_dir)

    user_name = 'admin'
    admin_password = admin_password if admin_password else WebAdminCryptoManager.generate_password()

    # If we have a CA's certificate then it implicitly means that there is some CA
    # which tells us that we are to trust both the CA and the certificates that it issues,
    # and the only certificate we are interested in is the one to the load-balancer.
    # This is why, if we get ca_certs_path, it must be because we are to use TLS
    # in communication with the load-balancer's agent which in turn means that we have crypto material on input.
    has_crypto = is_arg_given(args, 'ca_certs_path')

    if has_crypto:
        self.copy_web_admin_crypto(repo_dir, args)

    zato_secret_key = WebAdminCryptoManager.generate_key()
    cm = WebAdminCryptoManager.from_secret_key(zato_secret_key)

    django_secret_key = uuid4().hex.encode('utf8')
    django_site_id = getrandbits(20)

    admin_invoke_password = getattr(args, 'admin_invoke_password', None)

    if not admin_invoke_password:
        admin_invoke_password = '******' + uuid4().hex

    if isinstance(admin_invoke_password, unicode):
        admin_invoke_password = admin_invoke_password.encode('utf8')

    odb_password = args.odb_password or ''
    odb_password = odb_password.encode('utf8')

    config = {
        'host': web_admin_host,
        'port': web_admin_port,
        'db_type': args.odb_type,
        'log_config': 'logging.conf',
        'lb_agent_use_tls': 'false',
        'zato_secret_key': zato_secret_key,
        'well_known_data': cm.encrypt(well_known_data.encode('utf8')),
        'DATABASE_NAME': args.odb_db_name or args.sqlite_path,
        'DATABASE_USER': args.odb_user or '',
        'DATABASE_PASSWORD': cm.encrypt(odb_password),
        'DATABASE_HOST': args.odb_host or '',
        'DATABASE_PORT': args.odb_port or '',
        'SITE_ID': django_site_id,
        'SECRET_KEY': cm.encrypt(django_secret_key),
        'ADMIN_INVOKE_NAME': 'admin.invoke',
        'ADMIN_INVOKE_PASSWORD': cm.encrypt(admin_invoke_password),
    }

    for name in 'zato_secret_key', 'well_known_data', 'DATABASE_PASSWORD', 'SECRET_KEY', 'ADMIN_INVOKE_PASSWORD':
        config[name] = config[name].decode('utf8')

    open(os.path.join(repo_dir, 'logging.conf'), 'w').write(
        common_logging_conf_contents.format(log_path='./logs/web-admin.log'))
    open(web_admin_conf_path, 'w').write(config_template.format(**config))
    open(initial_data_json_path, 'w').write(initial_data_json.format(**config))

    # Initial info
    self.store_initial_info(self.target_dir, self.COMPONENTS.WEB_ADMIN.code)

    config = json.loads(open(os.path.join(repo_dir, 'web-admin.conf')).read())
    config['config_dir'] = self.target_dir
    update_globals(config, self.target_dir)

    os.environ['DJANGO_SETTINGS_MODULE'] = 'zato.admin.settings'

    import django
    django.setup()
    self.reset_logger(args, True)

    # Can't import these without DJANGO_SETTINGS_MODULE being set
    from django.contrib.auth.models import User
    from django.db import connection
    from django.db.utils import IntegrityError

    call_command('migrate', run_syncdb=True, interactive=False, verbosity=0)
    call_command('loaddata', initial_data_json_path, verbosity=0)

    try:
        call_command('createsuperuser', interactive=False, username=user_name,
                     first_name='admin-first-name', last_name='admin-last-name',
                     email='*****@*****.**')
        admin_created = True
        user = User.objects.get(username=user_name)
        user.set_password(admin_password)
        user.save()
    except IntegrityError:
        # This will happen if user 'admin' already exists, e.g. if this is not the first cluster in this database
        admin_created = False
        connection._rollback()

    # Needed because Django took over our logging config
    self.reset_logger(args, True)

    if show_output:
        if self.verbose:
            msg = """Successfully created a web admin instance.
You can start it with the 'zato start {path}' command.""".format(
                path=os.path.abspath(os.path.join(os.getcwd(), self.target_dir)))
            self.logger.debug(msg)
        else:
            self.logger.info('OK')

    # We return it only when told to explicitly so when the command runs from CLI
    # it doesn't return a non-zero exit code.
    if needs_admin_created_flag:
        return admin_created
def handle(self, *args, **options):
    if options["flush"]:
        call_command("flush", "--noinput")
    fake = Faker("hu_HU")
    users = []
    if not User.objects.filter(username="******").exists():
        admin = User()
        admin.username = "******"
        admin.is_superuser = True
        admin.is_staff = True
        admin.first_name = "Admin"
        admin.last_name = "Adminsson"
        admin.set_password("admin")
        admin.save()
        Profile.objects.create(user=User.objects.get(username="******"))
        users.append("admin")
    if not User.objects.filter(username="******").exists():
        User.objects.create(username="******", is_staff=True, first_name="Mod", last_name="Mod")
        Profile.objects.create(user=User.objects.get(username="******"))
        users.append("mod")
    if not User.objects.filter(username="******").exists():
        User.objects.create(username="******", first_name="User", last_name="User")
        Profile.objects.create(user=User.objects.get(username="******"))
        users.append("user")

    # Accounts
    for i in range(random.choice(range(3, 9))):
        # Users
        fakedata = fake.simple_profile()
        u = User()
        u.username = fakedata["username"]
        name = fakedata["name"].split(" ")
        u.first_name = name[0]
        u.last_name = name[1]
        u.save()
        # Profiles
        Profile.objects.create(user=u)
        users.append(u.username)

    # Tags
    tag_choices = []
    for t in range(random.choice(range(5, 11))):
        tag_choices.append(fake.word())

    # Questions
    for i in range(random.choice(range(3, 11))):
        ov_q = self.generate_owner_votes(users)
        q = Question()
        q.title = fake.text(max_nb_chars=50)
        q.text = fake.paragraph(nb_sentences=4)
        q.owner = Profile.objects.get(user__username=ov_q["owner"])
        q.show_username = bool(random.getrandbits(1))
        q.votes = ov_q["votes"]
        q.created_at = datetime.datetime.now()
        q.save()
        for t in random.sample(tag_choices, random.randint(0, len(tag_choices))):
            q.tags.add(t)

        # Comments
        for j in range(random.choice(range(0, 4))):
            ov_cq = self.generate_owner_votes(users)
            Comment.objects.create(
                parent_question=q,
                text=fake.paragraph(nb_sentences=4),
                owner=Profile.objects.get(user__username=ov_cq["owner"]),
                show_username=bool(random.getrandbits(1)),
                votes=ov_cq["votes"],
            )

        # Answers
        accepted_answer = -1
        answer_count = random.choice(range(0, 6))
        has_accepted = bool(random.getrandbits(1))
        if has_accepted and answer_count > 0:
            accepted_answer = random.choice(range(answer_count))
        for k in range(answer_count):
            ov_a = self.generate_owner_votes(users)
            a = Answer()
            a.text = fake.paragraph(nb_sentences=4)
            a.owner = Profile.objects.get(user__username=ov_a["owner"])
            a.show_username = bool(random.getrandbits(1))
            a.is_accepted = (k == accepted_answer)
            a.votes = ov_a["votes"]
            a.parent = q
            a.save()

            # Comments to answers
            for l in range(random.choice(range(0, 4))):
                ov_ca = self.generate_owner_votes(users)
                Comment.objects.create(
                    parent_answer=a,
                    text=fake.paragraph(nb_sentences=4),
                    owner=Profile.objects.get(user__username=ov_ca["owner"]),
                    show_username=bool(random.getrandbits(1)),
                    votes=ov_ca["votes"],
                )
def keygen(n):
    # draw P-bit values until one is odd (the scheme needs an odd key)
    key = random.getrandbits(P)
    while key % 2 == 0:
        key = random.getrandbits(P)
    return key
from random import getrandbits
from ipaddress import IPv4Address
import time, sys, random
from randmac import RandMac  # RandMac comes from the third-party 'randmac' package

def print_slow(text):
    for letter in text:
        sys.stdout.write(letter)
        sys.stdout.flush()
        time.sleep(0.1)

initial_sequence_number_host_a = random.randrange(100, 999)
initial_sequence_number_host_b = random.randrange(100, 999)

bits = getrandbits(32)
host_a_source_addr = IPv4Address(bits)
host_a_source_mac = RandMac("00:00:00:00:00:00")
host_b_source_mac = RandMac("00:00:00:00:00:00")
bits = getrandbits(32)
host_b_source_addr = IPv4Address(bits)

print("host a: sending an arp request to host b", host_b_source_addr)
print("outgoing arp request:")
print_slow("...............\n")
time.sleep(1)
print("Source hardware address:", host_a_source_mac)
print("Source protocol address:", host_a_source_addr)
print("Target hardware address: 00:00:00:00:00:00")
print("Target protocol address:", host_b_source_addr)
def perform_health_checks(random, settings, test_runner, search_strategy):
    # Tell pytest to omit the body of this function from tracebacks
    __tracebackhide__ = True
    if not settings.perform_health_check:
        return
    if not Settings.default.perform_health_check:
        return

    health_check_random = Random(random.getrandbits(128))
    # We "pre warm" the health check with one draw to give it some
    # time to calculate any cached data. This prevents the case
    # where the first draw of the health check takes ages because
    # of loading unicode data the first time.
    data = ConjectureData(
        max_length=settings.buffer_size,
        draw_bytes=lambda data, n, distribution: distribution(health_check_random, n)
    )
    with Settings(settings, verbosity=Verbosity.quiet):
        try:
            test_runner(data, reify_and_execute(
                search_strategy,
                lambda *args, **kwargs: None,
            ))
        except BaseException:
            pass

    count = 0
    overruns = 0
    filtered_draws = 0
    start = time.time()
    while (
        count < 10 and time.time() < start + 1 and
        filtered_draws < 50 and overruns < 20
    ):
        try:
            data = ConjectureData(
                max_length=settings.buffer_size,
                draw_bytes=lambda data, n, distribution: distribution(health_check_random, n)
            )
            with Settings(settings, verbosity=Verbosity.quiet):
                test_runner(data, reify_and_execute(
                    search_strategy,
                    lambda *args, **kwargs: None,
                ))
            count += 1
        except UnsatisfiedAssumption:
            filtered_draws += 1
        except StopTest:
            if data.status == Status.INVALID:
                filtered_draws += 1
            else:
                assert data.status == Status.OVERRUN
                overruns += 1
        except InvalidArgument:
            raise
        except Exception:
            escalate_hypothesis_internal_error()
            if HealthCheck.exception_in_generation in settings.suppress_health_check:
                raise
            report(traceback.format_exc())
            if test_runner is default_new_style_executor:
                fail_health_check(
                    settings,
                    'An exception occurred during data '
                    'generation in initial health check. '
                    'This indicates a bug in the strategy. '
                    'This could either be a Hypothesis bug or '
                    "an error in a function you've passed to "
                    'it to construct your data.',
                    HealthCheck.exception_in_generation,
                )
            else:
                fail_health_check(
                    settings,
                    'An exception occurred during data '
                    'generation in initial health check. '
                    'This indicates a bug in the strategy. '
                    'This could either be a Hypothesis bug or '
                    'an error in a function you\'ve passed to '
                    'it to construct your data. Additionally, '
                    'you have a custom executor, which means '
                    'that this could be your executor failing '
                    'to handle a function which returns None. ',
                    HealthCheck.exception_in_generation,
                )
    if overruns >= 20 or (not count and overruns > 0):
        fail_health_check(settings, (
            'Examples routinely exceeded the max allowable size. '
            '(%d examples overran while generating %d valid ones)'
            '. Generating examples this large will usually lead to'
            ' bad results. You should try setting average_size or '
            'max_size parameters on your collections and turning '
            'max_leaves down on recursive() calls.') % (overruns, count),
            HealthCheck.data_too_large)
    if filtered_draws >= 50 or (not count and filtered_draws > 0):
        fail_health_check(settings, (
            'It looks like your strategy is filtering out a lot '
            'of data. Health check found %d filtered examples but '
            'only %d good ones. This will make your tests much '
            'slower, and also will probably distort the data '
            'generation quite a lot. You should adapt your '
            'strategy to filter less. This can also be caused by '
            'a low max_leaves parameter in recursive() calls') % (
                filtered_draws, count),
            HealthCheck.filter_too_much)
    runtime = time.time() - start
    if runtime > 1.0 or count < 10:
        fail_health_check(settings, (
            'Data generation is extremely slow: Only produced '
            '%d valid examples in %.2f seconds (%d invalid ones '
            'and %d exceeded maximum size). Try decreasing '
            "size of the data you're generating (with e.g. "
            'average_size or max_leaves parameters).'
        ) % (count, runtime, filtered_draws, overruns),
            HealthCheck.too_slow,
        )
def encrypt(key, aBit):
    q = random.getrandbits(Q)
    m_a = 2 * random.getrandbits(N - 1)  # even noise term
    c = key * q + m_a + aBit
    return c
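
# A hedged sketch of the matching decryption for this DGHV-style symmetric
# scheme (decrypt is an assumption here, not part of the snippet above):
# reducing mod the odd key removes key*q, and reducing mod 2 removes the
# even noise term, leaving the bit - valid as long as m_a + aBit < key.
def decrypt(key, c):
    return (c % key) % 2

# e.g. with keygen() above: key = keygen(P); assert decrypt(key, encrypt(key, 1)) == 1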
def data_generator(batch_size, agent: Agent, opponents, model, gamma=0.92, move_penalty=-0.002):
    """Generates data for the model.

    Arguments:
        batch_size (int): how many datapoints should be generated
        agent (Agent): the main agent for the data
        opponents (list): the opponents for the agent
        model (Model): the model that should be used to evaluate states
        gamma (float): the discount factor for the added value estimate of the next state
        move_penalty (float): the reward that should be added to each move

    Returns:
        ndarray: the game states (after an action)
        ndarray: the computed values of the states
    """
    env = gym.make('Bao-v0')
    while True:
        X = []
        y = []
        while len(X) < batch_size:
            env.reset()
            opponent = choice(opponents)
            if getrandbits(1) == 1:
                current_agent = agent  # the main agent goes first
                waiting_agent = opponent
            else:
                current_agent = opponent
                waiting_agent = agent
            states_after_action = [[], []]
            while not env.done:
                state = np.copy(env.state)
                current_player = env.current_player
                next_state, reward, done, _ = env.step(
                    current_agent.move(state, get_available_actions(state=env.state)))
                states_after_action[current_player].append(next_state)
                current_agent, waiting_agent = waiting_agent, current_agent

            # Compute state values (The states are the results of actions.)
            values = [
                np.full(fill_value=move_penalty, shape=(len(states_after_action[0]),), dtype=np.float),
                np.full(fill_value=move_penalty, shape=(len(states_after_action[1]),), dtype=np.float)
            ]
            outcome = env.outcome
            values[0][-1] += -outcome * 10
            values[1][-1] += outcome * 10

            # Add estimates of the new states to every but the last states (=> [:-1])
            flipped_states = [[flip_board(state) for state in states_after_action[i][:-1]]
                              for i in range(2)]
            possible_states = [[[np.reshape(get_board_after_action(action, state), newshape=(32,))
                                 for action in get_available_actions(state)]
                                for state in flipped_states[i]]
                               for i in range(2)]
            estimates = [[np.reshape(model.predict(encode_states(x, ARCHITECTURE)), newshape=(-1,))
                          for x in possible_states[i]]
                         for i in range(2)]  # TODO: optimize
            next_state_values = [
                np.asarray([-np.max(x) for x in estimates[i]], dtype=np.float)
                for i in range(2)
            ]
            for z in range(2):
                values[z][:-1] += gamma * next_state_values[z]
                X.extend([np.reshape(state, newshape=(32,)) for state in states_after_action[z]])
                y.extend(values[z])
        X = encode_states(X, ARCHITECTURE)
        X, y = shuffle(X, np.asarray(y), random_state=RANDOM_STATE)
        yield X[:batch_size], y[:batch_size]
def new_random():
    import random
    return random.Random(random.getrandbits(128))
def encoding_categorical_feature(dataset_dict: dict, feature_name: str,
                                 print_results: Union[bool, int] = True,
                                 print_counter: int = 0) -> dict:
    """Single categorical feature string encoder

    This function encodes categorical features. It is possible to use train data alone or all
    train data, validation data and test data. If all datasets are provided (i.e. train, valid
    and test), they will be concatenated first and then encoded.

    :param int print_counter: if print_results is an int, the counter controls printing data
            to the console based on the print_results value.
    :param Union[bool, int] print_results: If False, no data is printed to the console. If True,
            all data is printed to the console. If an integer n, only the data for n features
            is printed to the console.
    :param str feature_name: The name of the feature/column whose values should be encoded.
    :param dict dataset_dict: a dictionary of pandas series (i.e. one column) that must contain
            the train data and optionally contains valid data and test data
    :return: dataset_dict_encoded: a dictionary of pandas series (i.e. one column) after encoding.
    """
    # Replacing the missing values with a special hash value to avoid having the same class for
    # missing values and non-missing values.
    hash_missing_value = hex(random.getrandbits(128))
    logger.debug(f"The hash for the missing values is {hash_missing_value}")

    # check printing
    if isinstance(print_results, bool) and print_results:
        print(f"there are {len(dataset_dict)} datasets provided")
    elif isinstance(print_results, int):
        if print_counter < print_results:
            print(f"there are {len(dataset_dict)} datasets provided")

    # Concatenate datasets
    valid_dataset_list = []
    valid_dataset_keys = []
    for key_i, dataseries in dataset_dict.items():
        if dataseries.shape[0] > 0:
            valid_dataset_list.append(dataseries)  # get the dataframes
            valid_dataset_keys.append(key_i)  # get the keys
    if len(valid_dataset_list) > 1:
        x_original = pd.concat(valid_dataset_list, axis=0)
    elif len(valid_dataset_list) == 1:
        x_original = valid_dataset_list[0]
    else:
        raise ValueError("No valid dataset was provided")

    # define the encoder
    label_encoder = preprocessing.LabelEncoder()
    label_encoder.fit(x_original.fillna(hash_missing_value))
    dataset_dict_encoded = {}

    # encoding loop
    for dataset_key in valid_dataset_keys:
        dataset_dict_encoded[dataset_key] = label_encoder.transform(
            dataset_dict[dataset_key].fillna(hash_missing_value))
        labels_nr = len(list(label_encoder.classes_))
        if isinstance(print_results, bool) and print_results:
            print(f"encoding the feature in the dataset {dataset_key}")
            print(f"the number of classes in {feature_name} feature is: {labels_nr}")
        elif isinstance(print_results, int):
            if print_counter < print_results:
                print(f"encoding the feature in the dataset {dataset_key}")
                print(f"the number of classes in {feature_name} feature is: {labels_nr}")
    logger.info(f"Encoding categorical feature {feature_name} process is finished!")
    return dataset_dict_encoded
def random_hash():
    bits = random.getrandbits(128)
    return "%032x" % bits
def run_test(self):
    q = 73829871667027927151400291810255409637272593023945445234219354687881008052707
    pow2 = 2 ** 256
    self.description = "Covers the 'Wrapped Serials Attack' scenario."
    self.init_test()

    INITIAL_MINED_BLOCKS = 351  # blocks mined before minting
    MORE_MINED_BLOCKS = 31      # blocks mined after minting (before spending)
    DENOM_TO_USE = 1000         # zc denomination used for double spending attack
    K_BITSIZE = 128             # bitsize of the range for random K
    NUM_OF_K = 5                # number of wrapping serials to try

    # 1) Start mining blocks
    self.log.info("Mining %d first blocks..." % INITIAL_MINED_BLOCKS)
    self.node.generate(INITIAL_MINED_BLOCKS)
    sleep(2)

    # 2) Mint zerocoins
    self.log.info("Minting %d-denom zLTVs..." % DENOM_TO_USE)
    balance = self.node.getbalance("*", 100)
    assert_greater_than(balance, DENOM_TO_USE)
    total_mints = 0
    while balance > DENOM_TO_USE:
        try:
            self.node.mintzerocoin(DENOM_TO_USE)
        except JSONRPCException:
            break
        sleep(1)
        total_mints += 1
        self.node.generate(1)
        sleep(1)
        if total_mints % 5 == 0:
            self.log.info("Minted %d coins" % total_mints)
        if total_mints >= 20:
            break
        balance = self.node.getbalance("*", 100)
    sleep(2)

    # 3) Mine more blocks and collect the mint
    self.log.info("Mining %d more blocks..." % MORE_MINED_BLOCKS)
    self.node.generate(MORE_MINED_BLOCKS)
    sleep(2)
    mint = self.node.listmintedzerocoins(True, True)[0]

    # 4) Get the raw zerocoin data
    exported_zerocoins = self.node.exportzerocoins(False)
    zc = [x for x in exported_zerocoins if mint["serial hash"] == x["id"]]
    if len(zc) == 0:
        raise AssertionError("mint not found")

    # 5) Spend the minted coin (mine two more blocks)
    self.log.info("Spending the minted coin with serial %s and mining two more blocks..." % zc[0]["s"])
    txid = self.node.spendzerocoinmints([mint["serial hash"]])['txid']
    self.log.info("Spent on tx %s" % txid)
    self.node.generate(2)
    sleep(2)

    # 6) Create the new serials
    new_serials = []
    for i in range(NUM_OF_K):
        K = random.getrandbits(K_BITSIZE)
        new_serials.append(hex(int(zc[0]["s"], 16) + K * q * pow2)[2:])
    randomness = zc[0]["r"]
    privkey = zc[0]["k"]

    # 7) Spend the new zerocoins
    for serial in new_serials:
        self.log.info("Spending the wrapping serial %s" % serial)
        tx = None
        try:
            tx = self.node.spendrawzerocoin(serial, randomness, DENOM_TO_USE, privkey)
        except JSONRPCException as e:
            exc_msg = str(e)
            if exc_msg == "CoinSpend: failed check (-4)":
                self.log.info("GOOD: Transaction did not verify")
            else:
                raise e
        if tx is not None:
            self.log.warning("Tx is: %s" % tx)
            raise AssertionError("TEST FAILED")
    self.log.info("%s PASSED" % self.__class__.__name__)
def findGoodPrime(numBits=512):
    candidate = 1
    while not goodPrime(candidate):
        candidate = random.getrandbits(numBits)
    return candidate
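
# Usage sketch for findGoodPrime(); goodPrime() is whatever primality/shape
# predicate the surrounding module defines (assumed here, not shown). The
# candidate starts at 1, so unless goodPrime(1) holds, the loop draws at
# least once.
p = findGoodPrime(numBits=512)
print(p.bit_length())  # usually 512; can be less, since the top bit may be 0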