def test():
    """Self-test: the module-level sha512 implementation must agree with _sha512.

    Bug fix: the test string was a str literal; hash objects require bytes on
    Python 3 (str input raises TypeError), so encode it as a bytes literal —
    this also makes the block identical to the corrected duplicate elsewhere
    in this file.
    """
    import _sha512
    a_str = b"just a test string"
    # Empty input, single input, repeated input.
    assert _sha512.sha512().hexdigest() == sha512().hexdigest()
    assert _sha512.sha512(a_str).hexdigest() == sha512(a_str).hexdigest()
    assert _sha512.sha512(a_str * 7).hexdigest() == sha512(a_str * 7).hexdigest()
    # Incremental update must equal one-shot hashing of the concatenation.
    s = sha512(a_str)
    s.update(a_str)
    assert _sha512.sha512(a_str + a_str).hexdigest() == s.hexdigest()
def test():
    """Compare the module-level sha512 against the accelerator _sha512 module."""
    import _sha512
    sample = b"just a test string"
    # Empty, single, and repeated payloads must all hash identically.
    for payload in (b"", sample, sample * 7):
        assert _sha512.sha512(payload).hexdigest() == sha512(payload).hexdigest()
    # Feeding the data in two update() steps must match one-shot hashing.
    incremental = sha512(sample)
    incremental.update(sample)
    assert incremental.hexdigest() == _sha512.sha512(sample + sample).hexdigest()
def OnButtonCheck1(self, event):
    """Compute every supported digest of the input text and display each one."""
    src = str(self.inputN.GetValue().encode('utf8'))
    # (hash constructor, output widget) pairs, in display order.
    targets = (
        (md5.new, self.Md5),
        (_sha.new, self.sha1),
        (_sha256.sha224, self.sha224),
        (_sha256.sha256, self.sha256),
        (_sha512.sha384, self.sha384),
        (_sha512.sha512, self.sha512),
    )
    for make_hash, widget in targets:
        hasher = make_hash()
        hasher.update(src)
        widget.SetValue(hasher.hexdigest().decode('utf8'))
def OnButtonCheckED(self, event):
    """Hash the contents of the file named in the input field; show every digest."""
    c = 'utf8'
    dlg = str(self.inputD.GetValue())
    # Read the file as raw bytes.
    with open(dlg, 'rb') as EDfile:
        p = EDfile.read()
    src = str(p)
    # (hash constructor, output widget) pairs, in display order.
    jobs = (
        (md5.new, self.Md5),
        (_sha.new, self.sha1),
        (_sha256.sha224, self.sha224),
        (_sha256.sha256, self.sha256),
        (_sha512.sha384, self.sha384),
        (_sha512.sha512, self.sha512),
    )
    for make_hash, field in jobs:
        digest = make_hash()
        digest.update(src)
        field.SetValue(digest.hexdigest().decode(c))
def OnButtonCheckED(self, event):
    """Hash the contents of the file named in the input field; show every digest.

    NOTE(review): this appears to be a near-duplicate of another
    OnButtonCheckED handler in this file (only whitespace differs) —
    confirm which one is actually bound to the button.
    NOTE(review): hexdigest().decode(c) implies Python 2 hash objects
    (str digests with .decode); on Python 3 hexdigest() already returns str.
    """
    c = 'utf8'
    # Path typed by the user; the file is read as raw bytes.
    dlg = str(self.inputD.GetValue())
    with open(dlg,'rb') as EDfile:
        p = EDfile.read()
    # str() of the read bytes — presumably Python 2, where this is a no-op;
    # on Python 3 it would embed the b'...' wrapper in the hashed text.
    src = str(p)
    m1 = md5.new()
    m1.update(src)
    self.Md5.SetValue(m1.hexdigest().decode(c))
    m2 = _sha.new()
    m2.update(src)
    self.sha1.SetValue(m2.hexdigest().decode(c))
    m3 = _sha256.sha224()
    m3.update(src)
    self.sha224.SetValue(m3.hexdigest().decode(c))
    m4 = _sha256.sha256()
    m4.update(src)
    self.sha256.SetValue(m4.hexdigest().decode(c))
    m5 = _sha512.sha384()
    m5.update(src)
    self.sha384.SetValue(m5.hexdigest().decode(c))
    m6 = _sha512.sha512()
    m6.update(src)
    self.sha512.SetValue(m6.hexdigest().decode(c))
def generateVerificationCodeChallenge(verification_code: str) -> str:
    """
    Compute the base64-encoded SHA-512 digest of the given string.

    :param verification_code: The string to hash.
    :return: The digest, base64-encoded with ``_`` and ``-`` as the
        alternative characters (replacing ``+`` and ``/``).
    """
    digest = sha512(verification_code.encode()).digest()
    challenge = b64encode(digest, altchars=b"_-")
    return challenge.decode()
def create_browser_id():
    """Derive a browser identifier from the client address and User-Agent.

    Returns the SHA-512 hex digest of "<remote_addr>|<user_agent>".

    Bug fix: the original condition was inverted (`if not agent:`), so the
    User-Agent was encoded only when the header was MISSING (hashing the
    text "b'None'") and a real User-Agent was never encoded. Encode the
    header when it is present instead, mirroring _create_identifier()
    elsewhere in this file.
    """
    agent = request.headers.get('User-Agent')
    if agent is not None:
        agent = agent.encode('utf-8')
    base_str = "%s|%s" % (get_remote_addr(), agent)
    h = sha512()
    h.update(base_str.encode('utf8'))
    return h.hexdigest()
def test_sha512_sanity(self):
    """Sanity-check _sha512 empty-digest constants, update() and copy().

    NOTE(review): the expectations (str digest literal, name "SHA512",
    str argument to update()) match IronPython / Python 2 semantics; on
    CPython 3 digest() returns bytes, name is "sha512" and update()
    requires bytes — confirm the target runtime before porting.
    """
    x = _sha512.sha512()
    self.assertEqual(x.block_size, 128)
    # SHA-512 of the empty input, expressed as a string literal.
    self.assertEqual(x.digest(), "\xcf\x83\xe15~\xef\xb8\xbd\xf1T(P\xd6m\x80\x07\xd6 \xe4\x05\x0bW\x15\xdc\x83\xf4\xa9!\xd3l\xe9\xceG\xd0\xd1<]\x85\xf2\xb0\xff\x83\x18\xd2\x87~\xec/c\xb91\xbdGAz\x81\xa582z\xf9'\xda>")
    self.assertEqual(x.digest_size, 64)
    # digest_size and the legacy digestsize alias must agree.
    self.assertEqual(x.digest_size, x.digestsize)
    self.assertEqual(x.hexdigest(), 'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e')
    self.assertEqual(x.name, "SHA512")
    x.update("abc")
    # Known SHA-512 of "abc".
    self.assertEqual(x.hexdigest(), 'ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f')
    x_copy = x.copy()
    # copy() must return a distinct object that reports the same digest.
    self.assertTrue(x!=x_copy)
    self.assertEqual(x.hexdigest(), x_copy.hexdigest())
def _create_identifier():
    """
    Build a session identifier from the remote address and User-Agent.

    The two values are joined as "<addr>|<user-agent>", hashed with
    SHA-512, and a random UUID is appended to the hex digest.
    :return: the identifier string
    """
    agent = request.headers.get('User-Agent')
    if agent is not None:
        agent = agent.encode('utf-8')
    base = '{0}|{1}'.format(_get_remote_addr(), agent)
    if str is bytes:
        # Python 2: normalize to unicode before hashing.
        base = text_type(base, 'utf-8', errors='replace')  # pragma: no cover
    digest = sha512()
    digest.update(base.encode('utf8'))
    return digest.hexdigest() + str(uuid.uuid4())
def test_sha512_sanity():
    """Sanity-check _sha512 empty-digest constants, update() and copy().

    NOTE(review): the expectations (str digest literal, name "SHA512",
    str argument to update()) match IronPython semantics; CPython 3
    returns bytes from digest() and names the algorithm "sha512".
    """
    x = _sha512.sha512()
    AreEqual(x.block_size, 128)
    # SHA-512 of the empty input, expressed as a string literal.
    AreEqual(x.digest(), "\xcf\x83\xe15~\xef\xb8\xbd\xf1T(P\xd6m\x80\x07\xd6 \xe4\x05\x0bW\x15\xdc\x83\xf4\xa9!\xd3l\xe9\xceG\xd0\xd1<]\x85\xf2\xb0\xff\x83\x18\xd2\x87~\xec/c\xb91\xbdGAz\x81\xa582z\xf9'\xda>")
    AreEqual(x.digest_size, 64)
    # digest_size and the legacy digestsize alias must agree.
    AreEqual(x.digest_size, x.digestsize)
    AreEqual(x.hexdigest(), 'cf83e1357eefb8bdf1542850d66d8007d620e4050b5715dc83f4a921d36ce9ce47d0d13c5d85f2b0ff8318d2877eec2f63b931bd47417a81a538327af927da3e')
    AreEqual(x.name, "SHA512")
    x.update("abc")
    # Known SHA-512 of "abc".
    AreEqual(x.hexdigest(), 'ddaf35a193617abacc417349ae20413112e6fa4e89a97ea20a9eeee64b55d39a2192992a274fc1a836ba3c23a3feebbd454d4423643ce80e2a9ac94fa54ca49f')
    x_copy = x.copy()
    # copy() must return a distinct object that reports the same digest.
    Assert(x!=x_copy)
    AreEqual(x.hexdigest(), x_copy.hexdigest())
def password_generator(credentials, n):
    """Derive an n-character password from the SHA-512 hash of *credentials*.

    Recurses (re-hashing the previous hash) until enough usable characters
    are collected and the result contains lower/upper/digit/special chars.

    NOTE(review): `hash` shadows the builtin of the same name (local only).
    NOTE(review): `isspecial` is presumably defined elsewhere in this file.
    """
    # Generates sha512 hash (128 hex characters).
    hash = _sha512.sha512(credentials.encode()).hexdigest()
    lst = []
    # Pairs every two hexadecimal digits into a byte value (0..255).
    # Values in [192, 256) are discarded; the rest are mapped into the
    # printable-ASCII range 30..125 and saved in lst.
    for i in range(0,128,2):
        j = int(hash[i]+hash[i+1], 16)
        if 96 <= j < 192:
            lst.append(j - 66)
        elif j < 96:
            lst.append(j+30)
    # If the number of usable values is less than needed, start again with
    # the hash as the new credentials. In that case the returned list
    # already holds characters instead of numbers.
    if (len(lst) < n):
        lst = password_generator(hash, n)
    # Converts integers in the list to their ASCII equivalent and keeps
    # only the first n entries. (The conversion is skipped when the
    # recursion above already produced characters.)
    if type(lst[0]) != str:
        for i in range(n):
            lst[i] = str(chr(lst[i]))
    lst = lst[:n]
    # Checks that the password has uppercase, lowercase, digit and special
    # characters; if any class is missing, start again with the hash.
    if not (any(c.islower() for c in lst) and any(c.isupper() for c in lst) and any(c.isdigit() for c in lst) and any(isspecial(c) for c in lst)):
        lst = password_generator(hash, n)
    return lst
def __construct_merkel_tree(self, transactions):
    """Recursively build a Merkle tree level-by-level from *transactions*.

    Pairs adjacent nodes, hashes the concatenation of their data, and
    recurses on the parent level until a single root remains, which is
    stored in self.__merkel_root.

    NOTE(review): sha512() requires bytes — this only works if get_data()
    returns bytes; a str return would raise TypeError on Python 3. Confirm.
    NOTE(review): `node.__left_child` is name-mangled to
    `node._<ThisClass>__left_child`, i.e. the attribute set on the
    MerkelNode instance carries THIS class's mangled name — verify that is
    what MerkelNode consumers expect.
    """
    onward_transactions = []
    temp = None
    flag = True
    # A single node left: it is the root; recursion ends here.
    if transactions.__len__() == 1:
        self.__merkel_root = transactions[0]
        return
    # Odd count: duplicate the last node so every node has a partner.
    if not transactions.__len__() % 2 == 0:
        transactions.append(transactions[-1])
    for transaction in transactions:
        if flag:
            # First element of the pair; remember it and wait for the second.
            temp = transaction
            flag = False
        else:
            flag = True
            # Second element: hash the concatenated pair into a parent node.
            string = temp.get_data() + transaction.get_data()
            data = sha512(string).hexdigest()
            node = MerkelNode(data)
            node.__left_child = temp
            node.__right_child = transaction
            onward_transactions.append(node)
    # Recurse on the newly built parent level.
    self.__construct_merkel_tree(onward_transactions)
def gen_fn_hash(s):
    """Return the SHA-512 hex digest of *s*."""
    hasher = sha512(s)
    return hasher.hexdigest()
def main():
    """Generate per-application traffic, normalize it to the requested
    per-workload byte percentages, and dump the result to
    trafficJson.pickle / trafficJson.json.

    NOTE(review): relies on module-level names defined elsewhere in the
    file: desiredWorkload, workload_generator, json, random, pickle,
    _sha512. Indentation below is reconstructed from collapsed source.
    """
    global desiredWorkload
    sha512 = _sha512.sha512()
    #Adjust desiredWorkload percentages to sum to 1
    sumPercentages = 0
    for workload in desiredWorkload:
        sumPercentages += desiredWorkload[workload]["traffic_percentage"]
    for workload in desiredWorkload:
        desiredWorkload[workload]["traffic_percentage"] /= sumPercentages
    del sumPercentages
    trafficJson = {"input": desiredWorkload, "workload": {}, "packets_n_sizes_formatting": {}}
    #For each desired application traffic
    for workload in desiredWorkload:
        #Create a new entry if one doesn't exist yet
        if workload not in trafficJson["workload"]:
            trafficJson["workload"][workload] = {"applications": {}}
        #Create traffic associated with a given application
        output = workload_generator[workload](**desiredWorkload[workload]["application_config"])
        #Calculate hash for the application (note: the hash object is
        #cumulative across iterations, so each address also depends on the
        #previously generated applications)
        outputJson = json.dumps(output)
        sha512.update(outputJson.encode())
        outputSha = sha512.digest().__repr__()
        #Use the hash to address it
        trafficJson["workload"][workload]["applications"][outputSha] = output
        del outputJson
        #Convert whatever format it came from the charge_generator to a universal format for traffic injectors
        last_output_universal_traffic_format = {}
        time_accumulator = 0
        if workload == "iot":
            for i in range(len(output[0]["time_to_send"])):
                # Determine size of packet and time offset for when it should be transmitted
                last_output_universal_traffic_format[output[0]["time_to_send"][i]] = int(output[0]["packet_size"] + 1)
                pass
            pass
        elif workload == "voip":
            # NOTE(review): the entry keyed at time 0 is written again on the
            # first loop iteration (time_accumulator is advanced AFTER the
            # write), so the initial packet is overwritten — confirm intended.
            last_output_universal_traffic_format[time_accumulator] = int(output[0]["packet_size"]+1)
            for i in range(1, len(output[0]["time_between_packets"])):
                # Determine size of packet and time offset for when it should be transmitted
                last_output_universal_traffic_format[time_accumulator] = int(output[0]["packet_size"]+1)
                time_accumulator += output[0]["time_between_packets"][i - 1]
                pass
            pass
        elif workload == "stream":
            #Deal with first element and then proceed to others
            last_output_universal_traffic_format[time_accumulator] = int((output[0]["time_downloading"][0] * (output[0]["video_codification"] / 8)) + 1)  # time downloading * byte rate
            for i in range(1, len(output[0]["time_downloading"])):
                #Determine size of packet and time offset for when it should be transmitted
                last_output_universal_traffic_format[time_accumulator] = int((output[0]["time_downloading"][i]*(output[0]["video_codification"]/8))+1) #time downloading * byte rate
                time_accumulator += output[0]["time_between_segments"][i-1]
                pass
            pass
        elif workload == "web":
            for i in range(len(output[0]["series"])):
                #Determine size of packet and time offset for when it should be transmitted
                last_output_universal_traffic_format[i] = int(output[0]["series"][i]+1) #bytes to download
                pass
            pass
        else:
            print("Something went pretty darn wrong")
            return -1
        trafficJson["packets_n_sizes_formatting"][(outputSha, workload)] = last_output_universal_traffic_format
        #Measure how many bytes were transmitted per application
        trafficJson["workload"][workload]["applications"][outputSha][0]["totalBytes"] = sum(last_output_universal_traffic_format.values())
        trafficJson["workload"][workload]["applications"][outputSha][0]["duration"] = sorted(list(last_output_universal_traffic_format.keys()))[-1]
        del outputSha, output, time_accumulator, last_output_universal_traffic_format
    # NOTE(review): `del i` assumes every branch above bound `i`, i.e. the
    # last workload had at least one packet — NameError otherwise.
    del i, sha512
    #Measure overall traffic percentages for both applications and traffic workloads
    trafficTotalBytes = 0
    trafficTotalDuration = 0
    for workload in desiredWorkload:
        workloadTotalBytes = 0
        workloadTotalDuration = 0
        for application in trafficJson["workload"][workload]["applications"]:
            traffic = trafficJson["workload"][workload]["applications"][application][0]["totalBytes"]
            duration = trafficJson["workload"][workload]["applications"][application][0]["duration"]
            workloadTotalBytes += traffic
            workloadTotalDuration += duration
        trafficTotalBytes += workloadTotalBytes
        trafficTotalDuration += workloadTotalDuration
        trafficJson["workload"][workload]["totalBytes"] = workloadTotalBytes
        trafficJson["workload"][workload]["duration"] = workloadTotalDuration
    del traffic
    maxDuration = 0
    for workload in desiredWorkload:
        for application in trafficJson["workload"][workload]["applications"]:
            trafficJson["workload"][workload]["applications"][application][0]["traffic_percentage"] = trafficJson["workload"][workload]["applications"][application][0]["totalBytes"]/trafficTotalBytes
        trafficJson["workload"][workload]["trafficPercentage"] = trafficJson["workload"][workload]["totalBytes"]/trafficTotalBytes
        maxDuration = trafficJson["workload"][workload]["duration"] if trafficJson["workload"][workload]["duration"] > maxDuration else maxDuration
    # Find application/workload with biggest gap to the expected traffic
    biggestPercentageGap = (0, None)
    for workload in desiredWorkload:
        gap = desiredWorkload[workload]["traffic_percentage"] - trafficJson["workload"][workload]["trafficPercentage"]
        if gap > biggestPercentageGap[0]:
            biggestPercentageGap = (gap, workload)
    #Adjust overall traffic to fit specified percentages
    # NOTE(review): `iterations` is counted but never read — presumably a
    # leftover debugging aid.
    iterations = 0
    while(biggestPercentageGap[0] > 0.001):
        iterations += 1
        workload = biggestPercentageGap[1]
        #If the application duration is not similar to the one with the maximum duration, randomly select a package from the already existing and append it
        if trafficJson["workload"][workload]["duration"] < (maxDuration*0.9):
            application = list(trafficJson["workload"][workload]["applications"])[0]
            packets = sorted(list(trafficJson["packets_n_sizes_formatting"][(application, workload)].items()))
            packets_to_copy = [int(random.gauss(len(packets), len(packets)) %len(packets)) for _ in range(10)]
            lastPacket = packets[-1]
            totalBytes = 0
            duration = 0
            for packet in packets_to_copy:
                currPacket = packets[packet]
                prevPacket = packets[(packet-1) % len(packets)]
                # New packet appended after the current tail, reusing an
                # existing inter-packet gap and payload size.
                newPacket = (lastPacket[0]+(currPacket[0]-prevPacket[0]), currPacket[1])
                trafficJson["packets_n_sizes_formatting"][(application, workload)][newPacket[0]] = newPacket[1]
                packets.append(newPacket)
                totalBytes += newPacket[1]
                duration = newPacket[0]
                print()
                pass
            del newPacket, lastPacket, prevPacket, packets_to_copy, packets, currPacket, packet
            # Add sent bytes and replace with the new number
            trafficJson["workload"][workload]["totalBytes"] += totalBytes
            trafficJson["workload"][workload]["duration"] += duration
            # Update the total traffic bytes
            trafficTotalBytes += totalBytes
            # Recalculate the percentages for each workflow after the changes
            for workload in desiredWorkload:
                trafficJson["workload"][workload]["trafficPercentage"] = trafficJson["workload"][workload]["totalBytes"] / trafficTotalBytes
                pass
            pass
            del duration, application, totalBytes
        #If the application duration is similar to the one with the maximum duration, adjust packet sizes
        else:
            #assuming a single application per workload type
            multiplication_factor = desiredWorkload[workload]["traffic_percentage"]/trafficJson["workload"][workload]["trafficPercentage"]
            application = list(trafficJson["workload"][workload]["applications"])[0]
            totalBytes = 0
            #Adjust the byte size of packages by a multiplication factor
            for time_to_send in trafficJson["packets_n_sizes_formatting"][(application, workload)]:
                trafficJson["packets_n_sizes_formatting"][(application, workload)][time_to_send] *= multiplication_factor
                trafficJson["packets_n_sizes_formatting"][(application, workload)][time_to_send] += 1
                trafficJson["packets_n_sizes_formatting"][(application, workload)][time_to_send] = int(trafficJson["packets_n_sizes_formatting"][(application,workload)][time_to_send])
                totalBytes += trafficJson["packets_n_sizes_formatting"][(application, workload)][time_to_send]
            #Remove application old sent bytes and replace with the new number
            trafficJson["workload"][workload]["totalBytes"] -= trafficJson["workload"][workload]["applications"][application][0]["totalBytes"]
            trafficJson["workload"][workload]["totalBytes"] += totalBytes
            #Update the total traffic bytes
            #trafficJson["workload"][workload]["applications"][application][0]["totalBytes"] = totalBytes
            trafficTotalBytes -= trafficJson["workload"][workload]["applications"][application][0]["totalBytes"]
            trafficTotalBytes += totalBytes
            #Recalculate the percentages for each workflow after the changes
            for workload in desiredWorkload:
                trafficJson["workload"][workload]["trafficPercentage"] = trafficJson["workload"][workload]["totalBytes"]/trafficTotalBytes
                pass
            del multiplication_factor, application, totalBytes, time_to_send
            pass
        # Find application/workload with biggest gap to the expected traffic
        biggestPercentageGap = (0, None)
        for workload in desiredWorkload:
            gap = desiredWorkload[workload]["traffic_percentage"] - trafficJson["workload"][workload]["trafficPercentage"]
            if gap > biggestPercentageGap[0]:
                biggestPercentageGap = (gap, workload)
            pass
    del workload
    #Sort packet_n_sizes_formatting before dumping
    unsorted_traffic = trafficJson["packets_n_sizes_formatting"]
    sorted_traffic = {}
    for key, traffic in unsorted_traffic.items():
        sorted_traffic[key[0]] = sorted(list(traffic.items()))
    trafficJson["packets_n_sizes_formatting_sorted"] = sorted_traffic
    #Dump everything to a pickle
    with open("trafficJson.pickle", "wb") as fd:
        pickle.dump(trafficJson, fd)
    #And only the sorted and formatted traffic for the JSON
    with open("trafficJson.json", "w") as fd:
        json.dump(trafficJson["packets_n_sizes_formatting_sorted"], fd)
    print()
    pass
def post_parola_degistir(self):
    """Password-change screen handler (original docstring: "Parola degistirme ekrani").

    Validates the change-password form, compares the SHA-512 digest of the
    submitted old password with the stored one, commits the new digest and
    emits a user-activity signal for each outcome. All user-facing strings
    are Turkish and are preserved verbatim.
    """
    user = User.query.filter_by(id=current_user.get_id()).one()
    parola_form = PasswordChangeForm(request.form)
    if parola_form.validate():
        # Passwords are stored as plain SHA-512 hex digests.
        old_password = sha512(
            parola_form.old_password.data.encode()).hexdigest()
        new_password = sha512(
            parola_form.new_password.data.encode()).hexdigest()
        if current_user.password == old_password:
            try:
                user.password = new_password
                DB.session.commit()
                flash(_("Parolanız güncellendi"))
            except SQLAlchemyError as error:
                DB.session.rollback()
                flash(
                    _("Parolanız güncellenirken bir hata oluştu. Girdiğiniz bilgileri lütfen kontrol ediniz."
                      ))
                CustomErrorHandler.error_handler()
            # Signal: password changed (sent after the commit attempt).
            signal_payload = {
                "message_type":
                USER_ACTIVITY_MESSAGES.get("common").get(
                    "parola_degisikligi").type_index,
                "ekstra_mesaj":
                "{} adlı kullanıcı parolasini degistirdi.".format(
                    current_user.username)
            }
            signal_sender(**signal_payload)
        else:
            # The submitted old password did not match the stored digest.
            flash(
                _("Parolanız güncellenirken bir hata oluştu. Girdiğiniz bilgileri lütfen kontrol ediniz."
                  ))
            signal_payload = {
                "message_type":
                USER_ACTIVITY_MESSAGES.get("common").get(
                    "hatali_parola").type_index,
                "ekstra_mesaj":
                "{} adlı kullanıcı parolasini degistirirken"
                "Eski parolasini hatali girdi.".format(
                    current_user.username)
            }
            signal_sender(**signal_payload)
    else:
        # Form validation failed.
        signal_payload = {
            "message_type":
            USER_ACTIVITY_MESSAGES.get("common").get(
                "parola_degistirirken_hata").type_index,
            "ekstra_mesaj":
            "{} adlı kullanıcı parolasini degistiremedi.".format(
                current_user.username)
        }
        signal_sender(**signal_payload)
        flash(
            _("Parolanız güncellenirken bir hata oluştu. Girdiğiniz bilgileri lütfen kontrol ediniz."
              ))
    return render_template(
        "parola.html",
        parola_form=parola_form,
        user=user,
        avatar_url=self.avatar,
    )
return False return True def gen_prime(l, r): n = random.randint(l, r) while not isPrime(n): n = random.randint(l, r) return n def generate_keys(): p = gen_prime(200, 300) q = gen_prime(300, 400) n = p * q e = 101 # Здесь должен быть обратный эл-т в кольце по модулю! d = 1 while (e * d) % n != 1: d += 1 return PublicKey(e, n), PrivateKey(d, n) public, private = generate_keys() print("Address of our BLOCKCHAIN is ", sha512(public.dumps().encode()).hexdigest()) print("Save this pls", public.dumps()) print("Save this pls and keep it secret", private.dumps())
from ._util import PY2, PY33

# Expose the concrete hash-object types produced by the version-specific
# C accelerator modules (for isinstance checks elsewhere), since hashlib
# itself does not export them.
if PY2:
    import _md5
    import _sha
    import _sha256
    import _sha512
    md5 = type(_md5.new())
    sha1 = type(_sha.new())
    sha224 = type(_sha256.sha224())
    sha256 = type(_sha256.sha256())
    sha384 = type(_sha512.sha384())
    sha512 = type(_sha512.sha512())
elif PY33:
    # Python 3.3+: _sha became _sha1, and constructors carry the public
    # hashlib names instead of .new().
    import _md5
    import _sha1
    import _sha256
    import _sha512
    md5 = type(_md5.md5())
    sha1 = type(_sha1.sha1())
    sha224 = type(_sha256.sha224())
    sha256 = type(_sha256.sha256())
    sha384 = type(_sha512.sha384())
    sha512 = type(_sha512.sha512())
# NOTE(review): if neither PY2 nor PY33 is true, the names above remain
# undefined — confirm callers guard on the same flags.
# NOTE(review): _hashlib is presumably imported elsewhere in this module.
HASH = type(_hashlib.new('md5'))
def _genHashString(self):
    """Generate a random hash string for code and token values.

    Returns the first OAUTH2_ACCESS_TOKEN_LENGTH characters of the SHA-512
    hex digest of a random UUID.
    """
    # Bug fix: hashlib's sha512 requires bytes; uuid4().hex is a str on
    # Python 3, so encode it. The hex string is pure ASCII, so the digest
    # is identical to what the original produced on Python 2.
    return sha512(uuid4().hex.encode()).hexdigest()[0:OAUTH2_ACCESS_TOKEN_LENGTH]
class Blockchain:
    """A toy blockchain that mines blocks and validates them against peer
    nodes over local TCP sockets.

    NOTE(review): depends on a module-level `Block` module/class and on
    sha512 being in scope; indentation reconstructed from collapsed source.
    """
    # SHA-512 of a fixed seed string; used as the genesis block payload.
    seed_hash = sha512("Genesis669666999".encode()).hexdigest()
    node_list = {}

    # The default constructor function for this class
    def __init__(self, chain=None, all_transactions=None):
        # Avoid mutable default arguments by creating fresh lists here.
        if all_transactions is None:
            all_transactions = []
        if chain is None:
            chain = []
        self.chain = chain
        self.all_transactions = all_transactions
        # Node name -> local TCP port used by validate_block().
        self.node_list = {
            "node1": 35500,
            "node2": 31000,
            "node3": 36500,
            "node4": 38000,
            "node5": 38500,
            "node6": 40000,
            "node7": 41000
        }

    # The genesis block which will be the starting of the chain; a parent block in some way
    def block_zero(self, node_id):
        # The genesis or zero block is initialized with a special key that is to be kept confidential
        # The previous hash for the Gen block is 0
        block = Block.Block([self.seed_hash], 0)
        self.chain.append(block)
        if self.validate_block(node_id):
            print("\nGENESIS BLOCK MINED!\n")
        else:
            print("\nGENESIS BLOCK CANNOT BE MINED!\n")
        return [self.chain, self.seed_hash]

    # The function that adds new blocks to the existing chain
    def add_block(self, transactions, node_id):
        import pymysql.cursors
        mysql_conn = pymysql.connect(host="localhost", user="******", password="******", db="user_information", charset='utf8mb4', cursorclass=pymysql.cursors.DictCursor)
        # NOTE(review): previous_block_hash is never populated from the DB
        # queries below — every block is created with an empty previous hash.
        previous_block_hash = ""
        print(self.chain, len(self.chain))
        try:
            with mysql_conn.cursor() as cursor:
                # NOTE(review): table name built via %-interpolation — SQL
                # injection risk if node_id is untrusted.
                sql = "select blockname from %s" % (node_id)
                print(sql)
                cursor.execute(sql)
                # Block height = number of rows currently in the node table.
                block_height = len(cursor.fetchall())
            mysql_conn.commit()
            with mysql_conn.cursor() as cursor:
                sql = "select blockname from %s" % node_id + " limit %s, %s"
                print(type(block_height))
                cursor.execute(sql, (block_height - 1, block_height))
        finally:
            mysql_conn.close()
        new_block = Block.Block(transactions, previous_block_hash)
        self.chain.append(new_block)
        return new_block

    # The function that validates the new blocks by comparing hashes
    def validate_block(self, node_id):
        import socket
        import pickle
        # Multicast the data to all blocks except the sender, for validation
        nodes = ["node1",
                 "node2", "node3", "node4", "node5", "node6", "node7"]
        # you can send objects using pickle files
        dumping_variable = pickle.dumps(self.chain)
        chain_object = dumping_variable
        # True only if every reachable peer answers "True".
        result = True
        print(nodes)
        for node in nodes:
            server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            if node != node_id:
                port = self.node_list[node]
                # print(port)
                try:
                    server_socket.connect(("127.0.0.1", port))
                    # Protocol: send node id, pickled chain, then seed hash,
                    # reading a 1 KiB ack after each of the first two sends.
                    server_socket.send(bytes(node_id.encode()))
                    m1 = server_socket.recv(1024)
                    server_socket.send(chain_object)
                    m1 = server_socket.recv(1024)
                    server_socket.send(bytes(str(self.seed_hash).encode()))
                    result_string = str(server_socket.recv(1024))
                    result_from_one_node = False
                    # str() of the received bytes yields "b'True'" on Py3.
                    if str(result_string) == "b\'True\'":
                        result_from_one_node = True
                    print("Message received: ", result_from_one_node)
                    result &= result_from_one_node
                    server_socket.close()
                except ConnectionRefusedError as ce:
                    print("In Blockchain: ", ce.strerror, ce.__traceback__)
        print("Out of the loop")
        return result

    # The function that will print the whole blockchain
    def print_block_chain(self):
        for block_index in range(len(self.chain)):
            current_block = self.chain[block_index]
            print("Block {} {} ".format(block_index, current_block))
            current_block.print_block_content()
def get_query_tag(self, query_override=None):
    """Create a unique, deterministic tag for these query parameters.

    The tag is the first 36 hex characters of the SHA-512 digest of the
    JSON-serialized (query_id, url, parameters) triple.
    """
    url = self.Urls.task.format(**self.query.get_url_kwargs())
    params = self.get_parameters(query_override=query_override)
    payload = (self.query_id, url, params)
    key = json.dumps(payload, ensure_ascii=True, sort_keys=True).encode('ascii')
    return sha512(key).hexdigest()[:36]