def successor(self, key):
    """
    Chord lookup: return the port of the node responsible for `key`.

    Scans the finger table for a successor whose hash interval covers the
    key; when no finger decides the answer, the request is forwarded to the
    nearest known node.

    :param key: hashed identifier being located
    :return: port number of the responsible node (possibly via forwarding)
    """
    if key == self.key:
        return self.port
    nearest_node = self.port
    for finger in self.fingertable:
        if Hash(finger['successor']) > self.key:
            # Finger lies "ahead" of us on the ring (no wrap-around).
            if key > self.key and key <= Hash(finger['successor']):
                return finger['successor']
            else:
                nearest_node = finger['successor']
                continue
        else:
            if Hash(finger['successor']) == self.key:
                # Finger points back at this node: ring of one, or the
                # table is stale — fall through to forwarding.
                if self.fingertable[0]['successor'] == self.port:
                    return self.port
                else:
                    break
            # Wrap-around interval: (self.key, finger] crosses ring origin.
            if key > self.key or key <= Hash(finger['successor']):
                return finger['successor']
            else:
                nearest_node = finger['successor']
                continue
    if nearest_node == self.port:
        # BUGFIX: original referenced bare `fingertable`, which is not in
        # scope here and raised NameError — it must be the instance attribute.
        return self.fingertable[0]['successor']
    else:
        # Forward to the closest preceding node we know about.
        return self.fwd_successor_request(key, nearest_node)
def add(self, login, password, passwordMatch):
    """
    Adds user.

    :param login: string
    :param password: string
    :param passwordMatch: string
    :return: string
    """
    validator = Validation()
    # Guard clauses: reject invalid input before touching the database.
    if not validator.validateLogin(login=login):
        return "Login should have between 8 - 16 characters."
    if not validator.validatePassword(password):
        return "Password is too weak. Password should have at least 8 characters and one character of each type: lowercase, uppercase, digit and special character."
    if not validator.validatePasswordMatch(password, passwordMatch):
        return "Passwords do not match."
    hasher = Hash()
    salt = hasher.generateSalt()
    hashedPassword = hasher.encryptPassword(password, salt)
    result = self.db.addUser(login, hashedPassword, salt)
    if result == 'login_exist':
        return "Login already exists."
    if result == True:
        return "Account created successfully."
    return f"Database error occurred. Check {cfg.log['output']} file."
def calculateHash(current_user):
    """Endpoint: hash the posted payload and return {'hash': ...} as JSON."""
    payload = request.get_json()
    print(payload)
    hasher = Hash(data=payload['data'],
                  algo=payload['algo'],
                  iteration=payload["iteration"])
    return jsonify({'hash': hasher.hash()})
def test_errors():
    """Missing keys raise KeyError; non-string keys raise TypeError."""
    table = Hash()
    with pytest.raises(KeyError):
        table.get("key")
    with pytest.raises(TypeError):
        table.set(5, 5)
def get_file(self, file):
    """Locate the node responsible for `file` and request it from there."""
    file_key = Hash(file)
    print("Requested file name:", file)
    print("Requested file hash:", file_key)
    print("Locating file...")
    owner = self.successor(file_key)
    print("File should be on node:", Hash(owner))
    print("Requesting node for file...")
    self.request_file(file, owner)
def _getPendingTxsHashes():
    """
    Fetch pending transaction hashes and push each as a Transaction stub.

    Fixes: the original called getPendingTxsHashes() twice (printing one
    snapshot but iterating a second, possibly different one) and shadowed
    both the Hash client and the builtin `hash` with the loop variable.
    """
    client = Hash()
    pending = client.getPendingTxsHashes()
    print(pending)
    for tx in pending:
        # tx is (hash, timestamp); gas fields are unknown here, so -1 marks
        # them as not-yet-populated (as in the original).
        dbPush.set_tx(
            Transaction(hash=tx[0], timestamp=tx[1], gasLimit=-1, gasPrice=-1))
def __init__(self, args):
    '''
    Store stepping/pool parameters, build the hashing helper, then start
    scheduling immediately.

    sum of step is: 4:0-4
    :param args: parsed arguments; must provide n_step, pool_size and
        hash_size — TODO confirm full expected attribute set
    '''
    self.n_step = args.n_step        # number of steps (e.g. 4 -> 0-4, per note above)
    self.pool_size = args.pool_size  # size of the worker/sample pool
    self.args = args                 # keep the full namespace for later use
    # Hashing helper sized from the CLI; exact semantics of hash_size
    # depend on the Hash class — TODO confirm
    self.hash = Hash(hash_size=self.args.hash_size)
    # Scheduling is kicked off as a construction side effect.
    self.schedule()
def test_string(self):
    """Two Hash objects built from the same random bytes compare equal."""
    payload = os.urandom(32)
    try:
        first = Hash(payload)
        raw = first.bytes()
        second = Hash(payload)
        assert first == second
    except Exception as exc:
        # Any exception is a failure: report it and force the assert.
        print("exception thrown: {}".format(exc))
        assert 1 == 0
def hash_4096():
    """Benchmark fixture: a 4096-bucket Hash preloaded with every word."""
    start = time()
    hash_4096 = Hash(4096)
    for word in words:
        hash_4096.set(word, word)
    end = time()
    # print() with comma-separated args emits the same "label value" text
    # the old Python 2 print statement did, and keeps this file py3-valid.
    print("4096 Fixture: ", end - start)
    return hash_4096
def hash_1024():
    """Benchmark fixture: a 1024-bucket Hash preloaded with every word."""
    start = time()
    hash_1024 = Hash(1024)
    for word in words:
        hash_1024.set(word, word)
    end = time()
    # Converted from a Python 2 print statement; output is unchanged.
    print("1024 Fixture: ", end - start)
    return hash_1024
def hash_512():
    """Benchmark fixture: a 512-bucket Hash preloaded with every word."""
    start = time()
    hash_512 = Hash(512)
    for word in words:
        hash_512.set(word, word)
    end = time()
    # Converted from a Python 2 print statement; output is unchanged.
    print("512 Fixture: ", end - start)
    return hash_512
def hash_256():
    """Benchmark fixture: a 256-bucket Hash preloaded with every word."""
    start = time()
    hash_256 = Hash(256)
    for word in words:
        hash_256.set(word, word)
    end = time()
    # Converted from a Python 2 print statement; output is unchanged.
    print("\n256 Fixture: ", end - start)
    return hash_256
def test_bytes(self):
    """A Hash rebuilt from a byte list equals one built from the bytes."""
    raw = os.urandom(32)
    raw_list = list(raw)
    try:
        direct = Hash(raw)
        rebuilt = Hash()
        rebuilt.from_bytes(raw_list)
        assert direct == rebuilt
        print(direct, rebuilt)
    except Exception as exc:
        # Any exception is a failure: report it and force the assert.
        print("exception thrown: {}".format(exc))
        assert 1 == 0
def main():
    """Interactive tic-tac-toe loop: the human plays X, minimax answers."""
    banner = '----------------------\n Seu símbolo é o X\n----------------------'
    os.system('cls')
    board = Hash()
    print(banner)
    board.get_board()
    while True:
        row = int(input('\nDigite um número para a linha: '))
        col = int(input('Digite um número para a coluna: '))
        board.set_board('X', row, col)
        if finish(board):
            break
        board = minimax(board)
        if finish(board):
            break
        os.system('cls')
        print(banner)
        board.get_board()
def __init__(self, id, neighbor_count=0, device_count=0):
    """
    Worker node holding per-neighbor and per-device hash tables.

    :param id: identifier for this instance
        (NOTE(review): parameter name shadows the builtin `id`)
    :param neighbor_count: number of neighbors; sizes the neighbor tables
    :param device_count: number of devices; sizes the device table
    """
    self.id = id
    self.neighbor_count = neighbor_count
    self.device_count = device_count
    self.global_results = Queue()                    # cross-thread result sink
    self.device_hash = Hash(self.device_count)       # per-device state
    self.results_hash = Hash(self.neighbor_count)    # results keyed by neighbor
    self.receive_hash = Hash(self.neighbor_count)    # inbound data per neighbor
    self.neighbor_map = Hash(self.neighbor_count)    # neighbor id -> info
    self.mutex = Lock()                              # guards the tables above — TODO confirm scope
    self.status = Status(running=True)               # node starts in the running state
def __init__(self, ip, port):
    """
    Initialize a Chord node: derive its key from the port, build the
    finger table, register local files, and bind the server socket.

    :param ip: IP address to bind
    :param port: TCP port to bind; also hashed into the node's identity
    """
    self.active = False
    self.key = Hash(port)
    print("==========================")
    print("Initializing Chord node...")
    print("Port:", port)
    print("Node ID:", self.key)
    print("==========================")
    self.ip = ip
    self.port = port
    self.address = (ip, port)
    self.predecessor = self.port
    self.fingertable = []
    self.init_fingers()
    # Share everything in the working directory except this script and
    # implementation artifacts. NOTE(review): list.remove raises ValueError
    # if e.g. __pycache__ does not exist yet — confirm startup conditions.
    self.files = os.listdir()
    self.files.remove(os.path.basename(__file__))
    self.files.remove('__pycache__')
    self.files.remove('hash.py')
    self.transfer_in_progress = False
    self.s_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        self.s_socket.bind(self.address)
        print("\n[INFO] Node initialized on port", port)
    except socket.error as err:
        # BUGFIX: the original printed str(socket.error) — the exception
        # *class* — instead of the error that was actually raised.
        print(str(err))
        sys.exit()
def scan_file(filename):
    """
    Scan File checks OPSWAT's api store to see if a scan has been done already.
    If not create a new scan of file then store.
    @params filename can be either filename relative to directory or a whole file path
    @return nil but prints scanning result
    """
    path = os.path.realpath(filename)
    # Hash the file (MD5) and ask the API whether it already knows it.
    file_hash = Hash.create(path)
    lookup = OpswatService.hash_lookup(file_hash)
    # A "not found" reply carries a single key ({<hash>: "Not Found"}),
    # so more than one key means a cached result already exists.
    if len(lookup.json().keys()) > 1:
        OpswatService.print_result(lookup, filename)
        return
    # Unknown hash: upload the file, then poll until scan details appear.
    # NOTE(review): tight poll loop with no delay between requests.
    data_id = OpswatService.upload_file(path, file_hash)
    result = OpswatService.get_results_by_data_id(data_id)
    while len(result.json()["scan_results"]["scan_details"].keys()) == 0:
        result = OpswatService.get_results_by_data_id(data_id)
    OpswatService.print_result(result, filename)
def put_file(self, file):
    """
    Handle a PUT request: register the file locally and, when another node
    is responsible for its key, forward the request to that node.

    :param file: name of a file that must exist in the working directory
    """
    if file not in os.listdir():
        print("[ERR] Invalid file name for PUT request.")
        return
    if file in self.files:
        print("[ERR] File already exists.")
        return
    self.files.append(file)
    target = self.successor(Hash(file))
    if target == self.port:
        # This node owns the key; nothing to forward.
        print("[UPDATE] PUT request complete.")
        return
    message = pickle.dumps({
        'type': REQUEST_PUT,
        'source': self.port,
        'filename': file
    })
    c_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        c_socket.connect(('127.0.0.1', target))
        c_socket.send(message)
        print("[INFO] PUT request forwarded to the responsible node.")
    finally:
        # BUGFIX: the original leaked the client socket.
        c_socket.close()
def request_files(
        self, port,
        download_all=False):  # if download_all == true, download all files
    """
    Ask the node at `port` for its file list and download relevant files.

    :param port: TCP port of the node to query (localhost)
    :param download_all: when True download every listed file; otherwise
        only the files whose key this node is responsible for
    """
    c_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    try:
        c_socket.connect(('127.0.0.1', port))
        # Request the remote node's file list.
        message = pickle.dumps({'type': REQUEST_FILES_LIST, 'source': self.port})
        c_socket.send(message)
        # NOTE(review): a single 1024-byte recv may truncate long lists.
        response = pickle.loads(c_socket.recv(1024))
    finally:
        # BUGFIX: the original never closed the client socket.
        c_socket.close()
    files = response
    print(response)
    if download_all:
        files_to_get = files
    else:
        # Keep only the files whose hashed key maps to this node.
        files_to_get = [f for f in files if self.successor(Hash(f)) == self.port]
    if len(files_to_get) == 0:
        print("[INFO] No files to receive.")
    else:
        for file in files_to_get:
            print("[INFO] Requesting file:", file)
            self.request_file(file, port)
def mine():
    '''
    Mine a new block.
    '''
    # Derive a fresh proof of work from the previous block's proof.
    previous_block = blockchain.last_block
    new_proof = proof.proof_of_work(previous_block["proof"])
    # Reward the miner: a sender of "0" marks a coinbase-style transaction
    # that will be folded into the block forged below.
    blockchain.new_transaction(sender="0", recipient=node_id, amount=1)
    # Forge the block, linking it to the hash of its predecessor.
    new_block = blockchain.new_block(new_proof, Hash.hash(previous_block))
    # Report the freshly forged block's details.
    response = {
        'message': "New Block Forged",
        'index': new_block['index'],
        'transactions': new_block['transactions'],
        'proof': new_block['proof'],
        'previous_hash': new_block['previous_hash'],
    }
    return jsonify(response), 200
def valid_chain(chain):
    '''
    Determine if a given blockchain is valid by looping through each block
    and verifying both hash and the proof

    :param chain: <list> A blockchain to verify
    :return: <bool> True if valid, else False
    '''
    last_block = chain[0]
    for block in chain[1:]:
        print(f'{last_block}')
        print(f'{block}')
        print('\n-------\n')
        # The stored back-link must equal the actual hash of the predecessor.
        if block['previous_hash'] != Hash.hash(last_block):
            return False
        # The proof must satisfy the PoW relation against the previous proof.
        if not Proof.valid_proof(last_block['proof'], block['proof']):
            return False
        last_block = block
    # Every link and proof checked out.
    return True
def new_block(self, proof, previous_hash=None):
    '''
    Creates a new Block in the Blockchain.

    :param proof: <int> The Proof given by the proof of work algorithm
    :param previous_hash: (Optional) <str> Hash of the previous block
    :return <dict> New Block.
    '''
    # Link to the supplied hash, or fall back to hashing the chain tip.
    link = previous_hash or Hash.hash(self.chain[-1])
    block = {
        'index': len(self.chain) + 1,
        'timestamp': time(),
        'transactions': self.current_transactions,
        'proof': proof,
        'previous_hash': link
    }
    # The pending transactions are now mined into this block, so start a
    # fresh pending list before appending the block to the chain.
    self.current_transactions = []
    self.chain.append(block)
    return block
def checkCredentials(self, login, password):
    """
    Checks if login and password are matching each other.

    :param login: string
    :param password: string
    :return: boolean
    """
    user = self.db.selectUser(login)
    # NOTE(review): assumes selectUser always returns a row; an unknown
    # login would raise TypeError here — confirm against the db layer.
    correctPassword = user[1]
    salt = user[2]
    h = Hash()
    hashedPassword = h.encryptPassword(password, salt)
    # Return the comparison directly (was a verbose if/else True/False).
    return hashedPassword == correctPassword
def __relationships_for(payload_slice, payload):
    """Yield (name, links, record) triples for each relationship entry."""
    if 'relationships' not in payload_slice:
        return
    relationships = payload_slice['relationships']
    # Python 2 dict iteration, matching the rest of this module.
    for relation_name, relation_payload in relationships.iteritems():
        relation_payload = Hash(relation_payload)
        for record in __process_relationship(relation_payload, payload):
            yield relation_name, relation_payload['links'], record
def test_position(self):
    """After inserting keys out of order, position(1) reports slot 1."""
    table = Hash()
    # Same insertion order as before: 1, 0, 3, 2.
    for key in (1, 0, 3, 2):
        table.add(key, key)
    self.assertEqual(table.position(1), 1)
def add(self, word):
    """Register a word and its hash; record the hash if it collides."""
    if word in self.words:
        return
    digest = Hash.hashString(word)
    self.words[word] = digest
    if digest in self.hashes:
        # Another word already produced this digest: note the collision.
        self.hashes[digest].add(word)
        self.collisions.add(digest)
    else:
        self.hashes[digest] = {word}
def main():
    """Benchmark the hash table: bulk insert, traverse, delete, retrieve."""
    # --- Insert ---
    # Context managers close the data files deterministically (the
    # original leaked all three file handles).
    with open("../data/InsertNamesMedium.txt") as f:
        lines = f.readlines()
    hashtable = Hash(len(lines))
    time_started = time.time()
    dups = 0
    for line in lines:
        new_student = parseStudent(line)
        if not hashtable.Insert(new_student):
            dups += 1
    time_elapsed = time.time() - time_started
    print("%s duplicates while inserting." % dups)
    print("%s seconds\n" % time_elapsed)

    # --- Traverse: averageAge accumulates into the TOTAL_AGE global ---
    time_started = time.time()
    hashtable.Traverse(averageAge)
    global TOTAL_AGE
    time_elapsed = time.time() - time_started
    print("average age of students", (TOTAL_AGE / hashtable.Size()))
    print("%s seconds\n" % time_elapsed)

    # --- Delete: dummy Student records carry only the lookup name ---
    with open("../data/DeleteNamesMedium.txt") as f:
        lines = f.readlines()
    time_started = time.time()
    deleted = 0
    failed = 0
    for line in lines:
        dummy_student = Student("", "", line.strip(), "", "")
        if hashtable.Delete(dummy_student):
            deleted += 1
        else:
            failed += 1
    time_elapsed = time.time() - time_started
    print("%s students deleted." % deleted)
    print("%s errors" % failed)
    print("%s seconds\n" % time_elapsed)

    # --- Retrieve ---
    retrieved = 0
    retrievedTotalAge = 0
    with open("../data/RetrieveNamesMedium.txt") as f:
        lines = f.readlines()
    time_started = time.time()
    failed = 0
    for line in lines:
        dummy_student = Student("", "", line.strip(), "", "")
        student = hashtable.Retrieve(dummy_student)
        if student is not None:
            retrieved += 1
            retrievedTotalAge += int(student.age)
        else:
            failed += 1
    # NOTE(review): raises ZeroDivisionError when nothing is retrieved.
    avg = (retrievedTotalAge / retrieved)
    time_elapsed = time.time() - time_started
    print("%s students retrieved\naverage age of retrieved students %s" %
          (retrieved, avg))
    print("%s errors" % failed)
    print("%s seconds\n" % time_elapsed)
def test_hash_block(self):
    '''
    Ensures when a block is hashed, the SHA-256 hex is returned.
    '''
    sample_block = {
        'index': 1,
        'timestamp': '2017',
        'transactions': [],
        'proof': '123',
        'previous_hash': 'previous'
    }
    expected = '8b8f4e1c0dbce1064bb672b8d002b510b52e8a8c14cce01ae07f48929543c1e3'
    self.assertEqual(expected, Hash.hash(sample_block))
def writeHashToRedis(data):
    # Persist per-file DNA-hash status records to redis, keyed by
    # "<client_id>#<container hash>". Python 2 code (dict.has_key).
    key_hash = ''
    if data['params'].has_key('files'):
        # Multi-file payloads need a container-level hash: either the URL's
        # hash, or one computed from the downloaded seed (.torrent) file.
        # NOTE(review): single-file payloads leave key_hash as '' — confirm
        # that is intended for the redis key built below.
        if len(data['params']['files']) > 1:
            if data['params'].has_key('url'):
                key_hash = data['params']['url']['hash']
            else:
                ret_code, bt_file_name = download_file(
                    data['params']['seed_file']['path'], gv.file_tmpdir)
                if ret_code == True:
                    seed_file_content = ''
                    with open(bt_file_name, 'r') as fp:
                        seed_file_content = fp.read()
                    seed_file_hash = Hash(filename=bt_file_name,
                                          content=seed_file_content).value
                    # Cache the computed hash back onto the payload.
                    data['params']['seed_file']['hash'] = seed_file_hash
                    key_hash = seed_file_hash
                    # Best-effort cleanup of the downloaded temp file.
                    try:
                        os.remove(bt_file_name)
                    except OSError:
                        g_logger.error(
                            trans2json("delete bt file %s error %s" %
                                       (bt_file_name, traceback.format_exc())))
        # One redis record per file; the stored value encodes the DNA
        # generation outcome: None = success, 0 = not copyrighted/filtered,
        # 3 = generation failed (unknown codes write an empty status).
        for i in data['params']['files']:
            key = "%s#%s" % (
                data['params']['additional_info']['client_id'], key_hash)
            dna_hash = {}
            code = i['code']
            if code == GENERATE_SUCESS:
                dna_hash[i['hash']] = None
                dna_hash['file_path'] = i['file_path']
            elif code in (NOT_COPYWRITE, FILTERING):
                dna_hash[i['hash']] = 0
                dna_hash['file_path'] = i['file_path']
            elif code == GENERATE_FAILED:
                dna_hash[i['hash']] = 3
                dna_hash['file_path'] = i['file_path']
            writesetredis(gv.rds_conn, key, dna_hash)
def __init__(self, _id, faulty):
    """
    Set up a blockchain node: chain, hashing, behaviour strategy,
    broadcasting, reinforcement and per-node result loggers.

    :param _id: numeric identifier of this node (used in logger names)
    :param faulty: flag marking this node as faulty
    """
    self.id = _id
    # NOTE(review): passes self explicitly to a name-mangled method —
    # __read_conf is presumably a @staticmethod; confirm. It populates
    # genesis_time, pure, malicious and depth_cancel_block used below.
    self.__read_conf(self)
    # Third argument enables block cancellation only when a cancel depth
    # is configured (-1 means disabled).
    self.blockchain = Blockchain(self.genesis_time, self.pure,
                                 self.depth_cancel_block != -1)
    self.current_block = self.blockchain.get_last()
    self.hash = Hash(self)
    self.stop_mining = None
    self.nonce_list = []
    self.transaction_list = []
    # Choose the behaviour strategy from the (pure, malicious) flags.
    if self.pure:
        if self.malicious:
            self.state = MaliciousPureBlockchain(self)
        else:
            self.state = PureBlockchain(self)
    else:
        if self.malicious:
            self.state = MaliciousMining(self)
        else:
            self.state = Mining(self)
    self.broadcast = Broadcast(self)
    self.reinforcement_pom = ReinforcementPOM(self)
    self.faulty = faulty
    self.already_found = 0
    # flag to know that it's start of a new mining that we later use for the
    # condition for obtaining a block to mine on top of //line 94
    self.start = True
    # Separate result log files per node id (M and H streams).
    self.res_loggerM = self.setup_logger(
        'results_loggerM' + str(self.id),
        'results_logfileM' + str(self.id) + '.log')
    self.res_loggerH = self.setup_logger(
        'results_loggerH' + str(self.id),
        'results_logfileH' + str(self.id) + '.log')
    self.counter = 0
    self.t = dt.datetime.now()
def sentimentAnalysis(api, topicWord, noOfTweets, topMsg, topHash):
    # Search recent English tweets about `topicWord`, classify each with
    # TextBlob, plot the positive/negative/neutral split as a pie chart,
    # then chart the top hashtags and print the top words.
    w = Word()
    h = Hash()
    r = Retweet()
    #topicWord = input ( "Enter the topic you want to get its sentiment" )
    #noOfTweets = int ( input ( "Enter the number of tweets you want to get" ) )
    #topMsg = int ( input ( "Enter the number of top words used by people on your product" ) )
    #topHash = int ( input ( "Enter the number of top hashtag used by people on your product" ) )
    fromWhichLang = "en"
    # (Optional geocode/date filters were removed; the Cursor below only
    # restricts by query and language.)
    topic = tweepy.Cursor(api.search, q=topicWord,
                          lang=fromWhichLang).items(noOfTweets)
    positive = 0
    negative = 0
    neutral = 0
    polarity = 0
    for tweet in topic:
        # Strip @mentions and URLs before running sentiment analysis.
        tweet.text = tweet.text.replace(
            tweet.text, re.sub(r'@[A-Za-z0-9]+', '', tweet.text))
        tweet.text = tweet.text.replace(tweet.text,
                                        re.sub(r'http\S+', '', tweet.text))
        print(tweet.text)
        analysis = TextBlob(tweet.text)
        # NOTE(review): polarity is overwritten each iteration, so after the
        # loop it holds only the LAST tweet's polarity, not an aggregate.
        polarity = analysis.sentiment.polarity
        if analysis.sentiment.polarity > 0.0:
            positive += 1
        elif analysis.sentiment.polarity < 0.0:
            negative += 1
        else:
            neutral += 1
    total_tweets = neutral + negative + positive
    # NOTE(review): the counters are replaced by formatted percentage
    # *strings* here, and those strings are later passed to plt.pie —
    # confirm matplotlib accepts them; also raises ZeroDivisionError when
    # no tweets were found.
    positive = format(positive / total_tweets * 100, '.2f')
    negative = format(negative / total_tweets * 100, '.2f')
    neutral = format(neutral / total_tweets * 100, '.2f')
    polarity = format(polarity, '.2f')
    print(positive)
    if float(polarity) > 0.0:
        print("Positive")
    elif float(polarity) < 0.0:
        print("Negative")
    else:
        print("Neutral")
    labels = [
        'Positive[' + str(positive) + '%]',
        'Negative[' + str(negative) + '%]',
        'Neutral[' + str(neutral) + '%]'
    ]
    size = [positive, negative, neutral]
    color = ['blue', 'red', 'green']
    patches, text = plt.pie(size, colors=color, startangle=90)
    plt.legend(patches, labels, loc='best')
    plt.title("The rection of people on " + str(topicWord) +
              " by analysing on " + str(total_tweets) + " tweets")
    plt.axis('equal')
    plt.tight_layout()
    plt.show()
    # Fresh cursors: the first iterator is already exhausted.
    topic2 = tweepy.Cursor(api.search, q=topicWord,
                           lang=fromWhichLang).items(noOfTweets)
    topic3 = tweepy.Cursor(api.search, q=topicWord,
                           lang=fromWhichLang).items(noOfTweets)
    # NOTE(review): topFiveHash is re-invoked several times on the same
    # cursor below — confirm it caches, otherwise later calls see an
    # exhausted iterator.
    if len(h.topFiveHash(topic2, topHash)) > 0:
        print(h.topFiveHash(topic2, topHash))
        plt.bar(range(len(h.topFiveHash(topic2, topHash))),
                list(h.topFiveHash(topic2, topHash).values()),
                width=1.5)
        plt.xticks(range(len(h.topFiveHash(topic2, topHash))),
                   list(h.topFiveHash(topic2, topHash).keys()))
        plt.show()
    else:
        print("/n There were no hashtag")
    # Condition is hard-wired on; the intended check is kept for reference.
    if 1 > 0:  # len(topFiveWords(topic2, topMsg))>0:
        print(w.topFiveWords(topic3, topMsg))
        # plt.bar(range(len(topFiveWords(topic2, topMsg))), list(topFiveWords(topic2, topMsg).values()),width = 1.5)
        # plt.xticks(range(len(topFiveWords(topic2, topMsg))), list(topFiveWords(topic2, topMsg).keys()))
        # plt.show()
    else:
        print(
            "/n There is no popular thing people are saying about your product"
        )
def hash(str):
    # Thin module-level wrapper around Hash.hashString.
    # NOTE(review): both the function name and the parameter shadow the
    # builtins `hash` and `str`; kept as-is to preserve the public interface.
    return Hash.hashString(str)
# Manual smoke test for the Hash helper: fake the environment variables it
# reads its view/replication configuration from, then print the shard.
from hash import Hash
import os

val = "\"10.10.0.2:13800\",\"10.10.0.3:13800\",\"10.10.0.4:13800\",\"10.10.0.5:13800\",\"10.10.0.6:13800\",\"10.10.0.7:13800\""
os.environ["VIEW"] = val
os.environ["ADDRESS"] = "10.10.0.6:13800"
os.environ["REPL_FACTOR"] = "3"

h = Hash()
print(h.getShard())
import time

from hash import Hash

# Brute-force timing sweep: attack the hash at increasing bit widths and
# report how long each attack takes. Converted from Python 2 print
# statements to print() calls; each emits a single string, so the output
# is byte-identical.
for i in range(16, 100):
    print('*********************************************************************')
    print('bits = ' + str(i))
    print('*********************************************************************')
    startTime = time.time()
    myHash = Hash('abc', i)
    myHash.startAttacks()
    endTime = time.time()
    print('time (in sec) = ' + str(endTime - startTime))
    print('\n\n')
def query_hash(data):
    # Try each hash attached to the payload (URL hash, thunder hash, seed
    # file hash, then per-file DNA hashes) against the async VDDB query,
    # returning the first hit (ret_code == 1). Python 2 code: dict.has_key
    # and a list-returning map().
    result_hash_list = []  # accumulates non-hit (ret_code, result) pairs; currently unused afterwards
    start_time = time.time()
    if data['params'].has_key('url'):
        if data['params']['url']['hash'] != None and data['params']['url'][
                'hash'] != '':
            ret_code, result = query_vddb_async(data['params']['url']['hash'],
                                                data)
            if ret_code == 1:
                # Hit: return immediately.
                end_time = time.time()
                #gv.statsd_conn.timing("thunder.querybroker_qbpull", (end_time-start_time)*1000)
                return ret_code, result
            result_hash_list.append((ret_code, result))
    if data['params']['thunder_hash'] != None and data['params'][
            'thunder_hash'] != '':
        ret_code, result = query_vddb_async(data['params']['thunder_hash'],
                                            data)
        if ret_code == 1:
            end_time = time.time()
            #gv.statsd_conn.timing("thunder.querybroker_qbpull", (end_time-start_time)*1000)
            return ret_code, result
        result_hash_list.append((ret_code, result))
    if data['params'].has_key('seed_file'):
        # Use the provided seed-file hash, or download the seed (.torrent)
        # file and compute one.
        seed_file_hash = ''
        if data['params']['seed_file']['hash'] != '':
            seed_file_hash = data['params']['seed_file']['hash']
        else:
            ret_code, bt_file_name = download_file(
                data['params']['seed_file']['path'], gv.file_tmpdir)
            if ret_code:
                client_id = data['params']['additional_info']['client_id']
                with open(bt_file_name, 'rb') as fp:
                    seed_file_content = fp.read()
                seed_file_hash = Hash(filename=bt_file_name,
                                      content=seed_file_content).value
                # Cache the computed hash back onto the payload.
                data['params']['seed_file']['hash'] = seed_file_hash
                # Best-effort cleanup of the downloaded temp file.
                try:
                    os.remove(bt_file_name)
                except OSError:
                    g_logger.error(
                        trans2json("delete bt file %s error %s" %
                                   (bt_file_name, traceback.format_exc())))
        # NOTE(review): when the download fails, this still queries with
        # seed_file_hash == '' — confirm that is intended.
        ret_code, result = query_vddb_async(seed_file_hash, data)
        if ret_code == 1:
            end_time = time.time()
            #gv.statsd_conn.timing("thunder.querybroker_qbpull", (end_time-start_time)*1000)
            return ret_code, result
        result_hash_list.append((ret_code, result))
    if data['params'].has_key('files'):
        # Query every per-file DNA hash; map() pairs each hash with the
        # same payload (Python 2: returns an indexable list).
        hash_list = []
        data_list = []
        for i in data['params']['files']:
            dna_hash = i['hash']
            hash_list.append(dna_hash)
            data_list.append(data)
        result_list = map(query_vddb_async, hash_list, data_list)
        for i in range(len(result_list)):
            if result_list[i][0] == 1:
                end_time = time.time()
                #gv.statsd_conn.timing("thunder.querybroker_qbpull", (end_time-start_time)*1000)
                return result_list[i][0], result_list[i][1]
    # No hash produced a hit: 3 signals "not found".
    end_time = time.time()
    #gv.statsd_conn.timing("thunder.querybroker_qbpull", (end_time-start_time)*1000)
    return 3, None