def register():
    """Handle GET/POST for the registration page.

    Redirects to 'main' when the visitor already holds an auth session.
    On POST, checks that the username is free, that both password fields
    match, and that the form validates; then creates the user, stores
    their auth code in the session, and redirects to 'main'.
    """
    error = ""
    # Already-authenticated visitors have nothing to register.
    if session.get('auth'):
        return redirect(url_for('main'))
    # Handle post request from register form
    form = f.RegisterForm()
    if request.method == 'POST':
        user = SERVER.scanUsername(request.form['username'])  # existing account, if any
        # Compare the plaintext fields directly: hashing both sides was
        # redundant (and would always fail with a salted hasher).
        passwords_match = request.form['password'] == request.form['password2']
        if user is None and passwords_match and form.validate_on_submit():
            uid = SERVER.registerNewUser(request.form['username'],
                                         hasher.hash(request.form['password']))
            session['auth'] = SERVER.getUser(uid).getAuthCode()
            return redirect(url_for('main'))
        # Later checks deliberately overwrite earlier messages, matching
        # the original precedence (form error wins over username error).
        if not passwords_match:
            error = "Repeat password correctly."
        if user is not None:
            error = "Account with this username already exists!"
        if not form.validate_on_submit():
            error = "Username minimum length is 5 and password should have at least 8 characters!"
    return render_template('register.html', title=TITLE, form=form, error=error)
def register_patient(self, patient_name, patient_id):
    """Register a new patient with this hospital and issue their card.

    :param patient_name: Patient name
    :param patient_id: Patient id
    :return: Card on success; None when the patient is already
             registered elsewhere or the blockchain lookup fails
    """
    patient_uid = patient_name + patient_id
    # NOTE(review): builtin hash() is salted per-process on Python 3 —
    # confirm PYTHONHASHSEED is pinned or a project hash() shadows it.
    uid_digest = hash(patient_uid)
    # Ask the public blockchain whether this uid is already registered.
    print(">>> Sending request to bc to check if %s exists in blockchain" % (uid_digest))
    bc_reply = self.send_message_to_bc(bc_msg.contains_hash_uid_msg(uid_digest))
    if bc_reply == ERROR:
        return None
    if bc_reply:
        print("ERROR: Patient " + patient_name + " is affiliated with a hospital already")
        return None
    # Fresh keypair for the patient; publish the public half.
    priv_key, pub_key = crypto.generate_keys()
    self.add_to_blockchain(uid_digest, pub_key)
    # Persist the initial record locally and hand back the card.
    card = Card(patient_name, patient_id, patient_uid, priv_key, self.name)
    self.insert(patient_uid, pub_key, MedicalRecord(self.name, card))
    return card
def file_upload():
    """Handle the file-upload form POST.

    Hashes the upload, validates the form, stores the file under
    UPLOAD_FOLDER with a timestamp-prefixed safe name, and records a
    File row in the DB. Always redirects back to the referrer (or index).
    """
    if request.method == 'POST':
        logger.info(request.form)
        form = FileUploadForm(CombinedMultiDict((request.files, request.form)))
        upload = request.files.get('file')  # renamed: `file` shadowed the builtin
        if upload is None:
            # Guard: without this, hasher.hash(None) below would raise.
            flash('No file supplied', 'danger')
        else:
            form.hash.data = hasher.hash(upload)
            logger.info(form.hash.data)
            if form.validate():
                logger.info('Form is valid')
                upload.stream.seek(0)  # hashing consumed the stream; rewind before saving
                timestamp = int(datetime.datetime.now().timestamp())
                filename = str(timestamp) + secure_filename(upload.filename)
                upload.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
                logger.info(filename + ' Saved')
                db_file = File(name=form.upload_name.data,
                               cve_number=form.cve_number.data,
                               full_path=os.path.join(app.config['UPLOAD_FOLDER'], filename),
                               description=form.description.data,
                               file_type=form.upload_type.data,
                               hash=form.hash.data)
                db.session.add(db_file)
                db.session.commit()
                flash('File Uploaded', 'success')
            else:
                flash_form_errors(form)
    # Bounce the user back to wherever they came from.
    if request.referrer:
        return redirect(request.referrer)
    return redirect(url_for('index'))
def edit_file(id):
    """Display (GET) or overwrite (POST) the stored file with this id.

    :param id: primary key of the File row (parameter name kept for
               route compatibility even though it shadows the builtin)
    """
    db_file = File.query.filter_by(id=id).first()  # renamed: `file` shadowed the builtin
    if not db_file:
        return render_template(
            '404.html',
            error_number='Error 404',
            error_text='File Not Found',
            message='File could not be found. Please check your url and try again')
    if request.method == 'GET':
        with open(db_file.full_path, 'r') as old_file:
            return render_template('editFile.html', file=db_file, content=old_file.read())
    # POST: write the edited text, then re-hash and check for duplicates.
    # Guard: a missing form field returned None, and write(None) raised
    # TypeError in the original.
    new_text = request.form.get('file_text') or ''
    with open(db_file.full_path, 'w') as new_file:
        new_file.write(new_text)
    with open(db_file.full_path, 'r') as hash_file:
        new_hash = hasher.hash(hash_file)
    duplicate = File.query.filter(File.hash == new_hash, File.id != db_file.id).first()
    if duplicate:
        flash(f'Error: Identical file already exists as {duplicate.name}', 'danger')
    else:
        db_file.hash = new_hash
        db.session.commit()
        flash('File Updated', 'success')
    return redirect(url_for('index'))
def removeDuplicate(path):
    """Delete duplicate files under *path*, keeping one copy of each.

    Files are grouped by content hash; for every group, all but the
    first-encountered file are deleted and logged to a timestamped log
    file under the 'Marvellous' directory, which is then mailed out if
    the machine is online.

    :param path: directory to scan (relative paths are resolved first)
    """
    path = os.path.abspath(path)  # no-op for already-absolute paths
    if not os.path.isdir(path):
        print(
            "Provided argument is not a directory. Please provide valid directory"
        )
        exit()

    # Map content-hash -> list of files with that content.
    filelist = {}
    for folder, subfolders, files in os.walk(path):
        for filename in files:
            filename = os.path.join(folder, filename)
            hashcode = hasher.hash(filename)
            filelist.setdefault(hashcode, []).append(filename)

    # Only groups with more than one file contain duplicates.
    data = [group for group in filelist.values() if len(group) > 1]

    foldername = os.path.abspath('Marvellous')
    if not os.path.isdir(foldername):
        os.mkdir('Marvellous')
    logfile = os.path.join(
        foldername, 'Marvellous-%s.log' % (time.strftime("%Y-%m-%d-%H-%M-%S")))
    # `with` guarantees the log is flushed/closed even if a deletion
    # raises (the original leaked the handle on error).
    with open(logfile, 'w') as f:
        for files in data:
            # Keep the first file of each group; delete the rest.
            for filename in files[1:]:
                f.write('File Deleted : %s' % filename + '\n')
                os.remove(filename)

    username = '******'
    password = '******'
    if is_connected():
        startTime = time.time()
        MailSender(username, password, logfile, time.ctime())
        endtime = time.time()
        print("Took %s second to evaluate" % (endtime - startTime))
    else:
        print("There is no internet connection")
def handle_update_event(self, event): filepath = os.path.join(Config().get("core", "syncdir"), event.path) #first, copy the file over to a temporary directory, get its hash, #upload it, and then move it to the filename with that hash value handle, tmppath = mkstemp(dir=Config().get("core", "cachedir")) os.close(handle) #we don't really want it open, we just want a good name try: copy2(filepath, tmppath) except IOError: logging.warning("Dropping update event because file was deleted before we could upload it: %s" % (str(event))) return #get the mode of the file stats = os.stat(filepath) event.permissions = str(stat.S_IMODE(stats.st_mode)) #hash the temporary file event.hash = hash(tmppath)[0] logging.debug("HASHED "+str(event)) #make sure the most recent version of this file doesn't match this one #otherwise it's pointless to re-upload it res = self.database.execute("""SELECT * FROM events WHERE localpath=? AND rev != 0 ORDER BY rev DESC LIMIT 1""", (event.path,)) latest = next(res, None) if latest is not None: e = Event(0) e.fromseq(latest) if e.hash == event.hash: #returning because hashes are equal #but first, remove the temporary file in the cache os.remove(tmppath) return res = self.database.execute("SELECT * FROM events WHERE hash=? AND rev!=0", (event.hash,)) sameHash = next(res, None) #if this file isn't already uploaded, add it to the list to upload, and #upload them in a batch when we have a chance if sameHash is None: self.to_upload.append((event, tmppath)) return e = Event(0) e.fromseq(sameHash) event.storagekey = e.storagekey #add event to the database self.database.execute("INSERT INTO events VALUES (0,?,?,?,?,?,?,?)", event.totuple()[1:]) #move tmp file to hash-named file in cache directory cachepath = os.path.join(Config().get("core", "cachedir"), event.hash) move(tmppath, cachepath) self.sender_queue.put(event)
def registerNewUser(cls, uid, uusername, upasswordHash):
    """Creates a new user from the given data.

    The auth code is derived by hashing id + username + password hash;
    the points structures start out empty.
    """
    authCode = hasher.hash(str(uid) + uusername + upasswordHash)
    # New accounts begin with no resource points and a zero total.
    return cls(uid, uusername, upasswordHash, dict(), 0, authCode)
def transfer(self, card, card_path, dst_hospital_name, dst_hospital_address, dst_hospital_port):
    """ Function to transfer patient data to another hospital.
    :param card: card
    :param card_path: card path
    :param dst_hospital_name: name of hospital where records are being transferred to
    :param dst_hospital_address: hospital address to transfer data to
    :param dst_hospital_port: hospital port
    :return: boolean
    """
    # Verify that card and hospital name match.
    if not self.valid_card(card):
        print("ERROR: invalid card, unable to accomodate transfer request")
        return False
    # Obtain the hash index used by the public blockchain.
    hash_uid = hash(card.uid)
    # Get the public key from the public blockchain.
    print(">>> Sending request to get pub_key for read request")
    pub_key = self.send_message_to_bc(bc_msg.get_pub_key(hash_uid))
    # The encrypted uid corresponds to the key in the hospital k,v store.
    hosp_db_key = crypto.encrypt(card.uid, pub_key)
    # Confirm that data belongs to the card holder.
    if not self.data_belongs_to_user(hosp_db_key, card):
        print("ERROR: No data found for patient")
        return False
    # Push every stored block to the destination hospital, aborting on
    # the first failure so the local copy is only dropped after a
    # complete transfer.
    if self.db.get(hosp_db_key):
        blocks = self.db.get(hosp_db_key).split(",")
        for block in blocks:
            response = self.send_msg(hospital_msg.transfer_write_msg(hosp_db_key, block),
                                     dst_hospital_address, dst_hospital_port)
            if isinstance(response, int):
                # Transport-level failures are signalled with an int code.
                print("Hospital server error")
                return False
            if not response.get(hospital_msg.RESPONSE):
                return False
        # Remove data from this hospital.
        self.db.pop(hosp_db_key)
    # Even if there may be no data to transfer, update the card.
    card.update(dst_hospital_name)
    # Update card to store the location of where the private key is stored.
    # NOTE(review): assumes card.priv_key_path is populated here — confirm.
    card.priv_key = card.priv_key_path
    # `with` ensures the card file is closed even if the write fails
    # (the original leaked the handle on error).
    with open(card_path, "w+") as f:
        f.write(str(card))
    print("Successfully transferred records to %s" % (card.hospital_name))
    return True
def login():
    """Render the login page and authenticate POSTed credentials."""
    # A visitor who already holds an auth session skips straight to main.
    if session.get('auth'):
        return redirect(url_for('main'))
    form = f.LoginForm()
    error = ""
    if request.method == 'POST':
        # Look up the account by username + hashed password.
        user = SERVER.scanLogin(request.form['username'],
                                hasher.hash(request.form['password']))
        if user is not None:
            session['auth'] = user.getAuthCode()
            return redirect(url_for('main'))
        error = 'Invalid Credentials.'
    return render_template('login.html', title=TITLE, form=form, error=error)
def read(self, uid):
    """Return every encrypted medical-record block stored for *uid*.

    :param uid: Patient uid
    :return: encrypted medical records, or None when the key lookup or
             the record lookup comes back empty
    """
    # The public blockchain is indexed by the hashed uid.
    hash_uid = hash(uid)
    print(">>> Sending request to get pub_key for read request")
    pub_key = self.send_message_to_bc(bc_msg.get_pub_key(hash_uid))
    if not pub_key:
        return None
    # The hospital k,v store is keyed by the uid encrypted under pub_key.
    blocks = self.get_blocks(crypto.encrypt(uid, pub_key))
    if not blocks:
        print("ERROR: No data found for patient")
        return None
    return blocks
def write(self, card, medical_record, phy):
    """Append/merge a medical record into the hospital k,v store.

    :param card: Patient card
    :param medical_record: Medical record
    :param phy: Physician
    :return: True on success, False on any validation or lookup failure
    """
    # Both the physician and the card must check out first.
    if not self.valid_phy(phy) or not self.valid_card(card):
        return False
    # The public blockchain is indexed by the hashed uid.
    hash_uid = hash(card.uid)
    print(">>> Sending request to get pub_key for write request")
    pub_key = self.send_message_to_bc(bc_msg.get_pub_key(hash_uid))
    if not pub_key:
        return False
    # The encrypted uid is the key into the hospital k,v store.
    hosp_db_key = crypto.encrypt(card.uid, pub_key)
    if not self.data_belongs_to_user(hosp_db_key, card):
        print("ERROR: Private key does not correspond to public key")
        return False
    # Merge with the latest stored record when one exists; otherwise
    # this write becomes the patient's first record.
    most_recent_write = self.get_last_block(hosp_db_key)
    if most_recent_write:
        # Decrypt the previous record and convert it back to a dict
        # before merging in the new data.
        prev = self.deconstruct(crypto.decrypt(most_recent_write, card.priv_key))
        self.insert(card.uid, card.priv_key, str(self.merge(prev, medical_record)))
    else:
        self.insert(card.uid, card.priv_key, str(medical_record))
    return True
def download(self):
    """Fetch every queued event's file and install it into the sync dir.

    Downloads all pending files in one storage batch, verifies each
    against the event's recorded hash, restores permissions, moves the
    file to its hash-named cache slot, and links it into the sync dir.
    """
    syncdir = Config().get("core", "syncdir")
    cachedir = Config().get("core", "cachedir")
    # Make a uniquely-named temporary file per pending download.
    tmppaths = []
    for _ in self.to_download:
        handle, tmppath = mkstemp(dir=cachedir)
        close(handle)  # we don't really want it open, we just want a good name
        tmppaths.append(tmppath)
    # Download all files in a single storage batch.
    self.storage.getm(((tmppath, event.hash, event.storagekey)
                       for tmppath, event in izip(tmppaths, self.to_download)))
    #TODO handle failure of storage.get (will throw exception if fails)
    # Verify, fix permissions, and move each download into place.
    for tmppath, event in izip(tmppaths, self.to_download):
        h = hash(tmppath)[0]
        if h != event.hash:
            logging.error(" DOWN: hash doesn't match downloaded file event: " + str(event))
            logging.error(" : offending hash: " + h)
            continue  #TODO handle this error somehow?
        # Restore the original file mode when the event recorded one.
        if len(event.permissions.strip()) > 0:
            chmod(tmppath, int(event.permissions))
        # Move temp file to its hash-named cache slot, then link it
        # into the sync directory.
        cachepath = path.join(cachedir, event.hash)
        move(tmppath, cachepath)
        # Reuse the hoisted syncdir (the original computed this local
        # but then re-read the config here, leaving it unused).
        dst = path.join(syncdir, event.path)
        self.cache_to_asink_dir(cachepath, dst)
    self.to_download = []
def remove(self, card):
    """ Function to remove all patient data.
    :param card: card
    :return: boolean
    """
    # Reject cards that don't match this hospital.
    if not self.valid_card(card):
        return False
    # Look up the patient's public key via the hashed uid.
    hash_uid = hash(card.uid)
    print(">>> Sending request to get pub_key for remove request")
    pub_key = self.send_message_to_bc(bc_msg.get_pub_key(hash_uid))
    # The encrypted uid is the key into the hospital k,v store.
    hosp_db_key = crypto.encrypt(card.uid, pub_key)
    # Only the legitimate card holder may wipe the records.
    if not self.data_belongs_to_user(hosp_db_key, card):
        print("ERROR: Private key does not correspond to public key")
        return False
    self.db.pop(hosp_db_key)
    return True
def download(self):
    """Download all queued events' files, verify them, and install them.

    Each verified file ends up at cachedir/<hash> and is then placed
    into the sync directory via cache_to_asink_dir.
    """
    # NOTE(review): syncdir is never used — the dst computation below
    # re-reads the same config value.
    syncdir = Config().get("core", "syncdir")
    cachedir = Config().get("core", "cachedir")
    #make all the temporary files for the downloads
    tmppaths = []
    for e in self.to_download:
        handle, tmppath = mkstemp(dir=cachedir)
        close(handle)  #we don't really want it open, we just want a good name
        tmppaths.append(tmppath)
    #now, actually download all the files in one batch
    self.storage.getm(((tmppath, event.hash, event.storagekey)
                       for tmppath, event in izip(tmppaths, self.to_download)))
    #TODO handle failure of storage.get (will throw exception if fails)
    #now, move all the downloaded files to the cache
    for tmppath, event in izip(tmppaths, self.to_download):
        # NOTE(review): `hash(tmppath)[0]` — presumably a project hashing
        # helper returning a tuple, not the builtin; confirm.
        h = hash(tmppath)[0]
        if h != event.hash:
            logging.error(" DOWN: hash doesn't match downloaded file event: "+str(event))
            logging.error(" : offending hash: "+h)
            continue  #TODO handle this error somehow?
        #set the permissions, if we have them
        if len(event.permissions.strip()) > 0:
            chmod(tmppath, int(event.permissions))
        #move temp file to hashed cache file
        cachepath = path.join(cachedir, event.hash)
        move(tmppath, cachepath)
        dst = path.join(Config().get("core", "syncdir"), event.path)
        self.cache_to_asink_dir(cachepath, dst)
    self.to_download = []
import os
import sys
# Make the parent directory importable so the local `hasher` module resolves.
sys.path.append(os.path.dirname(os.path.dirname(os.path.realpath(__file__))))
import pprint
import json
import hasher

# Self-test: run every case in tests.json through each digest algorithm
# and compare against the recorded checksums.  On mismatch, dump the
# actual digests so the fixture can be inspected/updated.
with open("tests.json", "rb") as tf:
    tests = json.load(tf)

for t in tests:
    pprint.pprint([t["data"], t["options"]])
    try:
        assert hasher.hash("md5", t["data"], **t["options"]) == t["md5sum"]
        assert hasher.hash("sha1", t["data"], **t["options"]) == t["sha1sum"]
        assert hasher.hash("sha256", t["data"], **t["options"]) == t["sha256sum"]
        assert hasher.hash("sha512", t["data"], **t["options"]) == t["sha512sum"]
    except AssertionError:
        # Narrowed from a bare `except:` so real failures (KeyError,
        # TypeError, KeyboardInterrupt) are no longer swallowed.
        print(json.dumps({
            "md5sum": hasher.hash("md5", t["data"], **t["options"]),
            "sha1sum": hasher.hash("sha1", t["data"], **t["options"]),
            "sha256sum": hasher.hash("sha256", t["data"], **t["options"]),
            "sha512sum": hasher.hash("sha512", t["data"], **t["options"])
        }, indent=4))
def handle_update_event(self, event): filepath = os.path.join(Config().get("core", "syncdir"), event.path) #first, copy the file over to a temporary directory, get its hash, #upload it, and then move it to the filename with that hash value handle, tmppath = mkstemp(dir=Config().get("core", "cachedir")) os.close( handle) #we don't really want it open, we just want a good name try: copy2(filepath, tmppath) except IOError: logging.warning( "Dropping update event because file was deleted before we could upload it: %s" % (str(event))) return #get the mode of the file stats = os.stat(filepath) event.permissions = str(stat.S_IMODE(stats.st_mode)) #hash the temporary file event.hash = hash(tmppath)[0] logging.debug("HASHED " + str(event)) #make sure the most recent version of this file doesn't match this one #otherwise it's pointless to re-upload it res = self.database.execute( """SELECT * FROM events WHERE localpath=? AND rev != 0 ORDER BY rev DESC LIMIT 1""", (event.path, )) latest = next(res, None) if latest is not None: e = Event(0) e.fromseq(latest) if e.hash == event.hash: #returning because hashes are equal #but first, remove the temporary file in the cache os.remove(tmppath) return res = self.database.execute( "SELECT * FROM events WHERE hash=? AND rev!=0", (event.hash, )) sameHash = next(res, None) #if this file isn't already uploaded, add it to the list to upload, and #upload them in a batch when we have a chance if sameHash is None: self.to_upload.append((event, tmppath)) return e = Event(0) e.fromseq(sameHash) event.storagekey = e.storagekey #add event to the database self.database.execute("INSERT INTO events VALUES (0,?,?,?,?,?,?,?)", event.totuple()[1:]) #move tmp file to hash-named file in cache directory cachepath = os.path.join(Config().get("core", "cachedir"), event.hash) move(tmppath, cachepath) self.sender_queue.put(event)