def verify_hashed_password(password, header_salt_hash, backend="cryptographyless"):
    header, salt, correct_output = load_data(header_salt_hash)
    algorithm, sub_algorithm, iterations, salt_size, output_size = load_data(header)
    if algorithm == "pbkdf2hmac":
        output = hashlib.pbkdf2_hmac(sub_algorithm, header + password, salt,
                                     iterations, output_size)
    else:
        return -1
    return constant_time_comparison(output, correct_output)
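# For context, a minimal sketch of the hashing side that would produce the packed
# header/salt/hash consumed by verify_hashed_password above. The helper name
# hash_password, the default parameters, and the use of os.urandom are assumptions
# and not part of this codebase; only the pbkdf2hmac header layout and the
# save_data/load_data packing contract are taken from the verification code.
def hash_password(password, iterations=100000, salt_size=16, output_size=32):
    header = save_data("pbkdf2hmac", "sha256", iterations, salt_size, output_size)
    salt = os.urandom(salt_size)       # fresh random salt per password
    output = hashlib.pbkdf2_hmac("sha256", header + password, salt,
                                 iterations, output_size)
    return save_data(header, salt, output)   # blob accepted by verify_hashed_password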
def _decrypt(data, key, mac_key):
    """ usage: _decrypt(data, key, mac_key) => (plaintext, extra_data) or plaintext

        Returns (plaintext, extra_data) when extra data is available.
        Otherwise, just returns the plaintext data.
        Authenticity and integrity of the plaintext/extra data is guaranteed. """
    header, encrypted_data, nonce, mac_tag, extra_data = load_data(data)
    algorithm, mode, mac_algorithm = header.split('_', 2)
    mac_algorithm = mac_algorithm.lower()
    try:
        hasher = getattr(hashlib, mac_algorithm)
    except AttributeError:
        raise ValueError("Unsupported mode {}".format(header))
    if hmac.HMAC(mac_key, header + extra_data + nonce + encrypted_data, hasher).digest() == mac_tag:
        assert mode == "hmac"
        cipher = _hash_stream_cipher_hmac
        plaintext = cipher(encrypted_data, key, nonce, algorithm)
        if extra_data:
            return (plaintext, extra_data)
        else:
            return plaintext
    else:
        raise InvalidTag("Invalid tag")
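# A rough sketch of the encryption half that _decrypt above expects. It mirrors the
# header layout ("algorithm_mode_macalgorithm"), the encrypt-then-MAC order, and the
# save_data packing implied by _decrypt and by test__encrypt__decrypt. The default
# algorithm, the nonce size, and the assumption that _hash_stream_cipher_hmac is its
# own inverse are guesses, not taken from the original code.
def _encrypt(data, key, mac_key, extra_data='', algorithm="sha512", nonce_size=32):
    nonce = os.urandom(nonce_size)
    encrypted_data = _hash_stream_cipher_hmac(data, key, nonce, algorithm)
    header = algorithm + "_hmac_" + algorithm
    mac_tag = hmac.HMAC(mac_key, header + extra_data + nonce + encrypted_data,
                        getattr(hashlib, algorithm)).digest()
    return save_data(header, encrypted_data, nonce, mac_tag, extra_data)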
def compute_search_sort(search_data, trained_data):
    """ Compare two datasets and return the matching data. """
    if not trained_data:
        # if no data was trained beforehand, try to load the training data from the pickled file
        trained_data = persistence.load_data()
    dm = descriptor_matcher()
    match_data = []
    # match each search image against all training images (only the first face found
    # in the search image is computed - this could be changed here or in descriptor_matcher.py)
    for i in range(len(search_data)):
        match_data.append(Match_Data(search_data[i].filename))  # add search-file data to the container
        if hasattr(search_data[i], 'facedata'):  # check whether this image has faces with corresponding data
            logging.info('Start match with searchfile %s (search.py)' % search_data[i].filename)
            matcheddata = dm.match(trained_data, search_data[i])  # compute the matching
            match_data[i].add_matched_data(matcheddata)  # add the result to the match_data container
        else:
            logging.error('Error in executing Descriptor-Matcher - Maybe no face in Search-File %s (search.py)'
                          % search_data[i].filename)
    if not match_data:
        logging.critical('Error in executing Descriptor-Matcher - No match-data available (search.py)')
        sys.exit()
    else:
        return match_data
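# A minimal sketch of the Match_Data container used above, inferred only from the
# calls made in compute_search_sort (the constructor takes a filename and matching
# results are attached via add_matched_data); the real class may store more state.
class Match_Data(object):
    def __init__(self, filename):
        self.filename = filename   # search image this result set belongs to
        self.matched_data = []     # matching results, one entry per match run

    def add_matched_data(self, matcheddata):
        self.matched_data.append(matcheddata)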
def test_pack_unpack():
    ciphertext = "as;flkjasdf;lkjasfd"
    iv = "21209348afdso"
    tag = "zpx98yvzclkj"
    extra_data = "1x897=a[19njkS"
    packed = save_data(ciphertext, iv, tag, extra_data)
    _ciphertext, _iv, _tag, _extra_data = load_data(packed)
    assert _ciphertext == ciphertext
    assert _iv == iv
    assert _tag == tag
    assert _extra_data == extra_data
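# save_data/load_data are used throughout these snippets as an opaque pack/unpack
# pair whose real implementation is not shown here. The sketch below is one
# plausible round-trip encoding that would satisfy test_pack_unpack and also
# preserve the non-string fields packed elsewhere (e.g. the pbkdf2 header). It is
# only an illustration of the expected contract, not the library's actual code,
# and pickle should never be used like this on untrusted input.
import pickle

def save_data(*fields):
    return pickle.dumps(tuple(fields))

def load_data(packed):
    return pickle.loads(packed)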
def verify_mac(key, packed_data, algorithm="SHA256", backend=None):
    """ Verifies a message authentication code as obtained by apply_mac.
        Successful comparison indicates integrity and authenticity of the data. """
    mac, data = load_data(packed_data)
    calculated_mac = hmac.HMAC(key, algorithm + "::" + data, Hash_Factory(algorithm)).digest()
    try:
        if not hmac.compare_digest(mac, calculated_mac):
            raise InvalidTag()
    except InvalidTag:
        # to be consistent with how it is done when the cryptography package is used
        return InvalidTag()
    else:
        return data
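# A sketch of the producing side referenced in the docstring above. apply_mac is not
# shown in this excerpt; the body below is inferred from verify_mac (same
# "algorithm::data" HMAC input, same packed mac/data layout), so treat it as an
# assumption rather than the library's actual implementation.
def apply_mac(key, data, algorithm="SHA256", backend=None):
    mac = hmac.HMAC(key, algorithm + "::" + data, Hash_Factory(algorithm)).digest()
    return save_data(mac, data)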
def test__encrypt__decrypt():
    packet = _encrypt(_TEST_MESSAGE, _TEST_KEY, _TEST_KEY, extra_data="extra_data")
    #print "Encrypted packet: \n\n\n", packet
    decrypted = _decrypt(packet, _TEST_KEY, _TEST_KEY)
    assert decrypted == (_TEST_MESSAGE, "extra_data"), decrypted

    # tamper with the extra_data field and verify that decryption now fails
    header, encrypted_data, nonce, mac_tag, extra_data = load_data(packet)
    extra_data = "Changed"
    packet = save_data(header, encrypted_data, nonce, mac_tag, extra_data)
    try:
        _decrypt(packet, _TEST_KEY, _TEST_KEY)
    except InvalidTag:
        pass
    else:
        print "Failed to protect authenticity/integrity of extra_data"
        assert False
def rebalance_data():
    data = load_data(DATA_FILE)
    new_data = {}
    send_count = 0
    for k in data:
        location = get_location(k)
        # this needs to only send data to nodes that need it??
        for p in get_ports(location):
            if p != PORT:
                print(f"sending record at location {location} to port {p}")
                msg = serialize_add_record(k, data[k]['value'], data[k]['lsn'])
                simple_send(msg, p)
                send_count += 1
            else:
                new_data[k] = data[k]
    store_data(DATA_FILE, new_data)
    logging.info(f"sent {send_count} records")
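# rebalance_data relies on two routing helpers defined elsewhere in this service.
# The sketch below shows one way they could work, under the assumptions that
# partition_table maps hash-ring locations to ports and that each record is
# replicated to the next REPLICATION_FACTOR locations clockwise; the real
# implementations and constants may differ.
REPLICATION_FACTOR = 2

def get_location(key):
    # map a key onto one of the ring locations via a stable hash
    locations = sorted(partition_table.keys())
    index = int(hashlib.sha256(key.encode()).hexdigest(), 16) % len(locations)
    return locations[index]

def get_ports(location):
    # the owning node plus the next replicas clockwise on the ring
    locations = sorted(partition_table.keys())
    start = locations.index(location)
    return [partition_table[locations[(start + i) % len(locations)]]
            for i in range(REPLICATION_FACTOR)]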
f"received partition table: {sorted(partition_table.items())}") known_ports = [v for v in partition_table.values()] if PORT not in known_ports: table_updates = calculate_table_updates() partition_table.update(table_updates) logging.info( f"calculated new partition table: {sorted(partition_table.items())}" ) threading.Thread(target=rebalance_data).start() for p in known_ports: if p == PORT: continue msg = serialize_add_nodes(table_updates) threading.Thread(target=simple_send, args=(msg, p)).start() else: data = load_data(DATA_FILE) for p in known_ports: if p == PORT: continue lsn = max_lsn(data) msg = serialize_catchup(lsn, PORT) logging.info(f"requesting lsn {lsn} from port {p}") response = simple_send_and_receive(msg, p) data.update(deserialize_records(BytesIO(response))) store_data(DATA_FILE, data) logging.info("update from replicas complete") s = socket.socket(socket.AF_INET) try: s.bind(('localhost', PORT)) except:
from persistence import load_data, get_team
from score_calculator import average_score, synergy_score, onsite_score

load_data()
at = get_team(1)
fs = average_score(at)
gs = synergy_score(at)
scores = onsite_score(at)
print("Average scores: " + str([f"{k}:{v:.3f}" for k, v in fs.items()]))
print("Synergy scores: " + str([f"{k}:{v:.3f}" for k, v in gs.items()]))
print("Onsite scores: " + str([f"{k}:{v:.3f}" for k, v in scores.items()]))
def test_valid_data():
    persistence.load_data()
    persistence._all_users = {}
    persistence._all_teams = {}
import json
import os

from flask_cors import CORS
from flask import Flask, request, send_from_directory

from persistence import get_team, get_user, load_data
from score_calculator import onsite_score

load_data()  # Load in dummy data. TODO: change this once we have a real app

app = Flask(
    __name__,
    static_url_path="",
    static_folder="../frontend",
    template_folder="../frontend",
)
CORS(app)


@app.route("/team")
def team_route():
    """Returns the team represented as JSON."""
    if "team_id" not in request.args:
        return "team_id is missing in the request.", 400
    team_id = int(request.args["team_id"])
    try:
        team = get_team(team_id)
    except Exception as e:
        return f"invalid team id {e}", 400