def get_entry_proof(entry_number, total_entries, proof_identifier):
    """Return a Merkle audit proof for one entry as a JSON response.

    :param entry_number: entry to prove (coerced with int() below)
    :param total_entries: tree size the proof is computed against
    :param proof_identifier: must be the literal 'merkle:sha-256'
    :raises ApplicationError: E400 for any other proof identifier
    """
    # Fix: the original logged str() (an empty string) instead of
    # str(total_entries), losing the tree size from the log line.
    current_app.logger.info("Get entry proof for entry %s of %s",
                            str(entry_number), str(total_entries))
    if proof_identifier != 'merkle:sha-256':
        current_app.logger.warning("Invalid proof identifier supplied: %s",
                                   proof_identifier)
        raise ApplicationError("Invalid proof identifier", "E400", 400)
    cursor = start()
    try:
        mtree_data = MerkleData(cursor)
        mtree = MerkleTree(mtree_data)
        path = mtree.entry_proof(int(entry_number), int(total_entries))
        result = {
            "proof-identifier": "merkle:sha-256",
            "entry-number": entry_number,
            "merkle-audit-path": []
        }
        for item in path:
            current_app.logger.debug("{}".format(type(item)))
            # Proof nodes are raw hashes; expose them hex-encoded with an
            # algorithm prefix, matching the other proof endpoints.
            result['merkle-audit-path'].append("sha-256:" + b16encode(item).decode())
        current_app.logger.info("Return entry proof (path length %d)",
                                len(result['merkle-audit-path']))
        return Response(json.dumps(result), mimetype='application/json')
    finally:
        # Read-only transaction: commit releases the cursor/connection.
        commit(cursor)
def get_consistency_proof(total_entries_1, total_entries_2, proof_identifier):
    """Build a Merkle consistency proof between two tree sizes.

    Responds with JSON listing the proof nodes as hex-encoded sha-256
    hashes; any proof identifier other than 'merkle:sha-256' is rejected
    with an E400 ApplicationError.
    """
    current_app.logger.info(
        "Get consistency proof for trees with sizes %s and %s",
        str(total_entries_1), str(total_entries_2))
    if proof_identifier != 'merkle:sha-256':
        current_app.logger.warning(
            "Invalid proof identifier supplied: %s", proof_identifier)
        raise ApplicationError("Invalid proof identifier", "E400", 400)
    cursor = start()
    try:
        tree = MerkleTree(MerkleData(cursor))
        proof_nodes = tree.consistency_proof(
            int(total_entries_1), int(total_entries_2))
        encoded = []
        for node in proof_nodes:
            current_app.logger.debug("{}".format(type(node)))
            encoded.append("sha-256:" + b16encode(node).decode())
        result = {
            "proof-identifier": "merkle:sha-256",
            "merkle-consistency-nodes": encoded
        }
        current_app.logger.info(
            "Return consistency proof (%d nodes)",
            len(result['merkle-consistency-nodes']))
        return Response(json.dumps(result), mimetype='application/json')
    finally:
        commit(cursor)
def read_item_entries(item_hash):
    """Return every entry recorded for *item_hash*, newest first.

    :param item_hash: hash identifying the item
    :return: list of entry dicts, or None when no entries exist
    """
    app.logger.info("Read item entries for '%s'", item_hash)
    cursor = start()
    try:
        # Fix: the original concatenated '...%(hash)s' directly onto
        # 'ORDER BY ...' with no separating space, producing the invalid
        # SQL fragment "%(hash)sORDER BY entry_number DESC".
        cursor.execute(
            'SELECT entry_number, entry_timestamp, item_hash, key, item_signature '
            'FROM entry '
            'WHERE item_hash = %(hash)s '
            'ORDER BY entry_number DESC',
            {'hash': item_hash})
        rows = cursor.fetchall()
        if not rows:
            app.logger.warning("No item entries found for '%s'", item_hash)
            return None
        result = [
            {
                "entry-number": row['entry_number'],
                "entry-timestamp": row['entry_timestamp'].strftime('%Y-%m-%d %H:%M:%S.%f'),
                "item-hash": row['item_hash'],
                "key": row['key'],
                "item-signature": row['item_signature']
            }
            for row in rows
        ]
        app.logger.info("%d item entries returned", len(result))
        return result
    finally:
        commit(cursor)
def read_record_entries(field_value):
    """Fetch every entry for the given key, newest first.

    :return: list of entry dicts, or None when the key has no entries
    """
    app.logger.info("Read record entries for '%s'", field_value)
    cursor = start()
    try:
        cursor.execute(
            'SELECT entry_number, entry_timestamp, item_hash, key '
            'FROM entry '
            'WHERE key=%(key)s ORDER BY entry_number DESC',
            {'key': field_value})
        rows = cursor.fetchall()
        if not rows:
            app.logger.warning("No record entries found for '%s'", field_value)
            return None
        result = [
            {
                "entry-number": row['entry_number'],
                "entry-timestamp": row['entry_timestamp'].strftime('%Y-%m-%d %H:%M:%S.%f'),
                "item-hash": row['item_hash'],
                "key": row['key']
            }
            for row in rows
        ]
        app.logger.info("%d record entries returned", len(result))
        return result
    finally:
        commit(cursor)
def read_record_by_field_value(field_value):
    """Return the most recent record for *field_value*, or None if absent."""
    app.logger.info("Read record by field value '%s'", field_value)
    cursor = start()
    try:
        cursor.execute(
            'SELECT e.entry_number, e.entry_timestamp, e.item_hash, e.key, i.item '
            'FROM entry e, item i '
            'WHERE e.item_hash = i.item_hash '
            'AND key=%(key)s ORDER BY e.entry_number DESC LIMIT 1',
            {'key': field_value})
        latest = cursor.fetchone()
        if latest is None:
            return None
        record = {
            "entry-number": latest['entry_number'],
            "entry-timestamp": latest['entry_timestamp'].strftime('%Y-%m-%d %H:%M:%S.%f'),
            "item-hash": latest['item_hash'],
            "key": latest['key'],
            "item": latest['item']
        }
        return record
    finally:
        commit(cursor)
def read_entry(entry_number):
    """Look up a single entry by number.

    :return: None when entry_number exceeds the current entry count,
             an empty placeholder entry when the row is missing,
             otherwise the entry dict.
    """
    app.logger.info("Read entry '%s'", str(entry_number))
    cursor = start()
    try:
        # Guard: numbers past the end of the register are not entries.
        if int(entry_number) > count_entries(cursor):
            return None
        cursor.execute(
            'SELECT entry_timestamp, item_hash, key, item_signature '
            'FROM entry '
            'WHERE entry_number=%(number)s',
            {"number": entry_number})
        row = cursor.fetchone()
        if row is None:
            return create_empty_entry(entry_number)
        stamp = row['entry_timestamp'].strftime('%Y-%m-%d %H:%M:%S.%f')
        return {
            "entry-number": entry_number,
            "entry-timestamp": stamp,
            "item-hash": row['item_hash'],
            "key": row['key'],
            "item-signature": row['item_signature']
        }
    finally:
        commit(cursor)
def insert_item(item, item_hash, item_signature):
    """Insert one item in its own transaction and return its entry number.

    Rolls back and re-raises if the insert (or commit) fails.
    """
    app.logger.info("Insert item")
    cursor = start()
    try:
        entry_number = insert_item_in_transaction(
            cursor, item, item_hash, item_signature)
        commit(cursor)
        return entry_number
    except Exception:  # pragma: no cover
        rollback(cursor)
        raise
def get_entries(page):
    """Return one page of entries as JSON.

    Also stores the total entry count on *page* via page.set_count().
    """
    current_app.logger.info("Get entries ")
    cursor = start()
    try:
        total = count_entries(cursor)
        entries = read_entries(cursor, page.start, page.limit, total)
        page.set_count(total)
    finally:
        commit(cursor)
    current_app.logger.info("Returning %d entries", len(entries))
    return Response(json.dumps(entries), mimetype='application/json')
def read_item(item_hash):
    """Return the stored item for *item_hash*, or None when unknown."""
    app.logger.info("Read item '%s'", item_hash)
    cursor = start()
    try:
        cursor.execute('SELECT item FROM item '
                       'WHERE item_hash = %(hash)s', {'hash': item_hash})
        row = cursor.fetchone()
        return None if row is None else row['item']
    finally:
        commit(cursor)
def get_register():
    """Return register metadata plus the register's own record as JSON."""
    app.logger.info("Get register")
    # Fix: the original used json.load(open(...)), leaking the file
    # handle; a context manager guarantees it is closed.
    with open(app.config['REGISTER_RECORD']) as record_file:
        record = json.load(record_file)
    cursor = start()
    try:
        response_data = {
            "domain": "TBC",
            # NOTE(review): helper name 'get_lastest_update' has a typo
            # upstream; kept as-is since the definition is outside this file.
            "last-updated": get_lastest_update(cursor),
            "register-record": record,
            "total-entries": count_entries(cursor),
            "total-items": count_items(cursor),
            "total-records": count_all_records(cursor)
        }
        return Response(json.dumps(response_data), mimetype='application/json')
    finally:
        commit(cursor)
def insert_items(item_list):
    """Insert a batch of items inside one transaction.

    :param item_list: iterable of dicts with 'item', 'item-hash' and
                      'item-signature' keys
    :return: list of {'item-hash', 'entry-number'} dicts in input order
    Rolls back the whole batch (and re-raises) on any failure.
    """
    app.logger.info("Insert items")
    result = []
    cursor = start()
    try:
        for payload in item_list:
            number = insert_item_in_transaction(
                cursor, payload['item'], payload['item-hash'],
                payload['item-signature'])
            result.append({
                'item-hash': payload['item-hash'],
                'entry-number': number
            })
        commit(cursor)
    except Exception as e:  # pragma: no cover
        app.logger.exception(str(e))
        rollback(cursor)
        raise
    return result
def read_all_records(offset, limit):
    """Read one page of latest-per-key records.

    :param offset: number of records to skip
    :param limit: maximum number of records to return
    :return: (records_by_key, total_record_count); records are the
             highest-numbered entry per key, ordered by entry number.
    """
    app.logger.info("Read %s records from %s", str(limit), str(offset))
    key_field = app.config['REGISTER_KEY_FIELD']
    cursor = start()
    try:
        # Fix: the total count now runs inside the try block so the cursor
        # is always released via commit() even if counting raises
        # (originally it executed before the try).
        number = count_all_records(cursor)
        cursor.execute(
            'SELECT e2.entry_number, e2.entry_timestamp, e2.item_hash, e2.key, i.item '
            'FROM entry e2, item i '
            'WHERE e2.item_hash = i.item_hash '
            'AND e2.entry_number IN ( '
            '    SELECT MAX(e.entry_number) '
            '    FROM entry e '
            '    GROUP BY e.key '
            ') '
            'ORDER BY e2.entry_number ASC '
            'LIMIT %(limit)s OFFSET %(offset)s', {
                'limit': limit,
                'offset': offset
            })
        rows = cursor.fetchall()
        result = {}
        for row in rows:
            # NOTE(review): 'entry_timestamp' (underscore) is inconsistent
            # with the 'entry-timestamp' key used elsewhere in this file;
            # kept as-is because changing it would alter the API payload.
            result[row['key']] = {
                "entry-number": row['entry_number'],
                "entry_timestamp": row['entry_timestamp'].strftime('%Y-%m-%d %H:%M:%S.%f'),
                "item-hash": row['item_hash'],
                key_field: row['key'],
                'item': row['item']
            }
        app.logger.info("%d records returned", len(result))
        return result, number
    finally:
        commit(cursor)
def get_register_proof(proof_identifier):
    """Return the register's current Merkle tree head as JSON.

    Rejects any proof identifier other than 'merkle:sha-256' with an
    E400 ApplicationError. The tree-head signature is not yet implemented.
    """
    current_app.logger.info("Get register proof")
    if proof_identifier != 'merkle:sha-256':
        current_app.logger.warning("Invalid proof identifier supplied: %s",
                                   proof_identifier)
        raise ApplicationError("Invalid proof identifier", "E400", 400)
    cursor = start()
    try:
        tree = MerkleTree(MerkleData(cursor))
        proof = {
            "proof-identifier": "merkle:sha-256",
            "tree-size": tree.tree_size(),
            "timestamp": datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f'),
            "root-hash": "sha-256:" + b16encode(tree.root_hash()).decode(),
            "tree-head-signature": "TODO"  # TODO(signature)
        }
        current_app.logger.info("Returning register proof")
        return Response(json.dumps(proof), mimetype='application/json')
    finally:
        commit(cursor)
def read_records_by_attribute(field_name, field_value):
    """Return records whose item JSON contains {field_name: field_value},
    keyed by entry key.

    Rows come back in ascending entry order, so for duplicate keys the
    latest entry overwrites earlier ones in the result dict.
    """
    # TODO(pagination)
    app.logger.info("Read records where %s = %s", field_name, field_value)
    app.logger.warning("read_records_by_attribute: pagination not implemented")
    key_field = app.config['REGISTER_KEY_FIELD']
    cursor = start()
    try:
        # JSONB containment test via the @> operator.
        containment = json.dumps({field_name: field_value})
        cursor.execute(
            "SELECT e.entry_number, e.entry_timestamp, e.item_hash, e.key, "
            " i.item "
            "FROM entry e, item i "
            "WHERE e.item_hash = i.item_hash "
            "AND i.item @> %(field)s "
            "ORDER BY e.entry_number ASC",
            {'field': containment})
        result = {}
        for row in cursor.fetchall():
            # NOTE(review): 'entry_timestamp' (underscore) differs from the
            # 'entry-timestamp' key used elsewhere; kept to preserve the
            # existing API payload.
            result[row['key']] = {
                "entry-number": row['entry_number'],
                "entry_timestamp": row['entry_timestamp'].strftime('%Y-%m-%d %H:%M:%S.%f'),
                "item-hash": row['item_hash'],
                key_field: row['key'],
                'item': row['item']
            }
        app.logger.info("%d records returned", len(result))
        return result
    finally:
        commit(cursor)
def republish_entry(entry_number, routing_key):
    # Re-publish one register entry to the message exchange configured in
    # `config`, tagging it with an action type (and item-changes when it is
    # an update relative to the previous entry for the same key).
    # Returns True when a message was published, False when entry_number
    # exceeds the current maximum entry number.
    app.logger.info("Republishing entry '%s'", entry_number)
    cursor = start()
    try:
        max_entry = count_entries(cursor)
        # Reject entry numbers beyond the end of the register.
        if entry_number > max_entry:
            app.logger.warning(
                "Entry number '%s' is greater than maximum entry '%s'",
                entry_number, max_entry)
            return False
        # Load the entry joined to its item payload.
        cursor.execute(
            'SELECT e.entry_number, e.entry_timestamp, e.item_hash, e.key, i.item, e.item_signature '
            'FROM entry e '
            'JOIN item i on e.item_hash = i.item_hash '
            'WHERE entry_number=%(entry_number)s LIMIT 1',
            {'entry_number': entry_number})
        entry_row = cursor.fetchone()
        prev_item = None
        if not entry_row:
            # No stored row for this number: publish a placeholder entry.
            # NOTE(review): assumes create_empty_entry() provides the 'key'
            # field read below — confirm against its definition.
            app.logger.info("No entry found for '%s', using empty entry", entry_number)
            entry = create_empty_entry(entry_number)
        else:
            entry = {
                "entry-number": entry_number,
                "entry-timestamp": entry_row['entry_timestamp'].strftime('%Y-%m-%d %H:%M:%S.%f'),
                "item-hash": entry_row['item_hash'],
                "key": entry_row['key'],
                "item-signature": entry_row['item_signature'],
                "item": entry_row['item']
            }
        # Find the most recent earlier entry for the same key, to decide
        # whether this entry created or updated the record.
        cursor.execute(
            'SELECT e.entry_number, i.item '
            'FROM entry e '
            'JOIN item i on e.item_hash = i.item_hash '
            'WHERE e.key=%(key)s AND entry_number < %(entry_number)s '
            'ORDER BY entry_number DESC LIMIT 1',
            {
                'key': entry['key'],
                'entry_number': entry_number
            })
        prev_entry_row = cursor.fetchone()
        if prev_entry_row:
            prev_item = prev_entry_row['item']
        action_type = get_action_type(prev_item)
        # NOTE: `message` aliases `entry` — the mutations below also change
        # `entry` itself.
        message = entry
        message['action-type'] = action_type
        if action_type == 'UPDATED':
            # NOTE(review): this branch reads prev_entry_row — presumably
            # get_action_type only returns 'UPDATED' when prev_item (and
            # hence prev_entry_row) exists; verify, otherwise this is a
            # NameError/None access.
            message['item-changes'] = get_item_changes(entry['item'], prev_item)
            app.logger.info(
                "Created republish for entry '%s' with changes from previous entry '%s'",
                entry_number, prev_entry_row['entry_number'])
        else:
            # (doubled apostrophe in the log format string left as-is)
            app.logger.info("Created republish for entry '%s''", entry_number)
        app.logger.info(
            "Sending republish message to exchange '%s' with routing key '%s'",
            config.EXCHANGE_NAME, routing_key)
        app.logger.debug("Sending message '%s''", message)
        publish_message(message, config.RABBIT_URL, config.EXCHANGE_NAME, routing_key,
                        queue_name=None, exchange_type=config.EXCHANGE_TYPE,
                        serializer="json", headers=None)
        app.audit_logger.info("Entry '%s' republished to routing key '%s'",
                              entry_number, routing_key)
        return True
    finally:
        # Release the transaction even on the early-return / failure paths.
        commit(cursor)