def delete_envelope_by_uuid(uuid):
    """json route for deleting an envelope, expects envelope.uuid
    """
    try:
        data = request.json

        # check supplier password
        if not db_session.query(exists().where( \
                and_(Supplier.id == data["supplier_id"], \
                     Supplier.password == data["password"]))).scalar():
            return jsonify({"failed": True, "invalid_password": True})

        # if there are any parts that have this envelope, delete their relationship
        for part, _ in db_session.query(Part, Envelope).join(Envelope) \
                .filter(Envelope.uuid == uuid).all():
            part.envelope = None
        db_session.flush()

        delete_envelope(find_envelope(uuid))
        return jsonify({"failed": False, \
            "envelope_html": render_template("envelope_table.html", envelope=None)})

    except:
        return jsonify({"failed": True, "error_message": stacktrace()})
def create_part():
    """json route for creating a new part, expects a simple object containing
    the fields in the Part data model.
    """
    data = request.json
    response_data = {}
    response_data["incorrect_password"] = not \
        supplier_password_is_correct(data["supplier_id"], data["password"])
    if response_data["incorrect_password"]:
        return jsonify(response_data)

    try:
        part = Part()

        # read off the fields from the posted data and set the new part's attributes
        for col in Part.__table__.columns:
            if col.key != "id":
                if col.key in data:
                    setattr(part, col.key, data[col.key])

        part.categories = [category for category in db_session.query(Category) \
                           if str(category.id) in data["categories"]]

        supplier = db_session.query(Supplier).filter(
            Supplier.id == data["supplier_id"]).one()

        assert not part.blockchain or supplier.blockchain, "The supplier '" + supplier.name \
            + "' is not registered with the blockchain network. None of its" \
            + " software parts can be registered with the network."

        # call the ledger service to add this part and its relations to the blockchain
        if part.blockchain:
            part.save_to_blockchain()
            for category in part.categories:
                save_part_category_relation(part, category)
            save_part_supplier_relation(part, supplier)

        db_session.add(part)
        db_session.flush()
        db_session.commit()
        response_data["failed"] = False
        response_data["part_id"] = part.id

    except (APIError, AssertionError) as error:
        response_data["failed"] = True
        response_data["error_message"] = str(error)

    except:
        response_data["failed"] = True
        response_data["error_message"] = stacktrace()

    return jsonify(response_data)
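
# Illustrative sketch (not referenced by the app): a request body that create_part
# might accept, assuming Part exposes the columns that reset_handler reads from
# parts.csv (uuid, usku, supplier_part_id, name, version, licensing, url, status,
# description, checksum, src_uri). All values here are invented; "categories"
# holds category ids as strings, matching the str(category.id) comparison above.
_EXAMPLE_CREATE_PART_PAYLOAD = {
    "supplier_id": 1,
    "password": "supplier-password",
    "blockchain": False,
    "categories": ["1", "3"],
    "name": "zlib",
    "version": "1.2.11",
    "licensing": "Zlib",
    "url": "https://zlib.net",
    "status": "active",
    "description": "Compression library",
}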
def populate_categories():
    """ask the blockchain for categories and update the database
    """
    if app.config["BYPASS_API_CALLS"] or app.config["BYPASS_LEDGER_CALLS"]:
        return

    print("Retrieving blockchain categories ...")
    categories = get_blockchain_categories()

    # delete old categories whose UUIDs no longer exist
    new_uuids = [category_dict["uuid"] for category_dict in categories]
    parts = db_session.query(Part).all()
    for category in db_session.query(Category).all():
        if category.uuid not in new_uuids:
            db_session.delete(category)

            # delete all the part relations that had this category
            for part in parts:
                for part_category in part.categories:
                    if part_category.uuid == category.uuid:
                        part.categories.remove(part_category)
            db_session.flush()

    # update existing or insert new categories
    for category_dict in categories:
        category_query = db_session.query(Category).filter(
            Category.uuid == category_dict["uuid"])

        if category_query.count() == 1:
            # update
            category = category_query.one()
            category.name = category_dict["name"]
            category.description = category_dict["description"]
        else:
            # insert
            category = Category()
            category.uuid = category_dict["uuid"]
            category.name = category_dict["name"]
            category.description = category_dict["description"]
            db_session.add(category)

        db_session.flush()

    db_session.commit()
def edit_part():
    """json route for editing a part
    """
    data = request.json
    response_data = {}

    # validate supplier password
    response_data["incorrect_password"] = \
        not supplier_password_is_correct(data["supplier_id"], data["password"])
    if response_data["incorrect_password"]:
        return jsonify(response_data)

    # make sure that the part exists
    part_query = db_session.query(Part).filter(Part.id == data["part_id"])
    response_data["part_exists"] = (part_query.count() == 1)
    if not response_data["part_exists"]:
        return jsonify(response_data)

    # update the part
    try:
        part = part_query.one()
        for col in Part.__table__.columns:
            if col.key != "id":
                if col.key in data:
                    setattr(part, col.key, data[col.key])

        # update the categories
        if "categories" in data:
            part.categories = [category for category in db_session.query(Category).all() \
                               if category.id in data["categories"]]

        db_session.flush()
        db_session.commit()
        response_data["failed"] = False

    except:
        response_data["failed"] = True
        response_data["error_message"] = stacktrace()

    return jsonify(response_data)
def create_supplier():
    """json route for creating a new supplier. expects a simple object
    containing the fields in the Supplier data model.
    """
    response_data = {"failed": False}
    try:
        data = request.json
        assert "supplier_name" in data, "Bad call, missing required 'supplier_name'."
        assert "password" in data, "Bad call, missing required 'password'."
        supplier_name = data["supplier_name"]
        pwd = data["password"]

        assert not db_session.query(exists().where(Supplier.name == supplier_name)).scalar(), \
            "Another supplier with this name already exists."

        supplier = Supplier()
        supplier.name = supplier_name
        supplier.password = pwd
        supplier.blockchain = data["blockchain"]

        # call the ledger service to add this supplier to the blockchain
        if supplier.blockchain:
            supplier.save_to_blockchain()

        db_session.add(supplier)
        db_session.flush()
        db_session.commit()
        response_data["supplier_table_html"] = render_template("supplier_table.html", \
            suppliers=db_session.query(Supplier))

    except (APIError, AssertionError) as error:
        response_data["failed"] = True
        response_data["error_message"] = str(error)

    except:
        response_data["failed"] = True
        response_data["error_message"] = stacktrace()

    return jsonify(response_data)
def delete_part():
    """json route for deleting a part. expects part_id.
    """
    response_data = {}
    data = request.json

    # validate supplier password
    response_data["incorrect_password"] = \
        not supplier_password_is_correct(data["supplier_id"], data["password"])
    if response_data["incorrect_password"]:
        return jsonify(response_data)

    # make sure that the part exists
    part_query = db_session.query(Part).filter(Part.id == data["part_id"])
    response_data["part_exists"] = (part_query.count() == 1)
    if not response_data["part_exists"]:
        return jsonify(response_data)

    try:
        part = part_query.one()
        # remember the envelope before the part is deleted and flushed, so the
        # relationship is not read from an already-deleted instance
        envelope = part.envelope
        db_session.delete(part)
        db_session.flush()
        if envelope:
            delete_envelope(envelope)
        db_session.commit()
        response_data["failed"] = False

    except:
        response_data["failed"] = True
        response_data["error_message"] = stacktrace()

    return jsonify(response_data)
def reset_handler():
    """respond to conductor call RESET by purging the database and
    repopulating with sample data
    """
    response_data = {}
    try:
        # clear all the tables
        for part in db_session.query(Part).all():
            part.categories = []
            part.envelope = None
            part.supplier = None
            db_session.delete(part)
        db_session.flush()

        db_session.query(Category).delete()
        db_session.query(Supplier).delete()

        for envelope in db_session.query(Envelope).all():
            envelope.boms = []
            envelope.artifacts = []
            db_session.delete(envelope)
        db_session.flush()

        for bom in db_session.query(BOM).all():
            bom.items = []
            bom.artifact = None
            db_session.delete(bom)
        db_session.flush()

        db_session.query(Artifact).delete()
        db_session.query(BOMItem).delete()
        db_session.flush()
        db_session.commit()

        # delete all envelope and artifact files
        empty_directory(app.config["UPLOAD_FOLDER"])
        empty_directory(app.config["ARTIFACT_FOLDER"])

        # insert suppliers
        for supplier_dict in read_csv_file("suppliers.csv"):
            supplier = Supplier()
            supplier.name = supplier_dict["name"]
            supplier.uuid = supplier_dict["uuid"]
            supplier.password = hashlib.md5(codecs.encode(supplier_dict["password"], "utf-8"))\
                .hexdigest()
            supplier.blockchain = (supplier_dict["blockchain"] == "true")
            if supplier.blockchain:
                supplier.save_to_blockchain()
            db_session.add(supplier)
        db_session.flush()

        # insert categories
        categories_by_uuid = {}
        for category_dict in read_csv_file("categories.csv"):
            category = Category()
            category.name = category_dict["name"]
            category.uuid = category_dict["uuid"]
            category.description = category_dict["description"]
            db_session.add(category)
            category.save_to_blockchain()
            categories_by_uuid[category.uuid] = category
        db_session.flush()

        # read part category association table
        part_category_instances = {}
        for part_category_relation in read_csv_file("part-categories.csv"):
            if part_category_relation["part_uuid"] not in part_category_instances:
                part_category_instances[part_category_relation["part_uuid"]] = []
            part_category_instances[part_category_relation["part_uuid"]].append( \
                categories_by_uuid[part_category_relation["category_uuid"]])

        # insert parts
        categories = db_session.query(Category).all()
        for part_dict in read_csv_file("parts.csv"):
            part = Part()
            part_supplier_query = db_session.query(Supplier)\
                .filter(Supplier.uuid == part_dict["supplier_uuid"])
            assert part_supplier_query.count() == 1, \
                "Invalid supplier UUID in the following sample part. \n" \
                + json.dumps(part_dict) + " Could not find a supplier with UUID '" \
                + part_dict["supplier_uuid"] + "'"
            part.supplier = part_supplier_query.one()
            part.blockchain = (part_dict["blockchain"] == "true")
            for field in ["uuid", "usku", "supplier_part_id", "name", "version", \
                          "licensing", "url", "status", "description", "checksum", "src_uri"]:
                setattr(part, field, part_dict[field])
            if part.uuid in part_category_instances:
                for category in part_category_instances[part.uuid]:
                    part.categories.append(category)
            db_session.add(part)
            if part.blockchain:
                part.save_to_blockchain()
                for category in part.categories:
                    save_part_category_relation(part, category)
                save_part_supplier_relation(part, part.supplier)
        db_session.flush()

        # read envelope part association table
        envelope_parts = {}
        for envelope_parts_dict in read_csv_file("part-envelopes.csv"):
            envelope_parts[envelope_parts_dict["envelope_uuid"]] = \
                envelope_parts_dict["part_uuid"]

        # unpack and parse envelopes
        for envelope_path in glob.glob(\
                os.path.join(app.config["SAMPLE_DATA_FOLDER"], "envelopes/*")):
            envelope = create_envelope(envelope_path)
            part_query = db_session.query(Part).filter(
                Part.uuid == envelope_parts[envelope.uuid])
            assert part_query.count() == 1, \
                "Invalid sample data. No part was found with UUID " + envelope_parts[envelope.uuid]
            part = part_query.one()
            part.envelope = envelope
            envelope.blockchain = part.blockchain
            if envelope.blockchain:
                envelope.save_to_blockchain()
                save_part_envelope_relation(part, envelope)
            db_session.flush()

        db_session.commit()
        response_data["status"] = "success"

    except AssertionError as error:
        response_data["status"] = "failed"
        response_data["error_message"] = str(error)

    except APIError as error:
        response_data["status"] = "failed"
        response_data["error_message"] = "Encountered an error while calling blockchain API. " \
            + str(error)

    except (OSError, IOError):
        response_data["status"] = "failed"
        response_data["error_message"] = stacktrace()

    except:
        response_data["status"] = "failed"
        response_data["error_message"] = "Unhandled Exception \n\n" + stacktrace()

    return jsonify(response_data)
def delete_envelope(envelope):
    """delete envelope and all its associated artifacts, boms, and files
    """
    if envelope.boms:
        boms = envelope.boms[:]
        envelope.boms = []
        db_session.flush()
        for bom in boms:
            bomitems = bom.items[:]
            bom.items = []
            db_session.flush()
            for item in bomitems:
                db_session.delete(item)
            db_session.flush()
            db_session.delete(bom)
        db_session.flush()

    artifacts = envelope.artifacts[:]
    envelope.artifacts = []
    db_session.flush()
    for artifact in artifacts:
        db_session.delete(artifact)
        artifact_path = os.path.join(app.config["ARTIFACT_FOLDER"], artifact.checksum)
        if os.path.exists(artifact_path):
            os.remove(artifact_path)

    # remember the extract directory before the envelope row is deleted, since the
    # instance can no longer be read back after the commit
    extract_dir = envelope.extract_dir
    db_session.delete(envelope)
    db_session.flush()
    db_session.commit()
    shutil.rmtree(extract_dir)
def extract_and_parse_envelope(envelope_path, extract_path):
    """extract and parse the zip file at envelope_path, add it to the database,
    and return the envelope instance
    """
    base_path = os.path.dirname(envelope_path)
    filename = os.path.basename(envelope_path)
    toc_path = os.path.join(extract_path, "_TOC.json")

    try:
        with zipfile.ZipFile(envelope_path) as envelope:
            envelope.extractall(extract_path)
    except:
        raise EnvelopeError("Failed to extract the zip file '" + str(filename) \
            + "'. The archive was corrupt.")

    # read the table of contents
    assert os.path.exists(toc_path), \
        "Invalid envelope. Missing required table of contents file _TOC.json"

    toc = None
    envelope = Envelope()
    try:
        with open(toc_path, "r") as toc_file:
            toc = toc_file.read()
    except:
        raise EnvelopeError(
            "Failed to read the table of contents file _TOC.json.")

    try:
        toc = json.loads(toc)
    except:
        raise EnvelopeError(
            "Failed to parse JSON data in the table of contents.")

    assert "artifacts" in toc, \
        "Invalid JSON data in the table of contents, missing required field 'artifacts'."

    envelope.toc = json.dumps(toc)
    toc = toc["artifacts"]

    assert isinstance(toc, list), "Invalid JSON data in table of contents." \
        + " Expected a list of artifacts. Got <pre>" \
        + json.dumps(toc, indent=True) + "</pre>"

    for artifact in toc:
        for col in Artifact.__table__.columns:
            if col.key != "id" and col.nullable is False:
                assert col.key in artifact, "Invalid JSON data in the table of contents." \
                    + "The following artifact was missing required field '" + col.key + "'." \
                    + "<br><br><pre>" + json.dumps(artifact, indent=True) + "</pre>" \
                    + "Note that field names are case sensitive and must appear " \
                    + "exactly as specified."

        #
        # the code below will throw an error if another artifact with this UUID already
        # existed. instead we could link the envelope to the existing artifact or
        # overwrite the old artifact.
        #
        assert not db_session.query(exists().where(Artifact.uuid == artifact["uuid"])).scalar(), \
            "Your envelope contained an artifact with UUID = '" + artifact["uuid"] \
            + "'. However, another artifact with that UUID already exists in the database. "\
            + "UUID's must be unique."

    # create a new envelope
    envelope.extract_dir = base_path

    # insert artifacts data in the envelope
    for artifact_dict in toc:
        artifact = Artifact()
        for col in Artifact.__table__.columns:
            if col.key in artifact_dict:
                setattr(artifact, col.key, artifact_dict[col.key])

        # if this is the envelope artifact, set the attributes of the envelope
        if artifact.content_type == "this":
            envelope.uuid = artifact.uuid
            envelope.short_id = artifact.short_id
            envelope.checksum = artifact.checksum
            envelope.openchain = artifact.openchain
            envelope.filename = artifact.filename
            envelope.label = artifact.label
            db_session.add(envelope)
            db_session.flush()

        # otherwise add it to the envelope's list of artifacts
        else:
            try:
                parsed_uri = urlparse(artifact.uri)
            except:
                raise EnvelopeError("Failed to parse the URI '" + artifact.uri \
                    + "' for the following artifact. <br><pre>" \
                    + json.dumps(artifact_dict, indent=True) + "</pre>")

            assert parsed_uri.scheme != "", \
                "There was no scheme in the URI given for the following artifact <br><pre>" \
                + json.dumps(artifact_dict, indent=True) + "</pre>"

            if parsed_uri.scheme == "envelope":
                assert parsed_uri.netloc != "", "Missing net location for the URI given for " \
                    + " the following artifact <br><pre>" \
                    + json.dumps(artifact_dict, indent=True) + "</pre>"
                assert artifact.path != "" and artifact.path is not None, "Missing path for" \
                    + " the following artifact <br><pre>" \
                    + json.dumps(artifact_dict, indent=True) + "</pre>"
                assert artifact.path[0] == "/", "Invalid path in " \
                    + " the following artifact <br><pre>" \
                    + json.dumps(artifact_dict, indent=True) + "</pre>" \
                    + " <br> Paths must begin with a slash (/) symbol."
                if len(artifact.path) > 1:
                    assert artifact.path[-1] != "/", "Invalid path in " \
                        + " the following artifact <br><pre>" \
                        + json.dumps(artifact_dict, indent=True) + "</pre>" \
                        + "<br> Paths should not end with a slash."

                artifact_path = os.path.join(base_path, "envelope")
                artifact_path = os.path.join(artifact_path, parsed_uri.netloc)
                artifact_path = os.path.normpath(artifact_path + artifact.path)
                artifact_path = os.path.join(artifact_path, artifact.filename)

                assert os.path.exists(artifact_path), \
                    "The table of contents pointed to the artifact " \
                    + "<br><pre>" + json.dumps(artifact_dict, indent=True) \
                    + "</pre> <br> But it didn't exist in the envelope."

                with open(artifact_path, "rb") as artifact_file:
                    artifact_content = artifact_file.read()
                    artifact_checksum = hashlib.sha1(
                        artifact_content).hexdigest()

                #
                # TODO: enable validating checksums
                #
                # assert artifact_checksum == artifact.checksum, "Invalid checksum for " \
                #     + " the following artifact <br><pre>" \
                #     + json.dumps(artifact_dict, indent=True) + "</pre>" \
                #     + "<br> Expected '" + artifact_checksum + "', " \
                #     + "got '" + artifact.checksum \
                #     + "'. The files provided in this envelope might be corrupt."

                artifact.checksum = artifact_checksum

                # copy the artifact to the artifacts folder
                shutil.copyfile(artifact_path, \
                    os.path.join(app.config["ARTIFACT_FOLDER"], artifact.checksum))

                # parse bill of materials
                if artifact.content_type == "oss_bom" and artifact.filename[-7:] == ".ossbom":
                    with open(artifact_path, "r") as bom_file:
                        try:
                            bom_data = json.loads(bom_file.read())
                        except:
                            raise EnvelopeError("Failed to parse the JSON in following " \
                                + "bill of materials artifact:" \
                                + "<br><br><pre>" + json.dumps(artifact_dict, indent=True) \
                                + "</pre><br>Please make sure the file '" \
                                + artifact.filename + "' contains valid JSON data.")

                    bom = BOM()
                    for col in BOM.__table__.columns:
                        if col.key != "id" and not col.nullable:
                            assert col.key in bom_data, \
                                "Invalid item in the following bill of materials artifact:" \
                                + "<br><br><pre>" + json.dumps(artifact_dict, indent=True) \
                                + "</pre><br>Missing required field '" + col.key + "'"
                        if col.key in bom_data and col.key != "items":
                            setattr(bom, col.key, bom_data[col.key])

                    assert "items" in bom_data, "Missing required field 'items'" \
                        + " in the following bill of materials artifact: <br><br><pre>" \
                        + json.dumps(artifact_dict, indent=True) + "</pre><br>"

                    for bom_item_dict in bom_data["items"]:
                        bom_item = BOMItem()
                        for col in BOMItem.__table__.columns:
                            if col.key != "id" and not col.nullable:
                                assert col.key in bom_item_dict, \
                                    "Invalid JSON data in the following bill of materials" \
                                    + " artifact: <br><br><pre>" \
                                    + json.dumps(artifact_dict, indent=True) \
                                    + "</pre><br> The following entry was missing " \
                                    + "required field '" + col.key + "'. " + "<br><pre>" \
                                    + json.dumps(bom_item_dict, indent=True) \
                                    + "</pre><br>Note that field names are case sensitive " \
                                    + "and must appear exactly as specified."
                            if col.key in bom_item_dict:
                                setattr(bom_item, col.key, bom_item_dict[col.key])

                        assert bom_item.path[0] == "/", "Invalid path in " \
                            + " the following BOM entry <br><pre>" \
                            + json.dumps(bom_item_dict, indent=True) + "</pre>" \
                            + " <br> Paths must begin with a slash (/) symbol."
                        if len(bom_item.path) > 1:
                            assert bom_item.path[-1] != "/", "Invalid path in " \
                                + " the following BOM entry <br><pre>" \
                                + json.dumps(bom_item_dict, indent=True) + "</pre>" \
                                + "<br> Paths should not end with a slash."
                        bom.items.append(bom_item)

                    bom.artifact = artifact
                    db_session.add(bom)
                    db_session.flush()
                    envelope.boms.append(bom)
                    db_session.flush()

            envelope.artifacts.append(artifact)
            db_session.flush()

    db_session.commit()

    #
    # TODO: add envelope to the blockchain
    #

    return envelope
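
# Illustrative sketch (assumed shape, not a definitive spec): a minimal _TOC.json
# that extract_and_parse_envelope would accept. One artifact with content_type
# "this" describes the envelope itself; other artifacts use "envelope://<netloc>"
# URIs whose path and filename must exist inside the extracted archive. All values
# are invented for illustration.
_EXAMPLE_TOC = {
    "artifacts": [
        {
            "content_type": "this",
            "uuid": "11111111-1111-1111-1111-111111111111",
            "short_id": "env-1",
            "checksum": "da39a3ee5e6b4b0d3255bfef95601890afd80709",
            "openchain": False,
            "filename": "envelope.zip",
            "label": "Example envelope",
        },
        {
            "content_type": "oss_bom",
            "uuid": "22222222-2222-2222-2222-222222222222",
            "short_id": "bom-1",
            "checksum": "356a192b7913b04c54574d18c28d46e6395428ab",
            "openchain": False,
            "filename": "example.ossbom",
            "label": "Bill of materials",
            "uri": "envelope://content",
            "path": "/boms",
        },
    ]
}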
def upload_envelope():
    """route for uploading an envelope file"""
    response_data = {}

    # compute a unique, random extract path directory name based on the current time
    base_dirname = hashlib.sha1(
        codecs.encode(str(datetime.datetime.now()), "utf-8")).hexdigest()
    base_path = os.path.join(app.config["UPLOAD_FOLDER"], base_dirname)

    try:
        assert "envelope" in request.files, "Envelope file was not submitted"
        file = request.files["envelope"]
        assert file.filename != "", "No file was selected"

        filename = secure_filename(file.filename)
        file_path = os.path.join(base_path, filename)
        extract_path = os.path.join(base_path, "envelope")

        assert not os.path.exists(base_path), \
            "Could not create unique directory to extract envelope files." + \
            " Please try again later."
        try:
            os.makedirs(extract_path)
        except:
            raise EnvelopeError(
                "Failed to create directory to extract envelope")

        file.save(file_path)
        # to ensure atomic operation on this file
        os.rename(file_path, file_path)

        assert "part_id" in request.args, "Invalid request, part_id was missing."
        part_query = db_session.query(Part).filter(
            Part.id == request.args["part_id"])
        assert part_query.count() == 1, "Part no longer existed in the database."
        part = part_query.one()

        envelope = extract_and_parse_envelope(file_path, extract_path)
        envelope.blockchain = part.blockchain
        part.envelope_id = envelope.id
        db_session.flush()
        db_session.commit()

        # remove zip file
        os.remove(file_path)

        response_data["successfully_uploaded"] = True
        response_data["envelope_html"] = render_template("envelope_table.html", envelope=envelope)

    except (AssertionError, EnvelopeError) as error:
        response_data["error_message"] = str(error)

    except:
        response_data["error_message"] = stacktrace()

    # delete the extracted files in case something went wrong (the directory may
    # not exist yet if the failure happened before it was created)
    if "error_message" in response_data and os.path.exists(base_path):
        shutil.rmtree(base_path)

    return jsonify(response_data)
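
# Illustrative sketch of how a client might call the upload route, assuming it is
# mapped to a URL such as "/upload_envelope" on a local development server (the
# actual route decorator is not shown in this file). Uses the requests library;
# the part_id and file path are example values.
def _example_upload_envelope_request():
    import requests  # third-party dependency, used only by this sketch
    with open("envelope.zip", "rb") as envelope_file:
        response = requests.post(
            "http://localhost:5000/upload_envelope",  # assumed host and route
            params={"part_id": 1},
            files={"envelope": envelope_file})
    return response.json()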