def test_add_license_attr():
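    # Verify that add_license_attribute() stores the license names on the
    # Version node and refreshes its last_updated timestamp.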
    p = Package.load_from_json(serve_static_json)
    assert p.save() is not None

    v = Version.load_from_json(serve_static_json, package=p)
    v.save()
    p.create_version_edge(v)

    ts1 = v.last_updated
    assert p.last_updated is not None
    assert ts1 is not None

    license_data = serve_static_json["analyses"]["source_licenses"]
    _, _, licenses = LicenseDetails.load_from_json(license_data)
    v.add_license_attribute(licenses)
    assert v.last_updated >= ts1

    version_criteria = {
        'pecosystem': v.ecosystem_package.ecosystem,
        'pname': v.ecosystem_package.name,
        'version': v.version
    }
    present_version = Version.find_by_criteria('Version', p, version_criteria)
    assert present_version.last_updated == v.last_updated
    assert (len(present_version.licenses) == 1)
    test_set = ['MITNFA']

    assert present_version.licenses == test_set

    Version.delete_by_id(v.id)
    Package.delete_by_id(p.id)
def test_support_vector_github_detail():
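    # Verify that attaching GitHub details creates exactly one
    # has_github_details edge and that re-adding it does not create a duplicate.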
    p = Package.load_from_json(serve_static_json)
    assert p.save() is not None

    v = Version.load_from_json(serve_static_json, package=p)
    v.save()
    p.create_version_edge(v)

    assert p.last_updated is not None
    assert v.last_updated is not None

    github_data = serve_static_json["analyses"]["github_details"]
    github_result = GithubResult.load_from_json(github_data)
    assert github_result.last_updated is None

    v.add_edge_github_details(github_result)
    ls_before = github_result.last_updated
    assert (GithubResult.count() == 1)

    count_before = len(v.get_version_out_edge('has_github_details'))
    assert count_before == 1

    # try adding the edge again
    v.add_edge_github_details(github_result)
    count_after = len(v.get_version_out_edge('has_github_details'))
    ls_after = github_result.last_updated
    assert count_before == count_after
    assert ls_after >= ls_before

    GithubResult.delete_by_id(github_result.id)
    Version.delete_by_id(v.id)
    Package.delete_by_id(p.id)
def test_blackduck_graph():
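    # Verify that a Blackduck issue loaded from the analysis data is stored
    # as a CVE node and linked to the version.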
    bl_list = []
    p = Package.load_from_json(serve_static_json)
    assert p.save() is not None

    v = Version.load_from_json(serve_static_json, package=p)
    v.save()
    p.create_version_edge(v)

    assert p.last_updated is not None
    assert v.last_updated is not None

    bl_list = bl.load_from_json(input_json['analyses'])
    assert len(bl_list) == 1

    objBlackduck = bl.add_blackduck_issue(bl_list[0])
    v.add_blackduck_cve_edge(objBlackduck.id)

    bl_criteria = {'vulnerability_name': 'CVE-2015-1164'}
    obj_fetch = SecurityDetails.find_by_criteria('CVE', bl_criteria)
    assert obj_fetch.last_updated == objBlackduck.last_updated

    SecurityDetails.delete_by_id(obj_fetch.id)
    Version.delete_by_id(v.id)
    Package.delete_by_id(p.id)
def test_person_contributor():
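    # Load contributors from the metadata and attach them to the version;
    # for this fixture no Contributor nodes are expected (count stays 0).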
    p = Package.load_from_json(serve_static_json)
    assert p.save() is not None

    v = Version.load_from_json(serve_static_json, package=p)
    v.save()
    p.create_version_edge(v)

    assert p.last_updated is not None
    assert v.last_updated is not None

    contributors_data = serve_static_json["analyses"]["metadata"]
    contributors_list = Contributor.load_from_json(contributors_data)
    for c in contributors_list:
        c.save()
        assert c.last_updated is not None
        v.add_edge_contributor(c)

    assert (Contributor.count() == 0)

    for c in contributors_list:
        Contributor.delete_by_id(c.id)

    Version.delete_by_id(v.id)
    Package.delete_by_id(p.id)
def test_person_author():
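    # Verify that re-saving an existing author does not create a duplicate
    # Author node (count stays 1).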
    p = Package.load_from_json(serve_static_json)
    assert p.save() is not None

    v = Version.load_from_json(serve_static_json, package=p)
    v.save()
    p.create_version_edge(v)

    assert p.last_updated is not None
    assert v.last_updated is not None

    authors_data = serve_static_json["analyses"]["metadata"]
    authors_list = Author.load_from_json(authors_data)
    ts_list = []
    for a in authors_list:
        a.save()
        assert a.last_updated is not None
        ts_list.append(a.last_updated)
        v.add_edge_author(a)

    author_before = Author.count()
    assert (author_before == 1)

    author_detail = Author(name='Douglas Christopher Wilson',
                           email='*****@*****.**')
    author_detail.save()
    assert author_detail.last_updated >= a.last_updated
    assert (Author.count() == 1)

    for a in authors_list:
        Author.delete_by_id(a.id)

    Author.delete_by_id(author_detail.id)
    Version.delete_by_id(v.id)
    Package.delete_by_id(p.id)
def load_dependencies(ecosystem, dependency_data):
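    # Resolve each "name version" dependency string into Package and Version
    # nodes, reusing existing nodes when they match the lookup criteria.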
    dependency_pck_list = []
    dependency_ver_list = []
    dependency_type = []
    dependencies = get_dependencies(dependency_data)

    for dep_type, dep_list in dependencies.items():
        for d in dep_list:
            splits = d.split(" ")

            (n, v) = ("", "")
            if len(splits) >= 1:
                n = splits[0]
            if len(splits) >= 2:
                v = splits[1]

            pck_criteria_dict = {'ecosystem': ecosystem, 'name': n}
            pck_dep = Package.find_by_criteria(
                'Package', pck_criteria_dict) or Package(ecosystem, n)

            ver_criteria_dict = {
                'pecosystem': ecosystem,
                'pname': n,
                'version': v
            }
            ver_dep = Version.find_by_criteria(
                'Version', pck_dep, ver_criteria_dict) or Version(pck_dep, v)

            dependency_pck_list.append(pck_dep)
            dependency_ver_list.append(ver_dep)
            dependency_type.append(dep_type)

    return dependency_pck_list, dependency_ver_list, dependency_type
def test_empty_github_results():
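    # Verify that GitHub details with empty results are still attached to the
    # version exactly once, without creating duplicate GithubResult nodes.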
    p = Package.load_from_json(npm_sequence_3)
    assert p.save() is not None
    v = Version.load_from_json(npm_sequence_3, package=p)
    v.save()
    p.create_version_edge(v)

    assert p.last_updated is not None
    assert v.last_updated is not None

    # input_json = gv.read_from_file('test/data/npm-sequence-3.0.0.json')
    github_data = npm_sequence_3["analyses"]["github_details"]
    github_result = GithubResult.load_from_json(github_data)
    assert (github_result.details is not None)
    assert github_result.last_updated is None

    # gid = github_result.save()
    v.add_edge_github_details(github_result)
    ls_before = github_result.last_updated
    assert ls_before is not None
    assert (github_result.id is not None)

    v.add_edge_github_details(github_result)
    ls_after = github_result.last_updated
    assert ls_after >= ls_before
    assert GithubResult.count() == 1

    GithubResult.delete_by_id(github_result.id)

    Version.delete_by_id(v.id)
    Package.delete_by_id(p.id)
def test_support_vector_security():
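    # Verify NVD security handling: CVE nodes are linked to the version,
    # references are preserved, and re-saving the same CVE updates the
    # existing node instead of creating a new one.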
    p = Package.load_from_json(serve_static_json)
    assert p.save() is not None

    v = Version.load_from_json(serve_static_json, package=p)
    v.save()
    p.create_version_edge(v)

    assert p.last_updated is not None
    assert v.last_updated is not None

    security_data = serve_static_json["analyses"]["security_issues"]
    security_list, cvss_score, cve_ids = SecurityDetails.load_from_json(
        security_data)
    ts_list = []
    for s, cvss, cve in zip(security_list, cvss_score, cve_ids):
        s.save()
        assert s.last_updated is not None
        ts_list.append(s.last_updated)
        v.add_security_edge(s, cvss)

    security_before = SecurityDetails.count()
    assert (security_before == 1)

    present_security = SecurityDetails.find_by_criteria(
        'CVE', {'cve_id': 'CVE-2015-1164'})
    assert (len(present_security.references) == 5)
    ref_list = [
        "https://github.com/expressjs/serve-static/issues/26",
        "https://bugzilla.redhat.com/show_bug.cgi?id=1181917",
        "http://xforce.iss.net/xforce/xfdb/99936",
        "http://www.securityfocus.com/bid/72064",
        "http://nodesecurity.io/advisories/serve-static-open-redirect"
    ]
    assert (all(r in ref_list for r in present_security.references))

    repeat_security_detail = SecurityDetails(cve_id='CVE-2015-1164',
                                             cvss=4.3,
                                             summary='')
    repeat_security_detail.issue_has_access('authentication', '')
    repeat_security_detail.issue_has_access('vector', 'NETWORK')
    repeat_security_detail.issue_has_access('complexity', 'MEDIUM')
    repeat_security_detail.issue_has_impact('integrity', 'partial')
    repeat_security_detail.issue_has_impact('confidentiality', '')
    repeat_security_detail.issue_has_impact('availability', '')

    repeat_security_detail.save()
    assert repeat_security_detail.id == s.id
    assert repeat_security_detail.last_updated >= ts_list[0]
    assert (SecurityDetails.count() == 1)

    for s in security_list:
        SecurityDetails.delete_by_id(s.id)

    SecurityDetails.delete_by_id(repeat_security_detail.id)
    Version.delete_by_id(v.id)
    Package.delete_by_id(p.id)
def test_bucket_import_from_folder():
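    # Import a whole S3-style bucket folder and verify the import report plus
    # the resulting Package/Version graph contents.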
    bucket_dir = 'test/data/S3-data'

    packages = Package.find_all()
    assert (len(packages) == 0)

    p = Package()
    assert (p.id is None)
    assert (p.label == "Package")
    assert p.last_updated is None

    report = import_from_folder(bucket_dir)

    assert (report.get('status') == 'Success')
    assert (report.get('count_imported_EPVs') == 3)
    assert (report.get('last_imported_EPV') == 'npm/sequence/3.0.0.json')
    assert (report.get('max_finished_at') == '2017-02-24T13:43:11.872916')

    criteria_dict = {'ecosystem': 'npm', 'name': 'sequence'}
    p1 = Package.find_by_criteria('Package', criteria_dict)

    assert p1 is not None
    assert p1.id is not None
    assert (p1.ecosystem == 'npm')
    assert (p1.latest_version == '3.0.0')
    assert (p1.package_dependents_count == 22)
    assert (p1.name == 'sequence')

    criteria_dict = {
        'pecosystem': 'npm',
        'pname': 'sequence',
        'version': '3.0.0'
    }
    v1 = Version.find_by_criteria('Version', p1, criteria_dict)

    assert v1 is not None
    assert v1.ecosystem_package is not None
    assert (v1.ecosystem_package.ecosystem == 'npm')

    packages = Package.find_all()
    assert (len(packages) == 3)

    versions = Version.find_all()
    assert (len(versions) == 4)

    Package.delete_all()
    assert (Package.count() == 0)

    Version.delete_all()
    assert (Version.count() == 0)

    GraphMetaData.delete_all()
    assert (GraphMetaData.count() == 0)
def test_additional_data_as_attr():
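    # Verify that code metrics and CVE identifiers can be stored directly as
    # attributes on the Version node.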

    add_details = {}
    pck_obj = Package.load_from_json(serve_static_json)
    assert pck_obj.last_updated is None
    assert pck_obj.save() is not None

    ver_obj = Version.load_from_json(serve_static_json, package=pck_obj)
    assert ver_obj.last_updated is None
    ver_obj.save()
    v_ts1 = ver_obj.last_updated
    assert pck_obj.last_updated is not None
    assert v_ts1 is not None

    pck_obj.create_version_edge(ver_obj)
    code_metrics = CMR.load_from_json(
        npm_crumb_data["analyses"]["code_metrics"])

    security_data = serve_static_json["analyses"]["security_issues"]
    security_list, cvss_score, cve_ids = SD.load_from_json(security_data)

    if len(cvss_score) > 0 and len(cve_ids) > 0:
        add_details["cve_ids"] = cve_ids[0]
        add_details["cvss"] = cvss_score[0]

    ver_obj.add_additional_data_as_attr(code_metrics)

    ver_obj.add_cve_ids(add_details["cvss"], add_details["cve_ids"])
    assert ver_obj.last_updated >= v_ts1

    version_criteria = {
        'pecosystem': pck_obj.ecosystem,
        'pname': pck_obj.name,
        'version': ver_obj.version
    }

    present_version = Version.find_by_criteria(ver_obj.label, pck_obj,
                                               version_criteria)
    logger.info(present_version.__dict__)
    assert present_version.cm_loc == 1351
    assert present_version.cm_num_files == 16
    assert present_version.cm_avg_cyclomatic_complexity == -1
    assert present_version.cve_ids == ['CVE-2015-1164:7.5']
    assert present_version.last_updated == ver_obj.last_updated

    Version.delete_by_id(ver_obj.id)
    Package.delete_by_id(pck_obj.id)
def test_ecosystem_import_from_folder():
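    # Import a single ecosystem folder (maven) and verify the import report
    # and the resulting Package/Version nodes.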
    ecosystem_dir = 'test/data/S3-data/maven'

    packages = Package.find_all()
    assert (len(packages) == 0)

    p = Package()
    assert (p.id is None)
    assert (p.label == "Package")
    assert p.last_updated is None

    report = import_from_folder(ecosystem_dir)

    assert (report.get('status') == 'Success')
    assert (report.get('count_imported_EPVs') == 2)
    assert (
        report.get('last_imported_EPV') == 'org.slf4j:slf4j-api/1.5.6.json')
    assert (report.get('max_finished_at') == '2017-02-24T13:43:11.872916')

    criteria_dict = {'ecosystem': 'maven', 'name': 'junit:junit'}
    p1 = Package.find_by_criteria('Package', criteria_dict)

    criteria_dict = {
        'pecosystem': 'maven',
        'pname': 'junit:junit',
        'version': '4.8.2'
    }
    v1 = Version.find_by_criteria('Version', p1, criteria_dict)

    assert v1 is not None
    assert v1.ecosystem_package is not None
    assert (v1.ecosystem_package.ecosystem == 'maven')

    packages = Package.find_all()
    assert (len(packages) == 2)

    versions = Version.find_all()
    assert (len(versions) == 3)

    Package.delete_all()
    assert (Package.count() == 0)

    Version.delete_all()
    assert (Version.count() == 0)

    GraphMetaData.delete_all()
    assert (GraphMetaData.count() == 0)
def test_create_package_entity():
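    # Verify that importing selected EPVs and re-saving existing packages
    # updates nodes in place instead of creating duplicates.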

    packages = Package.find_all()
    assert (len(packages) == 0)

    list_epv_1 = [{
        'ecosystem': 'maven',
        'name': 'org.slf4j:slf4j-api',
        'version': '1.5.6'
    }]
    import_epv_from_folder('test/data/S3-data', list_epv=list_epv_1)

    criteria_dict = {'ecosystem': 'maven', 'name': 'org.slf4j:slf4j-api'}
    p = Package.find_by_criteria('Package', criteria_dict)
    assert p.latest_version == '1.7.22'

    p.save()  # must be an update
    assert (Package.count() == 2)

    p.create()  # duplicate should not create new node
    assert (Package.count() == 2)

    criteria_dict = {'ecosystem': 'maven', 'name': 'junit:junit'}
    p2 = Package.find_by_criteria('Package', criteria_dict)

    assert p2.latest_version == '-1'

    list_epv_2 = [{
        'ecosystem': 'maven',
        'name': 'junit:junit',
        'version': '4.8.2'
    }]
    import_epv_from_folder('test/data/S3-data', list_epv=list_epv_2)

    criteria_dict = {'ecosystem': 'maven', 'name': 'junit:junit'}
    p3 = Package.find_by_criteria('Package', criteria_dict)
    assert p3.latest_version == '4.12'

    p.save()  # must be an update
    assert (Package.count() == 2)

    Package.delete_all()
    assert (Package.count() == 0)

    Version.delete_all()
    assert (Version.count() == 0)
def test_version_dependencies():
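    # Verify that dependency packages/versions from the dependency snapshot
    # are created and linked to the version with typed dependency edges.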
    p = Package.load_from_json(serve_static_json)
    assert p.save() is not None

    v = Version.load_from_json(serve_static_json, package=p)
    v.save()
    p.create_version_edge(v)
    dependency_data = serve_static_json["analyses"]["dependency_snapshot"]
    dependency_pck_list, dependency_ver_list, dependency_type = \
        vdv.load_dependencies(v.ecosystem_package.ecosystem, dependency_data)
    for d_pck, d_ver, d_type in zip(dependency_pck_list, dependency_ver_list,
                                    dependency_type):
        assert d_pck.save() is not None
        d_ver.save()
        d_pck.create_version_edge(d_ver)
        v.add_edge_dependency(d_ver, d_type)

    assert (Version.count_dependency(v.id) == 4)
    assert (Version.count() + Package.count() == 10)

    for pd, vd in zip(dependency_pck_list, dependency_ver_list):
        Version.delete_by_id(vd.id)
        Package.delete_by_id(pd.id)

    Version.delete_by_id(v.id)
    Package.delete_by_id(p.id)
def test_add_code_metrics_non_empty():
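    # Verify that code metrics are stored as a CodeMetricsResult node with
    # per-language nodes, and that re-adding the edge does not duplicate it.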
    p = Package.load_from_json(npm_crumb_data)
    assert p.save() is not None

    v = Version.load_from_json(npm_crumb_data, package=p)
    v.save()
    p.create_version_edge(v)
    assert p.last_updated is not None
    assert v.last_updated is not None
    code_metrics_data = npm_crumb_data["analyses"]["code_metrics"]
    code_metrics = CodeMetricsResult.load_from_json(code_metrics_data)
    assert code_metrics.last_updated is None
    assert code_metrics.id is None

    v.add_code_metrics_edge(code_metrics)
    assert code_metrics.id >= 0
    assert code_metrics.last_updated is not None
    assert code_metrics.last_updated > v.last_updated
    assert (CodeMetricsResult.count() == 1)
    assert (CodeMetricsLanguage.count() == 6)

    count_before = len(v.get_version_out_edge('has_code_metrics'))
    assert count_before == 1

    # try adding the edge again; a duplicate edge should not be created
    v.add_code_metrics_edge(code_metrics)

    count_after = len(v.get_version_out_edge('has_code_metrics'))
    assert count_after == count_before
    assert CodeMetricsLanguage.count() == 6

    lang_nodes = CodeMetricsLanguage.find_all()

    for lang_node in lang_nodes:
        CodeMetricsLanguage.delete_by_id(lang_node.id)

    CodeMetricsResult.delete_by_id(code_metrics.id)
    Version.delete_by_id(v.id)
    Package.delete_by_id(p.id)
def test_package_import_from_folder():
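    # Import a single package folder (maven junit:junit) and verify the
    # import report and the resulting Package/Version nodes.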
    package_dir = 'test/data/S3-data/maven/junit:junit'

    packages = Package.find_all()
    assert (len(packages) == 0)

    versions = Version.find_all()
    assert (len(versions) == 0)

    p = Package()
    assert (p.id is None)
    assert (p.label == "Package")
    assert p.last_updated is None

    report = import_from_folder(package_dir)

    assert (report.get('status') == 'Success')
    assert (report.get('count_imported_EPVs') == 1)
    assert (report.get('last_imported_EPV') == '4.8.2.json')
    assert (report.get('max_finished_at') == '2017-02-24T13:42:29.665786')

    criteria_dict = {'ecosystem': 'maven', 'name': 'junit:junit'}
    p1 = Package.find_by_criteria('Package', criteria_dict)

    assert p1 is not None
    assert p1.id is not None
    assert (p1.ecosystem == 'maven')
    assert (p1.latest_version == '4.12')
    assert (p1.package_dependents_count == -1)
    assert (p1.name == 'junit:junit')

    criteria_dict = {
        'pecosystem': 'maven',
        'pname': 'junit:junit',
        'version': '4.8.2'
    }
    v1 = Version.find_by_criteria('Version', p1, criteria_dict)

    assert v1 is not None
    assert v1.ecosystem_package is not None
    assert (v1.ecosystem_package.ecosystem == 'maven')

    packages = Package.find_all()
    assert (len(packages) == 1)

    versions = Version.find_all()
    assert (len(versions) == 1)

    Package.delete_by_id(p1.id)
    assert (Package.count() == 0)

    Version.delete_by_id(v1.id)
    assert (Version.count() == 0)

    GraphMetaData.delete_all()
    assert (GraphMetaData.count() == 0)
def test_support_vector_license():
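    # Verify that license details are linked to the version and that saving a
    # duplicate license name does not create a second node.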
    p = Package.load_from_json(serve_static_json)
    assert p.save() is not None

    v = Version.load_from_json(serve_static_json, package=p)
    v.save()
    p.create_version_edge(v)

    assert p.last_updated is not None
    assert v.last_updated is not None

    license_data = serve_static_json["analyses"]["source_licenses"]
    license_details_list, license_counts_list, _ = LicenseDetails.load_from_json(
        license_data)
    ts_list = []
    for license_detail, license_count in zip(license_details_list,
                                             license_counts_list):
        license_detail.save()
        assert license_detail.last_updated is not None
        ts_list.append(license_detail.last_updated)
        v.add_license_edge(license_detail, license_count)

    assert (LicenseDetails.count() == 1)

    new_license_detail = LicenseDetails(name='MITNFA')
    new_license_detail.save()
    assert new_license_detail.last_updated >= ts_list[0]

    # Duplicate license should not be inserted
    assert (LicenseDetails.count() == 1)

    for license_detail in license_details_list:
        LicenseDetails.delete_by_id(license_detail.id)

    LicenseDetails.delete_by_id(new_license_detail.id)
    Version.delete_by_id(v.id)
    Package.delete_by_id(p.id)
def load_package_version_values(filename):
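    # Build a Version object from an analysis JSON file, attaching GitHub
    # details, authors, contributors, licenses, dependencies and security
    # issues to it.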

    input_json = gv.read_from_file(filename)

    objpackage = Package.load_from_json(input_json)

    objversion = Version.load_from_json(input_json, objpackage)

    github_data = input_json["analyses"]["github_details"]
    github_result = GithubResult.load_from_json(github_data)
    objversion.github_details = github_result

    authors_data = input_json["analyses"]["metadata"]
    authors_list = Author.load_from_json(authors_data)
    for objauthor in authors_list:
        objversion.version_authored_by(objauthor)

    contributors_data = input_json["analyses"]["metadata"]
    contributor_list = Contributor.load_from_json(contributors_data)
    for objcontributor in contributor_list:
        objversion.version_contributed_by(objcontributor)

    license_data = input_json["analyses"]["source_licenses"]
    license_details_list, license_counts_list, license_names = LicenseDetails.load_from_json(
        license_data)
    for objlicense, license_count in zip(license_details_list,
                                         license_counts_list):
        objversion.version_covered_under(objlicense, license_count)

    objversion.licenses = license_names

    dependency_data = input_json["analyses"]["dependency_snapshot"]
    _, dependency_ver_list, dependency_type = \
        vdv.load_dependencies(
            objversion.ecosystem_package.ecosystem, dependency_data)
    for d_ver, d_type in zip(dependency_ver_list, dependency_type):
        objversion.version_depends_on(d_ver, d_type)

    security_data = input_json["analyses"]["security_issues"]
    security_list, __, _ = SecurityDetails.load_from_json(security_data)
    for objsecurity in security_list:
        objversion.version_has_nvd_issues(objsecurity)

    return objversion
def test_version_entity():
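    # Verify basic Version node behaviour: re-saving updates the existing
    # node, and the package-to-version edge is created only once.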
    p = Package.load_from_json(serve_static_json)
    assert p.save() is not None

    v = Version.load_from_json(serve_static_json, package=p)
    v.save()
    assert (Version.count() == 1)
    ls_before = v.last_updated
    assert ls_before is not None

    criteria_dict = {
        'pecosystem': 'npm',
        'pname': 'serve-static',
        'version': '1.7.1'
    }
    v2 = Version.find_by_criteria('Version', p, criteria_dict)
    assert v2.last_updated == v.last_updated

    v.save()
    ls_after = v.last_updated
    assert (Version.count() == 1)
    assert ls_after >= ls_before
    assert v.last_updated >= v2.last_updated

    test_packaged_in = ['nodejs-serve-static']
    test_published_in = []
    assert (all(pck in test_packaged_in for pck in v.is_packaged_in))
    assert (all(pub in test_published_in for pub in v.is_published_in))

    # now create an edge
    edge_count_before = Package.edge_count()
    p.create_version_edge(v)
    edge_count_after = Package.edge_count()
    assert (edge_count_after == edge_count_before + 1)

    # now try to create an edge again
    edge_count_before = Package.edge_count()
    p.create_version_edge(v)
    edge_count_after = Package.edge_count()
    assert (edge_count_after == edge_count_before)

    # this should return all versions associated with this package
    p_versions = p.get_versions()
    assert (len(p_versions) == 1)

    Version.delete_by_id(v.id)
    Package.delete_by_id(p.id)
    def populate_from_json(cls, input_json):

        # Package/version insertion: create the NPM package node, its
        # version node, and (below) its dependency nodes.
        logger.info("Instantiating package ...")
        package = Package.load_from_json(input_json)
        logger.info("Saving package ...")
        pkg_id = package.save()
        logger.info(" Package node ID: %s" % pkg_id)
        if pkg_id is None:
            return
        version = Version.load_from_json(input_json, package=package)
        ver_id = version.save()
        logger.info(" Version node ID: %s" % ver_id)
        package.create_version_edge(version)

        analyses = input_json["analyses"]
        if "dependency_snapshot" in analyses:
            dependency_snapshot = analyses["dependency_snapshot"]
            dependency_pck_list, dependency_ver_list, dependency_type = vdv.load_dependencies(
                version.ecosystem_package.ecosystem, dependency_snapshot)
            for d_pck, d_ver, d_type in zip(dependency_pck_list,
                                            dependency_ver_list,
                                            dependency_type):
                if d_pck.save() is None:
                    continue
                d_ver.save()
                d_pck.create_version_edge(d_ver)
                version.add_edge_dependency(d_ver, d_type)

        if "metadata" in analyses:
            meta_data = analyses["metadata"]
            print("  Adding authors_list")
            authors_list = Author.load_from_json(meta_data)
            for author in authors_list:
                a_id = author.save()
                print("    author ID: %s" % a_id)
                version.add_edge_author(author)

            print("  Adding contributor_list")
            contributor_list = Contributor.load_from_json(meta_data)
            for contributor in contributor_list:
                c_id = contributor.save()
                print("    contributor ID: %s" % c_id)
                version.add_edge_contributor(contributor)

        # License Information
        if "source_licenses" in analyses:
            print("  Adding source_licenses")
            licenses = set()
            license_data = analyses["source_licenses"]
            license_details_list, license_counts_list, licenses = LicenseDetails.load_from_json(
                license_data)
            for used_license, license_count in zip(license_details_list,
                                                   license_counts_list):
                lic_id = used_license.save()
                print("    license_data ID: %s" % lic_id)
                version.add_license_edge(used_license, license_count)

            version.add_license_attribute(licenses)

        # NVD Security Information
        if "security_issues" in analyses:
            print("  Adding security_issues")
            security_data = analyses["security_issues"]
            security_list, cvss_score, cve_ids = SecurityDetails.load_from_json(
                security_data)
            for s, cvss, cve_id in zip(security_list, cvss_score, cve_ids):
                ss_id = s.save()
                print("    security_data ID: %s" % ss_id)
                version.add_security_edge(s, cvss)
                version.add_cve_ids(cvss, cve_id)

        # GitHub Details
        if "github_details" in analyses:
            print("  Adding github_details")
            github_data = analyses["github_details"]
            github_result = GithubResult.load_from_json(github_data)
            package.add_github_details_as_attr(github_result)
            version.add_edge_github_details(github_result)

        # Code Metrics
        if "code_metrics" in analyses:
            print("  Adding code_metrics")
            code_metrics_data = analyses["code_metrics"]
            code_metrics = CodeMetricsResult.load_from_json(code_metrics_data)
            version.add_code_metrics_edge(code_metrics)
            version.add_additional_data_as_attr(code_metrics)

        if "blackduck" in analyses:
            print("Adding extra security info via blackduck")
            blackduck_cve = analyses["blackduck"]
            issue_list = bl.load_from_json(blackduck_cve)
            for issue in issue_list:
                bl_obj = bl.add_blackduck_issue(issue)
                version.add_blackduck_cve_edge(bl_obj.id)
def test_full_import_and_incr_update():
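    # Run a full import followed by two incremental updates and verify the
    # graph metadata timestamps and import reports after each step.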
    data_dir = 'test/data'
    # Let us make sure that target graph has no metadata
    graph_meta = GraphPopulator.get_metadata()
    assert (graph_meta is None)

    # Full import: insert all the EPVs from the given data source
    src_dir = os.path.join(data_dir, 'full_import')
    report = import_bulk(data_source=LocalFileSystem(src_dir=src_dir),
                         book_keeper=None)
    assert (report.get('status') == 'Success')
    assert (report.get('count_imported_EPVs') == 1)
    assert (report.get('last_imported_EPV') == 'npm/serve-static/1.7.1.json')
    assert (report.get('max_finished_at') == '2017-02-08T12:26:51.962609')

    graph_meta = GraphPopulator.get_metadata()
    assert (graph_meta is not None)
    assert (graph_meta.last_incr_update_ts == '2017-02-08T12:26:51.962609')

    # Incremental update 1:
    # Let us mimic a scenario where a new EPV was inserted recently: npm/send/0.10.1
    src_dir = os.path.join(data_dir, 'incr_update1')
    book_keeping_json = os.path.join(data_dir, 'book_keeping1.json')
    report = import_bulk(
        data_source=LocalFileSystem(src_dir=src_dir),
        book_keeper=JsonBookKeeper(json_file_name=book_keeping_json))
    assert (report.get('status') == 'Success')
    assert (report.get('count_imported_EPVs') == 1)
    assert (report.get('last_imported_EPV') == 'npm/send/0.10.1.json')
    assert (report.get('max_finished_at') == '2017-02-22T15:34:59.469864')

    graph_meta = GraphPopulator.get_metadata()
    assert (graph_meta is not None)
    assert (graph_meta.last_incr_update_ts == '2017-02-22T15:34:59.469864')

    # Incremental update 2:
    # Let us mimic a scenario where a new EPV was inserted recently: npm/parseurl/1.3.1
    # and also an already existing EPV was updated recently: npm/serve-static/1.7.1
    src_dir = os.path.join(data_dir, 'incr_update2')
    book_keeping_json = os.path.join(data_dir, 'book_keeping2.json')
    report = import_bulk(
        data_source=LocalFileSystem(src_dir=src_dir),
        book_keeper=JsonBookKeeper(json_file_name=book_keeping_json))
    assert (report.get('status') == 'Success')
    assert (report.get('count_imported_EPVs') == 2)
    assert (report.get('last_imported_EPV') == 'npm/serve-static/1.7.1.json')
    assert (report.get('max_finished_at') == '2017-02-22T15:35:51.962609')

    graph_meta = GraphPopulator.get_metadata()
    assert (graph_meta is not None)
    assert (graph_meta.last_incr_update_ts == '2017-02-22T15:35:51.962609')

    # Cleanup
    GraphMetaData.delete_all()
    assert (GraphMetaData.count() == 0)

    LicenseDetails.delete_all()
    assert (LicenseDetails.count() == 0)

    Author.delete_all()
    assert (Author.count() == 0)

    CodeMetricsResult.delete_all()
    assert (CodeMetricsResult.count() == 0)

    CodeMetricsLanguage.delete_all()
    assert (CodeMetricsLanguage.count() == 0)

    GithubResult.delete_all()
    assert (GithubResult.count() == 0)

    Contributor.delete_all()
    assert (Contributor.count() == 0)

    Package.delete_all()
    assert (Package.count() == 0)

    Version.delete_all()
    assert (Version.count() == 0)