def test_add_license_attr():
    """Licenses can be stored directly as an attribute on a Version node."""
    pkg = Package.load_from_json(serve_static_json)
    assert pkg.save() is not None

    ver = Version.load_from_json(serve_static_json, package=pkg)
    ver.save()
    pkg.create_version_edge(ver)

    first_ts = ver.last_updated
    assert pkg.last_updated is not None
    assert first_ts is not None

    # Attach the fixture's source-license names as a version attribute;
    # the write must bump the version's last_updated timestamp.
    license_data = serve_static_json["analyses"]["source_licenses"]
    _, _, license_names = LicenseDetails.load_from_json(license_data)
    ver.add_license_attribute(license_names)
    assert ver.last_updated >= first_ts

    # Re-read the version via the lookup API and verify the stored data.
    lookup = {
        'pecosystem': ver.ecosystem_package.ecosystem,
        'pname': ver.ecosystem_package.name,
        'version': ver.version,
    }
    fetched = Version.find_by_criteria('Version', pkg, lookup)
    assert fetched.last_updated == ver.last_updated
    assert len(fetched.licenses) == 1
    assert fetched.licenses == ['MITNFA']

    # Clean up the nodes created by this test.
    Version.delete_by_id(ver.id)
    Package.delete_by_id(pkg.id)
# --- Example 2 ---
def test_empty_github_results():
    """A GithubResult with empty data is saved once and never duplicated."""
    pkg = Package.load_from_json(npm_sequence_3)
    assert pkg.save() is not None
    ver = Version.load_from_json(npm_sequence_3, package=pkg)
    ver.save()
    pkg.create_version_edge(ver)

    assert pkg.last_updated is not None
    assert ver.last_updated is not None

    gh_result = GithubResult.load_from_json(
        npm_sequence_3["analyses"]["github_details"])
    assert gh_result.details is not None
    # Not persisted yet, so no timestamp.
    assert gh_result.last_updated is None

    # Creating the edge persists the github node as a side effect.
    ver.add_edge_github_details(gh_result)
    stamp_before = gh_result.last_updated
    assert stamp_before is not None
    assert gh_result.id is not None

    # Re-adding the edge must update, not duplicate, the node.
    ver.add_edge_github_details(gh_result)
    stamp_after = gh_result.last_updated
    assert stamp_after >= stamp_before
    assert GithubResult.count() == 1

    GithubResult.delete_by_id(gh_result.id)

    Version.delete_by_id(ver.id)
    Package.delete_by_id(pkg.id)
def test_person_author():
    """Authors from fixture metadata are saved once and de-duplicated.

    Saving an Author with the same identity a second time must update the
    existing node (count stays 1) rather than create a duplicate.
    """
    p = Package.load_from_json(serve_static_json)
    assert p.save() is not None

    v = Version.load_from_json(serve_static_json, package=p)
    v.save()
    p.create_version_edge(v)

    assert p.last_updated is not None
    assert v.last_updated is not None

    # Fixture metadata yields exactly one author (asserted below).
    authors_data = serve_static_json["analyses"]["metadata"]
    authors_list = Author.load_from_json(authors_data)
    ts_list = []
    for a in authors_list:
        a.save()
        assert a.last_updated is not None
        ts_list.append(a.last_updated)
        v.add_edge_author(a)

    author_before = Author.count()
    assert (author_before == 1)

    # Same identity as the fixture author; save() must update in place.
    # NOTE(review): the obfuscated email presumably matches the fixture's
    # author email — verify against the test data file.
    author_detail = Author(name='Douglas Christopher Wilson',
                           email='*****@*****.**')
    author_detail.save()
    # `a` is deliberately the loop variable leaked from the loop above
    # (the last — and only — saved author).
    assert author_detail.last_updated >= a.last_updated
    assert (Author.count() == 1)

    for a in authors_list:
        Author.delete_by_id(a.id)

    Author.delete_by_id(author_detail.id)
    Version.delete_by_id(v.id)
    Package.delete_by_id(p.id)
def test_person_contributor():
    """The serve-static fixture has no contributors, so none are stored."""
    pkg = Package.load_from_json(serve_static_json)
    assert pkg.save() is not None

    ver = Version.load_from_json(serve_static_json, package=pkg)
    ver.save()
    pkg.create_version_edge(ver)

    assert pkg.last_updated is not None
    assert ver.last_updated is not None

    meta = serve_static_json["analyses"]["metadata"]
    contributors = Contributor.load_from_json(meta)
    for person in contributors:
        person.save()
        assert person.last_updated is not None
        ver.add_edge_contributor(person)

    # The fixture metadata contains no contributor entries.
    assert Contributor.count() == 0

    for person in contributors:
        Contributor.delete_by_id(person.id)

    Version.delete_by_id(ver.id)
    Package.delete_by_id(pkg.id)
def test_support_vector_github_detail():
    """Adding the same github_details edge twice must not duplicate it."""
    p = Package.load_from_json(serve_static_json)
    assert p.save() is not None

    v = Version.load_from_json(serve_static_json, package=p)
    v.save()
    p.create_version_edge(v)

    assert p.last_updated is not None
    assert v.last_updated is not None

    github_data = serve_static_json["analyses"]["github_details"]
    github_result = GithubResult.load_from_json(github_data)
    # Not persisted yet, so no timestamp.
    assert github_result.last_updated is None

    # Creating the edge persists the github node as a side effect.
    v.add_edge_github_details(github_result)
    ls_before = github_result.last_updated
    assert (GithubResult.count() == 1)

    count_before = len(v.get_version_out_edge('has_github_details'))
    assert count_before == 1

    # Try adding the edge again: the edge count must stay the same and the
    # node's timestamp may only move forward.
    v.add_edge_github_details(github_result)
    count_after = len(v.get_version_out_edge('has_github_details'))
    ls_after = github_result.last_updated
    assert count_before == count_after
    assert ls_after >= ls_before

    GithubResult.delete_by_id(github_result.id)
    Version.delete_by_id(v.id)
    Package.delete_by_id(p.id)
# --- Example 6 ---
def test_blackduck_graph():
    """Blackduck CVE data can be attached to a version and fetched back.

    Bug fix: the analyses dict was previously read from an undefined name
    ``input_json``; every other test in this module drives off the
    ``serve_static_json`` fixture, so use it here too.  Also dropped a dead
    ``bl_list = []`` initialization that was immediately overwritten.
    """
    p = Package.load_from_json(serve_static_json)
    assert p.save() is not None

    v = Version.load_from_json(serve_static_json, package=p)
    v.save()
    p.create_version_edge(v)

    assert p.last_updated is not None
    assert v.last_updated is not None

    # The fixture contains exactly one blackduck issue.
    bl_list = bl.load_from_json(serve_static_json['analyses'])
    assert len(bl_list) == 1

    objBlackduck = bl.add_blackduck_issue(bl_list[0])
    v.add_blackduck_cve_edge(objBlackduck.id)

    # The issue must be retrievable as a CVE node by vulnerability name.
    bl_criteria = {'vulnerability_name': 'CVE-2015-1164'}
    obj_fetch = SecurityDetails.find_by_criteria('CVE', bl_criteria)
    assert obj_fetch.last_updated == objBlackduck.last_updated

    SecurityDetails.delete_by_id(obj_fetch.id)
    Version.delete_by_id(v.id)
    Package.delete_by_id(p.id)
# --- Example 7 ---
def load_dependencies(ecosystem, dependency_data):
    """Resolve dependency strings into package/version objects.

    Each dependency entry has the form ``"<name> <version>"``.  Existing
    graph nodes are reused when a matching one is found; otherwise fresh,
    not-yet-saved Package/Version objects are created.

    Returns three parallel lists: packages, versions and dependency types.
    """
    dep_packages = []
    dep_versions = []
    dep_types = []

    for dep_type, entries in get_dependencies(dependency_data).items():
        for entry in entries:
            # str.split always yields at least one element, so the name is
            # always present; the version may be missing.
            parts = entry.split(" ")
            name = parts[0]
            version = parts[1] if len(parts) > 1 else ""

            pkg = (Package.find_by_criteria(
                       'Package', {'ecosystem': ecosystem, 'name': name})
                   or Package(ecosystem, name))

            version_criteria = {
                'pecosystem': ecosystem,
                'pname': name,
                'version': version,
            }
            ver = (Version.find_by_criteria('Version', pkg, version_criteria)
                   or Version(pkg, version))

            dep_packages.append(pkg)
            dep_versions.append(ver)
            dep_types.append(dep_type)

    return dep_packages, dep_versions, dep_types
def test_support_vector_security():
    """Security issues from the fixture are saved, linked and de-duplicated.

    Re-saving a SecurityDetails node with the same CVE id must update the
    existing node (count stays 1) rather than create a duplicate.
    """
    p = Package.load_from_json(serve_static_json)
    assert p.save() is not None

    v = Version.load_from_json(serve_static_json, package=p)
    v.save()
    p.create_version_edge(v)

    assert p.last_updated is not None
    assert v.last_updated is not None

    security_data = serve_static_json["analyses"]["security_issues"]
    security_list, cvss_score, cve_ids = SecurityDetails.load_from_json(
        security_data)
    ts_list = []
    for s, cvss, cve in zip(security_list, cvss_score, cve_ids):
        s.save()
        assert s.last_updated is not None
        ts_list.append(s.last_updated)
        v.add_security_edge(s, cvss)

    # The fixture contains exactly one security issue.
    security_before = SecurityDetails.count()
    assert (security_before == 1)

    present_security = SecurityDetails.find_by_criteria(
        'CVE', {'cve_id': 'CVE-2015-1164'})
    assert (len(present_security.references) == 5)
    ref_list = [
        "https://github.com/expressjs/serve-static/issues/26",
        "https://bugzilla.redhat.com/show_bug.cgi?id=1181917",
        "http://xforce.iss.net/xforce/xfdb/99936",
        "http://www.securityfocus.com/bid/72064",
        "http://nodesecurity.io/advisories/serve-static-open-redirect"
    ]
    assert (all(r in ref_list for r in present_security.references))

    # Same CVE id as the fixture issue: save() must update in place.
    repeat_security_detail = SecurityDetails(cve_id='CVE-2015-1164',
                                             cvss=4.3,
                                             summary='')
    repeat_security_detail.issue_has_access('authentication', '')
    repeat_security_detail.issue_has_access('vector', 'NETWORK')
    repeat_security_detail.issue_has_access('complexity', 'MEDIUM')
    repeat_security_detail.issue_has_impact('integrity', 'partial')
    repeat_security_detail.issue_has_impact('confidentiality', '')
    repeat_security_detail.issue_has_impact('availability', '')

    repeat_security_detail.save()
    # `s` is deliberately the loop variable leaked from the zip-loop above
    # (the last — and only — saved issue).
    assert repeat_security_detail.id == s.id
    assert repeat_security_detail.last_updated >= ts_list[0]
    assert (SecurityDetails.count() == 1)

    for s in security_list:
        SecurityDetails.delete_by_id(s.id)

    SecurityDetails.delete_by_id(repeat_security_detail.id)
    Version.delete_by_id(v.id)
    Package.delete_by_id(p.id)
# --- Example 9 ---
def test_additional_data_as_attr():
    """Code metrics and CVE summary data can be stored as version attrs.

    Cleanup: removed the no-op self-assignments (``cve_ids = cve_ids``,
    ``cvss = cvss_score``) the original carried inside the guard, and bound
    the unused security node list to ``_``.
    """
    add_details = {}
    pck_obj = Package.load_from_json(serve_static_json)
    assert pck_obj.last_updated is None
    assert pck_obj.save() is not None

    ver_obj = Version.load_from_json(serve_static_json, package=pck_obj)
    assert ver_obj.last_updated is None
    ver_obj.save()
    v_ts1 = ver_obj.last_updated
    assert pck_obj.last_updated is not None
    assert v_ts1 is not None

    pck_obj.create_version_edge(ver_obj)
    code_metrics = CMR.load_from_json(
        npm_crumb_data["analyses"]["code_metrics"])

    # Only the score/id lists are needed here, not the node objects.
    security_data = serve_static_json["analyses"]["security_issues"]
    _, cvss_score, cve_ids = SD.load_from_json(security_data)

    if cvss_score and cve_ids:
        add_details["cve_ids"] = cve_ids[0]
        add_details["cvss"] = cvss_score[0]

    ver_obj.add_additional_data_as_attr(code_metrics)

    # The fixture contains exactly one CVE, so both keys must be present.
    ver_obj.add_cve_ids(add_details["cvss"], add_details["cve_ids"])
    assert ver_obj.last_updated >= v_ts1

    version_criteria = {
        'pecosystem': pck_obj.ecosystem,
        'pname': pck_obj.name,
        'version': ver_obj.version
    }

    present_version = Version.find_by_criteria(ver_obj.label, pck_obj,
                                               version_criteria)
    logger.info(present_version.__dict__)
    # Expected values come straight from the npm-crumb code_metrics and the
    # serve-static security fixtures.
    assert present_version.cm_loc == 1351
    assert present_version.cm_num_files == 16
    assert present_version.cm_avg_cyclomatic_complexity == -1
    assert present_version.cve_ids == ['CVE-2015-1164:7.5']
    assert present_version.last_updated == ver_obj.last_updated

    Version.delete_by_id(ver_obj.id)
    Package.delete_by_id(pck_obj.id)
def test_create_package_entity():
    """EPV imports from a folder create packages with correct latest_version.

    NOTE(review): two more functions named ``test_create_package_entity``
    appear later in this module; at import time the last definition shadows
    the earlier ones, so the earlier tests never run — consider renaming.
    """
    packages = Package.find_all()
    assert (len(packages) == 0)

    # Import a single maven EPV from the on-disk S3 snapshot.
    list_epv_1 = [{
        'ecosystem': 'maven',
        'name': 'org.slf4j:slf4j-api',
        'version': '1.5.6'
    }]
    import_epv_from_folder('test/data/S3-data', list_epv=list_epv_1)

    criteria_dict = {'ecosystem': 'maven', 'name': 'org.slf4j:slf4j-api'}
    p = Package.find_by_criteria('Package', criteria_dict)
    assert p.latest_version == '1.7.22'

    p.save()  # must be an update
    assert (Package.count() == 2)

    p.create()  # duplicate should not create new node
    assert (Package.count() == 2)

    # junit:junit exists as a dependency node but without version data yet.
    criteria_dict = {'ecosystem': 'maven', 'name': 'junit:junit'}
    p2 = Package.find_by_criteria('Package', criteria_dict)

    assert p2.latest_version == '-1'

    list_epv_2 = [{
        'ecosystem': 'maven',
        'name': 'junit:junit',
        'version': '4.8.2'
    }]
    import_epv_from_folder('test/data/S3-data', list_epv=list_epv_2)

    # After the import the junit package carries real version metadata.
    criteria_dict = {'ecosystem': 'maven', 'name': 'junit:junit'}
    p3 = Package.find_by_criteria('Package', criteria_dict)
    assert p3.latest_version == '4.12'

    p.save()  # must be an update
    assert (Package.count() == 2)

    Package.delete_all()
    assert (Package.count() == 0)

    Version.delete_all()
    assert (Version.count() == 0)
# --- Example 11 ---
def test_package_import_from_folder():
    """Importing a single package folder creates exactly one EPV."""
    package_dir = 'test/data/S3-data/maven/junit:junit'

    # Graph must start empty.
    packages = Package.find_all()
    assert (len(packages) == 0)

    versions = Version.find_all()
    assert (len(versions) == 0)

    # A bare Package object has no identity until saved.
    p = Package()
    assert (p.id is None)
    assert (p.label == "Package")
    assert p.last_updated is None

    report = import_from_folder(package_dir)

    # Import report values come from the on-disk fixture data.
    assert (report.get('status') == 'Success')
    assert (report.get('count_imported_EPVs') == 1)
    assert (report.get('last_imported_EPV') == '4.8.2.json')
    assert (report.get('max_finished_at') == '2017-02-24T13:42:29.665786')

    criteria_dict = {'ecosystem': 'maven', 'name': 'junit:junit'}
    p1 = Package.find_by_criteria('Package', criteria_dict)

    assert p1 is not None
    assert p1.id is not None
    assert (p1.ecosystem == 'maven')
    assert (p1.latest_version == '4.12')
    assert (p1.package_dependents_count == -1)
    assert (p1.name == 'junit:junit')

    criteria_dict = {
        'pecosystem': 'maven',
        'pname': 'junit:junit',
        'version': '4.8.2'
    }
    v1 = Version.find_by_criteria('Version', p1, criteria_dict)

    assert v1 is not None
    assert v1.ecosystem_package is not None
    assert (v1.ecosystem_package.ecosystem == 'maven')

    # Exactly one package and one version were imported.
    packages = Package.find_all()
    assert (len(packages) == 1)

    versions = Version.find_all()
    assert (len(versions) == 1)

    Package.delete_by_id(p1.id)
    assert (Package.count() == 0)

    Version.delete_by_id(v1.id)
    assert (Version.count() == 0)

    GraphMetaData.delete_all()
    assert (GraphMetaData.count() == 0)
def test_version_dependencies():
    """Dependency-snapshot entries become edges to other version nodes."""
    pkg = Package.load_from_json(serve_static_json)
    assert pkg.save() is not None

    ver = Version.load_from_json(serve_static_json, package=pkg)
    ver.save()
    pkg.create_version_edge(ver)

    snapshot = serve_static_json["analyses"]["dependency_snapshot"]
    dep_pkgs, dep_vers, dep_kinds = \
        vdv.load_dependencies(ver.ecosystem_package.ecosystem, snapshot)
    for dep_pkg, dep_ver, dep_kind in zip(dep_pkgs, dep_vers, dep_kinds):
        assert dep_pkg.save() is not None
        dep_ver.save()
        dep_pkg.create_version_edge(dep_ver)
        ver.add_edge_dependency(dep_ver, dep_kind)

    # Four dependencies plus the original EPV -> 10 nodes in total.
    assert Version.count_dependency(ver.id) == 4
    assert Version.count() + Package.count() == 10

    for dep_pkg, dep_ver in zip(dep_pkgs, dep_vers):
        Version.delete_by_id(dep_ver.id)
        Package.delete_by_id(dep_pkg.id)

    Version.delete_by_id(ver.id)
    Package.delete_by_id(pkg.id)
# --- Example 13 ---
def test_github_attr():
    """GitHub statistics can be stored as attributes on the Package node."""
    pkg = Package.load_from_json(serve_static_json)
    assert pkg.last_updated is None

    assert pkg.save() is not None
    saved_ts = pkg.last_updated
    assert saved_ts is not None
    assert Package.count() == 1

    gh_result = load_github_result_from_json(serve_static_json)
    # Not persisted on its own, so no timestamp yet.
    assert gh_result.last_updated is None

    # Writing the stats onto the package must bump its timestamp.
    pkg.add_github_details_as_attr(gh_result)
    assert pkg.last_updated >= saved_ts

    lookup = {'ecosystem': pkg.ecosystem, 'name': pkg.name}
    fetched = Package.find_by_criteria('Package', lookup)
    assert fetched.last_updated == pkg.last_updated
    # Expected values come straight from the serve-static fixture.
    assert fetched.gh_forks == 84
    assert fetched.gh_stargazers == 538
    assert fetched.gh_issues_last_year_opened == 15
    assert fetched.gh_issues_last_year_closed == 16
    assert fetched.gh_issues_last_month_opened == 0
    assert fetched.gh_issues_last_month_closed == 0
    assert fetched.gh_prs_last_year_opened == 11
    assert fetched.gh_prs_last_year_closed == 11
    assert fetched.gh_prs_last_month_opened == 1
    assert fetched.gh_prs_last_month_closed == 1

    Package.delete_by_id(pkg.id)
    assert Package.count() == 0
def test_add_code_metrics_non_empty():
    """Code-metrics results attach to a version exactly once.

    Bug fix: the cleanup used ``CodeMetricsLanguage.delete_by_id`` on the
    ``CodeMetricsResult`` node id; it now deletes through the correct class.
    Also dropped the unused ``results2`` binding.
    """
    p = Package.load_from_json(npm_crumb_data)
    assert p.save() is not None

    v = Version.load_from_json(npm_crumb_data, package=p)
    v.save()
    p.create_version_edge(v)
    assert p.last_updated is not None
    assert v.last_updated is not None
    code_metrics_data = npm_crumb_data["analyses"]["code_metrics"]
    code_metrics = CodeMetricsResult.load_from_json(code_metrics_data)
    # Not persisted yet, so no id or timestamp.
    assert code_metrics.last_updated is None
    assert code_metrics.id is None

    # Creating the edge persists the metrics node (and per-language nodes).
    v.add_code_metrics_edge(code_metrics)
    assert code_metrics.id >= 0
    assert code_metrics.last_updated is not None
    assert code_metrics.last_updated > v.last_updated
    assert (CodeMetricsResult.count() == 1)
    assert (CodeMetricsLanguage.count() == 6)

    count_before = len(v.get_version_out_edge('has_code_metrics'))
    assert count_before == 1

    # Adding the edge again must not duplicate edges or language nodes.
    v.add_code_metrics_edge(code_metrics)

    count_after = len(v.get_version_out_edge('has_code_metrics'))
    assert count_after == count_before
    assert CodeMetricsLanguage.count() == 6

    lang_nodes = CodeMetricsLanguage.find_all()

    for lang_node in lang_nodes:
        CodeMetricsLanguage.delete_by_id(lang_node.id)

    CodeMetricsResult.delete_by_id(code_metrics.id)
    Version.delete_by_id(v.id)
    Package.delete_by_id(p.id)
# --- Example 15 ---
def test_bucket_import_from_folder():
    """Importing the whole S3 bucket snapshot creates all three EPVs."""
    bucket_dir = 'test/data/S3-data'

    # Graph must start empty.
    packages = Package.find_all()
    assert (len(packages) == 0)

    # A bare Package object has no identity until saved.
    p = Package()
    assert (p.id is None)
    assert (p.label == "Package")
    assert p.last_updated is None

    report = import_from_folder(bucket_dir)

    # Import report values come from the on-disk fixture data.
    assert (report.get('status') == 'Success')
    assert (report.get('count_imported_EPVs') == 3)
    assert (report.get('last_imported_EPV') == 'npm/sequence/3.0.0.json')
    assert (report.get('max_finished_at') == '2017-02-24T13:43:11.872916')

    criteria_dict = {'ecosystem': 'npm', 'name': 'sequence'}
    p1 = Package.find_by_criteria('Package', criteria_dict)

    assert p1 is not None
    assert p1.id is not None
    assert (p1.ecosystem == 'npm')
    assert (p1.latest_version == '3.0.0')
    assert (p1.package_dependents_count == 22)
    assert (p1.name == 'sequence')

    criteria_dict = {
        'pecosystem': 'npm',
        'pname': 'sequence',
        'version': '3.0.0'
    }
    v1 = Version.find_by_criteria('Version', p1, criteria_dict)

    assert v1 is not None
    assert v1.ecosystem_package is not None
    assert (v1.ecosystem_package.ecosystem == 'npm')

    # Three packages / four versions in total across the bucket fixture.
    packages = Package.find_all()
    assert (len(packages) == 3)

    versions = Version.find_all()
    assert (len(versions) == 4)

    Package.delete_all()
    assert (Package.count() == 0)

    Version.delete_all()
    assert (Version.count() == 0)

    GraphMetaData.delete_all()
    assert (GraphMetaData.count() == 0)
def test_support_vector_license():
    """License nodes are saved, linked to the version and de-duplicated.

    Bug fix: ``LicenseDetails.delete_by_id(new_license_detail.id)`` was
    inside the cleanup loop, deleting the same node once per iteration; it
    now runs once after the loop.
    """
    p = Package.load_from_json(serve_static_json)
    assert p.save() is not None

    v = Version.load_from_json(serve_static_json, package=p)
    v.save()
    p.create_version_edge(v)

    assert p.last_updated is not None
    assert v.last_updated is not None

    license_data = serve_static_json["analyses"]["source_licenses"]
    license_details_list, license_counts_list, _ = LicenseDetails.load_from_json(
        license_data)
    ts_list = []
    for license_detail, license_count in zip(license_details_list,
                                             license_counts_list):
        license_detail.save()
        assert license_detail.last_updated is not None
        ts_list.append(license_detail.last_updated)
        v.add_license_edge(license_detail, license_count)

    # The fixture contains exactly one license.
    assert (LicenseDetails.count() == 1)

    # Saving a license with the same name must update the existing node.
    new_license_detail = LicenseDetails(name='MITNFA')
    new_license_detail.save()
    assert new_license_detail.last_updated >= ts_list[0]

    # Duplicate license should not be inserted
    assert (LicenseDetails.count() == 1)

    for license_detail in license_details_list:
        LicenseDetails.delete_by_id(license_detail.id)
    LicenseDetails.delete_by_id(new_license_detail.id)
    Version.delete_by_id(v.id)
    Package.delete_by_id(p.id)
def test_create_package_entity():
    """HTTP-grouped import of the serve-static folder creates one package.

    NOTE(review): this function name is defined multiple times in this
    module; the last definition shadows the earlier ones at import time —
    consider renaming so each test actually runs.
    """
    src_dir = 'test/data/full_import/npm/serve-static'
    data_source = LocalFileSystem(src_dir)
    list_keys = data_source.list_files()
    grouped_keys = _group_keys_directory(list_keys, data_source.src_dir)
    report = _import_grouped_keys_http(data_source, grouped_keys)
    # Import report values come from the on-disk fixture data.
    assert report is not None
    assert report['status'] == 'Success'
    assert report['message'] == 'The import finished successfully!'
    assert report['last_imported_EPV'] == '1.7.1.json'
    assert report['count_imported_EPVs'] == 1
    assert report['max_finished_at'] == '2017-02-08T12:26:51.962609'

    criteria_dict = {'ecosystem': 'npm', 'name': 'serve-static'}
    p = Package.find_by_criteria('Package', criteria_dict)
    assert p is not None
    assert p.ecosystem == 'npm'
    assert p.name == 'serve-static'
    # The package name is tokenized on '-' for search.
    assert p.tokens == ['serve', 'static']

    assert Package.count() == 1
    Package.delete_by_id(p.id)
    assert Package.count() == 0
def test_package() -> None:
    package = Package(
        '009e35ef-1f50-4bf3-ab58-11eb85bf5503',
        Name('Soldered Wire Connector 1x19 ⌀1.0mm'),
        Description(
            'A 1x19 soldered wire connector with 2.54mm pin spacing and 1.0mm drill holes.\n\nGenerated with librepcb-parts-generator (generate_connectors.py)'
        ), Keywords('connector, 1x19, d1.0, connector, soldering, generic'),
        Author('Danilo B.'), Version('0.1'), Created('2018-10-17T19:13:41Z'),
        Deprecated(False), Category('56a5773f-eeb4-4b39-8cb9-274f3da26f4f'))

    package.add_pad(
        PackagePad('5c4d39d3-35cc-4836-a082-693143ee9135', Name('1')))
    package.add_pad(
        PackagePad('6100dd55-d3b3-4139-9085-d5a75e783c37', Name('2')))

    package.add_footprint(create_footprint())
    assert str(
        package) == """(librepcb_package 009e35ef-1f50-4bf3-ab58-11eb85bf5503
def test_version_entity():
    """Version nodes save/update correctly and edges are de-duplicated.

    Bug fix: the ``is_published_in`` check iterated with ``for pck`` while
    testing ``pub`` — a NameError for any non-empty list (and a vacuous
    pass otherwise).  The generator now binds ``pub``.
    """
    p = Package.load_from_json(serve_static_json)
    assert p.save() is not None

    v = Version.load_from_json(serve_static_json, package=p)
    v.save()
    assert (Version.count() == 1)
    ls_before = v.last_updated
    assert ls_before is not None

    criteria_dict = {
        'pecosystem': 'npm',
        'pname': 'serve-static',
        'version': '1.7.1'
    }
    v2 = Version.find_by_criteria('Version', p, criteria_dict)
    assert v2.last_updated == v.last_updated

    # Re-saving must update in place: same count, timestamp moves forward.
    v.save()
    ls_after = v.last_updated
    assert (Version.count() == 1)
    assert ls_after >= ls_before
    assert v.last_updated >= v2.last_updated

    test_packaged_in = ['nodejs-serve-static']
    test_published_in = []
    assert (all(pck in test_packaged_in for pck in v.is_packaged_in))
    assert (all(pub in test_published_in for pub in v.is_published_in))

    # now create an edge
    edge_count_before = Package.edge_count()
    p.create_version_edge(v)
    edge_count_after = Package.edge_count()
    assert (edge_count_after == edge_count_before + 1)

    # now try to create an edge again; must be idempotent
    edge_count_before = Package.edge_count()
    p.create_version_edge(v)
    edge_count_after = Package.edge_count()
    assert (edge_count_after == edge_count_before)

    # this should return all versions associated with this package
    p_versions = p.get_versions()
    assert (len(p_versions) == 1)

    Version.delete_by_id(v.id)
    Package.delete_by_id(p.id)
# --- Example 20 ---
def test_ecosystem_import_from_folder():
    """Importing the maven ecosystem folder creates both maven EPVs."""
    ecosystem_dir = 'test/data/S3-data/maven'

    # Graph must start empty.
    packages = Package.find_all()
    assert (len(packages) == 0)

    # A bare Package object has no identity until saved.
    p = Package()
    assert (p.id is None)
    assert (p.label == "Package")
    assert p.last_updated is None

    report = import_from_folder(ecosystem_dir)

    # Import report values come from the on-disk fixture data.
    assert (report.get('status') == 'Success')
    assert (report.get('count_imported_EPVs') == 2)
    assert (
        report.get('last_imported_EPV') == 'org.slf4j:slf4j-api/1.5.6.json')
    assert (report.get('max_finished_at') == '2017-02-24T13:43:11.872916')

    criteria_dict = {'ecosystem': 'maven', 'name': 'junit:junit'}
    p1 = Package.find_by_criteria('Package', criteria_dict)

    criteria_dict = {
        'pecosystem': 'maven',
        'pname': 'junit:junit',
        'version': '4.8.2'
    }
    v1 = Version.find_by_criteria('Version', p1, criteria_dict)

    assert v1 is not None
    assert v1.ecosystem_package is not None
    assert (v1.ecosystem_package.ecosystem == 'maven')

    # Two packages / three versions in total across the maven fixture.
    packages = Package.find_all()
    assert (len(packages) == 2)

    versions = Version.find_all()
    assert (len(versions) == 3)

    Package.delete_all()
    assert (Package.count() == 0)

    Version.delete_all()
    assert (Version.count() == 0)

    GraphMetaData.delete_all()
    assert (GraphMetaData.count() == 0)
def load_package_version_values(filename):
    """Build a fully-populated Version object from an analysis JSON file.

    Reads the file, instantiates the package/version pair and wires in
    github details, authors, contributors, licenses, dependencies and NVD
    security issues.  Returns the populated Version (nothing is saved).
    """
    data = gv.read_from_file(filename)

    package = Package.load_from_json(data)
    version = Version.load_from_json(data, package)

    # GitHub details are attached wholesale as an object attribute.
    version.github_details = GithubResult.load_from_json(
        data["analyses"]["github_details"])

    # Authors and contributors both come from the metadata section.
    meta = data["analyses"]["metadata"]
    for author in Author.load_from_json(meta):
        version.version_authored_by(author)
    for contributor in Contributor.load_from_json(meta):
        version.version_contributed_by(contributor)

    license_details, license_counts, license_names = \
        LicenseDetails.load_from_json(data["analyses"]["source_licenses"])
    for lic, count in zip(license_details, license_counts):
        version.version_covered_under(lic, count)

    version.licenses = license_names

    _, dep_versions, dep_types = vdv.load_dependencies(
        version.ecosystem_package.ecosystem,
        data["analyses"]["dependency_snapshot"])
    for dep_ver, dep_type in zip(dep_versions, dep_types):
        version.version_depends_on(dep_ver, dep_type)

    issues, _, _ = SecurityDetails.load_from_json(
        data["analyses"]["security_issues"])
    for issue in issues:
        version.version_has_nvd_issues(issue)

    return version
 def save(self):
     """Create or update this Version node in the graph.

     Requires the parent Package node to already exist; returns None (and
     logs an error) otherwise.  If a version with the same
     ecosystem/name/version triple exists, its id is adopted and the node
     is updated in place; otherwise a new node is created.
     """
     package_criteria = {
         'ecosystem': self.ecosystem_package.ecosystem,
         'name': self.ecosystem_package.name
     }
     pck_obj = Package.find_by_criteria('Package', package_criteria)
     if pck_obj is None:
         # A version cannot exist without its package node.
         logger.error(
             "create() failed because package node does not exists")
         return None
     version_criteria = {
         'pecosystem': self.ecosystem_package.ecosystem,
         'pname': self.ecosystem_package.name,
         'version': self.version
     }
     present_version = Version.find_by_criteria(self.label, pck_obj,
                                                version_criteria)
     if present_version is None:
         return self.create()
     else:
         # Adopt the existing node's id so update() targets it.
         self.id = present_version.id
         return self.update()
# --- Example 23 ---
    def populate_from_json(cls, input_json):
        """Populate the graph from one EPV analysis JSON document.

        Creates the package/version pair, then attaches every available
        analysis section: dependencies, authors/contributors, licenses,
        NVD security issues, github details, code metrics and blackduck
        issues.  Returns None early if the package cannot be saved.

        Bug fixes: contributors are now linked with
        ``add_edge_contributor`` (they were previously linked as authors),
        and the dependency loop now skips an entry when ``save()`` fails
        (the old ``if d_pck is None`` check could never trigger).  The
        dead ``licenses = set()`` initialization was removed.
        """
        # NPM packages with dependencies, versions i.e. Package version
        # insertion
        logger.info("Instantiating package ...")
        package = Package.load_from_json(input_json)
        logger.info("Saving package ...")
        pkg_id = package.save()
        logger.info(" Package node ID: %s" % pkg_id)
        if pkg_id is None:
            return
        version = Version.load_from_json(input_json, package=package)
        ver_id = version.save()
        logger.info(" Version node ID: %s" % ver_id)
        package.create_version_edge(version)

        analyses = input_json["analyses"]
        if "dependency_snapshot" in analyses:
            dependency_snapshot = analyses["dependency_snapshot"]
            dependency_pck_list, dependency_ver_list, dependency_type = vdv.load_dependencies(
                version.ecosystem_package.ecosystem, dependency_snapshot)
            for d_pck, d_ver, d_type in zip(dependency_pck_list,
                                            dependency_ver_list,
                                            dependency_type):
                # Skip dependencies whose package node cannot be saved.
                if d_pck.save() is None:
                    continue
                d_ver.save()
                d_pck.create_version_edge(d_ver)
                version.add_edge_dependency(d_ver, d_type)

        if "metadata" in analyses:
            meta_data = analyses["metadata"]
            print("  Adding authors_list")
            authors_list = Author.load_from_json(meta_data)
            for author in authors_list:
                a_id = author.save()
                print("    author ID: %s" % a_id)
                version.add_edge_author(author)

            print("  Adding contributor_list")
            contributor_list = Contributor.load_from_json(meta_data)
            for contributor in contributor_list:
                c_id = contributor.save()
                print("    contributor ID: %s" % c_id)
                version.add_edge_contributor(contributor)

        # License Information
        if "source_licenses" in analyses:
            print("  Adding source_licenses")
            license_data = analyses["source_licenses"]
            license_details_list, license_counts_list, licenses = LicenseDetails.load_from_json(
                license_data)
            for used_license, license_count in zip(license_details_list,
                                                   license_counts_list):
                lic_id = used_license.save()
                print("    license_data ID: %s" % lic_id)
                version.add_license_edge(used_license, license_count)

            # License names are additionally stored as a version attribute.
            version.add_license_attribute(licenses)

        # NVD Security Information
        if "security_issues" in analyses:
            print("  Adding security_issues")
            security_data = analyses["security_issues"]
            security_list, cvss_score, cve_ids = SecurityDetails.load_from_json(
                security_data)
            for s, cvss, cve_id in zip(security_list, cvss_score, cve_ids):
                ss_id = s.save()
                print("    security_data ID: %s" % ss_id)
                version.add_security_edge(s, cvss)
                version.add_cve_ids(cvss, cve_id)

        # GitHub Details
        if "github_details" in analyses:
            print("  Adding github_details")
            github_data = analyses["github_details"]
            github_result = GithubResult.load_from_json(github_data)
            # Stored both as package attributes and as a version edge.
            package.add_github_details_as_attr(github_result)
            version.add_edge_github_details(github_result)

        # Code Metrics
        if "code_metrics" in analyses:
            print("  Adding code_metrics")
            code_metrics_data = analyses["code_metrics"]
            code_metrics = CodeMetricsResult.load_from_json(code_metrics_data)
            version.add_code_metrics_edge(code_metrics)
            version.add_additional_data_as_attr(code_metrics)

        if "blackduck" in analyses:
            print("Adding extra security info via blackduck")
            blackduck_cve = analyses["blackduck"]
            issue_list = bl.load_from_json(blackduck_cve)
            for issue in issue_list:
                bl_obj = bl.add_blackduck_issue(issue)
                version.add_blackduck_cve_edge(bl_obj.id)
def test_create_package_entity():
    """Exercise the full Package lifecycle: construct, save, find, update, delete."""
    # A freshly constructed Package has no identity or timestamp yet.
    pkg = Package()
    assert pkg.id is None
    assert pkg.label == "Package"
    assert pkg.last_updated is None

    # The graph must start out empty.
    assert len(Package.find_all()) == 0

    pkg = Package.load_from_file('test/data/npm--serve-static-1.7.1.json')

    # First save inserts a new node and stamps last_updated.
    assert pkg.save() is not None
    first_ts = pkg.last_updated
    assert first_ts is not None
    assert Package.count() == 1

    # Looking the node up by criteria yields the same timestamp.
    found = Package.find_by_criteria(
        'Package', {'ecosystem': 'npm', 'name': 'serve-static'})
    assert found.last_updated == pkg.last_updated

    pkg.save()  # must be an update
    assert Package.count() == 1
    second_ts = pkg.last_updated
    assert second_ts >= first_ts

    pkg.create()  # duplicate should not create new node
    assert Package.count() == 1
    Package.delete_by_id(pkg.id)

    assert Package.count() == 0
Exemple #25
0
def generate_pkg(
    dirpath: str,
    diameter: float,
    height: float,
    pitch: float,
    lead_width: float,
    author: str,
    version: str,
    create_date: Optional[str],
) -> None:
    # Name according IPC-7351 "Capacitor, Polarized Radial Diameter":
    # CAPPRD + Lead Spacing + W Lead Width + D Body Diameter + H Body Height
    name = 'CAPPRD{}W{}D{}H{}'.format(format_ipc_dimension(pitch),
                                      format_ipc_dimension(lead_width),
                                      format_ipc_dimension(diameter),
                                      format_ipc_dimension(height))
    variant = get_variant(diameter, height, pitch, lead_width)

    def _pkg_uuid(identifier: str) -> str:
        return uuid('pkg', variant, identifier)

    def _create_footprint(footprint_identifier: str, name: str) -> Footprint:
        def _fpt_uuid(identifier: str) -> str:
            return _pkg_uuid(footprint_identifier + '-' + identifier)

        drill = LEAD_WIDTH_TO_DRILL[lead_width]
        restring = min(
            (0.4 if diameter >= 6.0 else 0.3),  # preferred restring
            (pitch - drill - 0.25) / 2)  # minimum required restring
        pad_diameter = drill + (2 * restring)  # outer diameter of pad
        courtyard_diameter = diameter + (1.0 if diameter >= 10.0 else 0.8)

        def _generate_fill_polygon(identifier: str, layer: str) -> Polygon:
            polygon = Polygon(
                uuid=_fpt_uuid(identifier),
                layer=Layer(layer),
                width=Width(0.0),
                fill=Fill(True),
                grab_area=GrabArea(False),
            )
            if ((pitch - pad_diameter) < 0.6):
                # not enough space, use a simplified polygon
                vertices = [
                    (0.0, (diameter / 2) - 0.2, 0.0),
                    (0.0, (pad_diameter / 2) + 0.2, 0.0),
                    (pitch / 2, (pad_diameter / 2) + 0.2, -180.0),
                    (pitch / 2, -(pad_diameter / 2) - 0.2, 0.0),
                    (0.0, -(pad_diameter / 2) - 0.2, 0.0),
                    (0.0, -(diameter / 2) + 0.2, 180.0),
                    (0.0, (diameter / 2) - 0.2, 0.0),
                ]
            else:
                vertices = [
                    (0.0, (diameter / 2) - 0.2, 0.0),
                    (0.0, 0.0, 0.0),
                    ((pitch / 2) - (pad_diameter / 2) - 0.2, 0.0, -180.0),
                    ((pitch / 2) + (pad_diameter / 2) + 0.2, 0.0, -180.0),
                    ((pitch / 2) - (pad_diameter / 2) - 0.2, 0.0, 0.0),
                    (0.0, 0.0, 0.0),
                    (0.0, -(diameter / 2) + 0.2, 180.0),
                    (0.0, (diameter / 2) - 0.2, 0.0),
                ]
            for vertex in vertices:
                polygon.add_vertex(
                    Vertex(Position(vertex[0], vertex[1]), Angle(vertex[2])))
            return polygon

        footprint = Footprint(
            uuid=_fpt_uuid('footprint'),
            name=Name(name),
            description=Description(''),
        )
        footprint.add_pad(
            FootprintPad(
                uuid=_pkg_uuid('pad-plus'),
                side=Side.THT,
                shape=Shape.RECT,
                position=Position(-pitch / 2, 0),
                rotation=Rotation(0),
                size=Size(pad_diameter, pad_diameter),
                drill=Drill(drill),
            ))
        footprint.add_pad(
            FootprintPad(
                uuid=_pkg_uuid('pad-minus'),
                side=Side.THT,
                shape=Shape.ROUND,
                position=Position(pitch / 2, 0),
                rotation=Rotation(0),
                size=Size(pad_diameter, pad_diameter),
                drill=Drill(drill),
            ))

        # placement
        footprint.add_circle(
            Circle(
                uuid=_fpt_uuid('circle-placement'),
                layer=Layer('top_placement'),
                width=Width(0.2),
                fill=Fill(False),
                grab_area=GrabArea(False),
                diameter=Diameter(diameter + 0.2),
                position=Position(0.0, 0.0),
            ))
        footprint.add_polygon(
            _generate_fill_polygon(
                identifier='polygon-placement-fill',
                layer='top_placement',
            ))

        # documentation
        footprint.add_circle(
            Circle(
                uuid=_fpt_uuid('circle-documentation'),
                layer=Layer('top_documentation'),
                width=Width(0.2),
                fill=Fill(False),
                grab_area=GrabArea(False),
                diameter=Diameter(diameter - 0.2),
                position=Position(0.0, 0.0),
            ))
        footprint.add_polygon(
            _generate_fill_polygon(
                identifier='polygon-documentation-fill',
                layer='top_documentation',
            ))

        # courtyard
        footprint.add_circle(
            Circle(
                uuid=_fpt_uuid('circle-courtyard'),
                layer=Layer('top_courtyard'),
                width=Width(0.2),
                fill=Fill(False),
                grab_area=GrabArea(False),
                diameter=Diameter(courtyard_diameter),
                position=Position(0.0, 0.0),
            ))

        # texts
        footprint.add_text(
            StrokeText(
                uuid=_fpt_uuid('text-name'),
                layer=Layer('top_names'),
                height=Height(1.0),
                stroke_width=StrokeWidth(0.2),
                letter_spacing=LetterSpacing.AUTO,
                line_spacing=LineSpacing.AUTO,
                align=Align('center bottom'),
                position=Position(0.0, (diameter / 2) + 0.8),
                rotation=Rotation(0.0),
                auto_rotate=AutoRotate(True),
                mirror=Mirror(False),
                value=Value('{{NAME}}'),
            ))
        footprint.add_text(
            StrokeText(
                uuid=_fpt_uuid('text-value'),
                layer=Layer('top_values'),
                height=Height(1.0),
                stroke_width=StrokeWidth(0.2),
                letter_spacing=LetterSpacing.AUTO,
                line_spacing=LineSpacing.AUTO,
                align=Align('center top'),
                position=Position(0.0, -(diameter / 2) - 0.8),
                rotation=Rotation(0.0),
                auto_rotate=AutoRotate(True),
                mirror=Mirror(False),
                value=Value('{{VALUE}}'),
            ))
        return footprint

    # package
    package = Package(
        uuid=_pkg_uuid('pkg'),
        name=Name(name),
        description=Description(
            'Polarized radial electrolytic capacitor.\\n\\n' +
            'Diameter: {} mm\\n'.format(diameter) +
            'Height: {} mm\\n'.format(height) +
            'Lead Spacing: {} mm\\n'.format(pitch) +
            'Max. Lead Diameter: {} mm\\n\\n'.format(lead_width) +
            'Generated with {}'.format(generator)),
        keywords=Keywords(
            'electrolytic,capacitor,polarized,radial,c,cap,cpol'),
        author=Author(author),
        version=Version(version),
        created=Created(create_date or now()),
        deprecated=Deprecated(False),
        category=Category('ee75e31d-f231-41d9-8a3b-bea5114f41e3'),
    )
    package.add_pad(PackagePad(uuid=_pkg_uuid('pad-plus'), name=Name('+')))
    package.add_pad(PackagePad(uuid=_pkg_uuid('pad-minus'), name=Name('-')))
    package.add_footprint(
        _create_footprint(
            footprint_identifier='default',
            name='default',
        ))

    # write files
    pkg_dir_path = path.join(dirpath, package.uuid)
    if not (path.exists(pkg_dir_path) and path.isdir(pkg_dir_path)):
        makedirs(pkg_dir_path)
    with open(path.join(pkg_dir_path, '.librepcb-pkg'), 'w') as f:
        f.write('0.1\n')
    with open(path.join(pkg_dir_path, 'package.lp'), 'w') as f:
        f.write(str(package))
        f.write('\n')
    print('Wrote package {}'.format(name))
    def create(self):
        """Insert this Version vertex into the graph if absent.

        Requires ecosystem_package to be set and its Package node to already
        exist. When a matching Version vertex is found, its id and timestamp
        are adopted instead of inserting a duplicate.

        Returns the vertex id, or None on any failure.
        """
        logger.debug("create() %s - data:\n%s\n" %
                     (self.label, self.to_json()))

        # Without the owning package we cannot build the lookup criteria.
        if self.ecosystem_package is None:
            logger.error("create() failed because ecosystem_package is None")
            return None

        package_node = Package.find_by_criteria('Package', {
            'ecosystem': self.ecosystem_package.ecosystem,
            'name': self.ecosystem_package.name
        })
        if package_node is None:
            logger.error(
                "create() failed because package node does not exists")
            return None

        try:
            existing = Version.find_by_criteria(self.label, package_node, {
                'pecosystem': self.ecosystem_package.ecosystem,
                'pname': self.ecosystem_package.name,
                'version': self.version
            })

            if existing is not None:
                # Adopt the already-stored vertex rather than inserting twice.
                logger.debug("Version exists: %s " % existing.id)
                self.last_updated = existing.last_updated
                self.id = existing.id

                logger.info("---Create--- %s ---EXISTS = %d" %
                            (self.label, self.id))

                return self.id

            ts = time.time()

            # Build the insertion traversal property by property.
            query = self.g().addV(self.label)
            for prop_name, prop_value in (
                    ('vertex_label', self.label),
                    ('pname', self.ecosystem_package.name),
                    ('pecosystem', self.ecosystem_package.ecosystem),
                    ('version', self.version),
                    ('description', self.description),
                    ('dependents_count', self.dependents_count),
                    ('shipped_as_downstream', self.shipped_as_downstream),
                    ('last_updated', ts)):
                query = query.property(prop_name, prop_value)

            # Multi-valued properties are appended one entry at a time.
            for pck in self.is_packaged_in:
                query.property('is_packaged_in', pck)
            for pub in self.is_published_in:
                query.property('is_published_in', pub)

            results = query.toList()

            logger.debug("create() %s - results: %s" %
                         (self.label, results))

            self.last_updated = ts
            self.id = results[0].id
            logger.debug("results: %s" % (results))
            logger.debug("Vertex ID : %s, %s: %s" %
                         (self.id, self.label, self))

            logger.info("---Create--- %s ---NEW = %d" %
                        (self.label, self.id))

            return self.id

        except Exception as e:
            logger.error("create() failed: %s" % e)
            return None
Exemple #27
0
def test_full_import_and_incr_update():
    """Run one full bulk import, then two incremental updates, then clean up."""
    data_dir = 'test/data'

    # The target graph must carry no metadata before the first import.
    assert GraphPopulator.get_metadata() is None

    # Full import: insert all the EPVs from the given data source.
    report = import_bulk(
        data_source=LocalFileSystem(
            src_dir=os.path.join(data_dir, 'full_import')),
        book_keeper=None)
    assert report.get('status') == 'Success'
    assert report.get('count_imported_EPVs') == 1
    assert report.get('last_imported_EPV') == 'npm/serve-static/1.7.1.json'
    assert report.get('max_finished_at') == '2017-02-08T12:26:51.962609'

    meta = GraphPopulator.get_metadata()
    assert meta is not None
    assert meta.last_incr_update_ts == '2017-02-08T12:26:51.962609'

    # Incremental update 1: a brand-new EPV appeared: npm/send/0.10.1
    report = import_bulk(
        data_source=LocalFileSystem(
            src_dir=os.path.join(data_dir, 'incr_update1')),
        book_keeper=JsonBookKeeper(
            json_file_name=os.path.join(data_dir, 'book_keeping1.json')))
    assert report.get('status') == 'Success'
    assert report.get('count_imported_EPVs') == 1
    assert report.get('last_imported_EPV') == 'npm/send/0.10.1.json'
    assert report.get('max_finished_at') == '2017-02-22T15:34:59.469864'

    meta = GraphPopulator.get_metadata()
    assert meta is not None
    assert meta.last_incr_update_ts == '2017-02-22T15:34:59.469864'

    # Incremental update 2: one new EPV (npm/parseurl/1.3.1) plus a refresh
    # of the already-imported npm/serve-static/1.7.1
    report = import_bulk(
        data_source=LocalFileSystem(
            src_dir=os.path.join(data_dir, 'incr_update2')),
        book_keeper=JsonBookKeeper(
            json_file_name=os.path.join(data_dir, 'book_keeping2.json')))
    assert report.get('status') == 'Success'
    assert report.get('count_imported_EPVs') == 2
    assert report.get('last_imported_EPV') == 'npm/serve-static/1.7.1.json'
    assert report.get('max_finished_at') == '2017-02-22T15:35:51.962609'

    meta = GraphPopulator.get_metadata()
    assert meta is not None
    assert meta.last_incr_update_ts == '2017-02-22T15:35:51.962609'

    # Cleanup: wipe every entity type (same order as before) and verify
    # the graph is empty again.
    for entity in (GraphMetaData, LicenseDetails, Author, CodeMetricsResult,
                   CodeMetricsLanguage, GithubResult, Contributor, Package,
                   Version):
        entity.delete_all()
        assert entity.count() == 0