    def test_from_str(self, coords, from_str, is_from_str_ok, to_str,
                      to_str_omit_version, to_repo_url):
        from_strings = from_str if isinstance(from_str, list) else [from_str]
        for fstr in from_strings:
            if is_from_str_ok:
                assert MavenCoordinates.from_str(fstr) == coords
            else:
                with pytest.raises(ValueError):
                    MavenCoordinates.from_str(fstr)
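
A minimal sketch of how such a test might be parametrized. The coordinate strings below and the assumed `groupId:artifactId[:packaging]:version` layout are illustrative only, not the fixtures used by the original suite; the same `pytest` and `MavenCoordinates` imports as above are assumed.

@pytest.mark.parametrize('coords_str, is_ok', [
    ('org.example:my-lib:1.0.0', True),        # plain groupId:artifactId:version
    ('org.example:my-lib:jar:1.0.0', True),    # with explicit packaging (assumed form)
    ('not-a-maven-coordinate', False),         # malformed string -> ValueError expected
])
def test_from_str_sketch(coords_str, is_ok):
    # Hypothetical values -- adjust to the real MavenCoordinates behaviour.
    if is_ok:
        assert MavenCoordinates.from_str(coords_str) is not None
    else:
        with pytest.raises(ValueError):
            MavenCoordinates.from_str(coords_str)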
Example #2
    def fetch_maven_artifact(ecosystem, name, version, target_dir):
        """Fetch maven artifact from maven.org."""
        git = Git.create_git(target_dir)
        artifact_coords = MavenCoordinates.from_str(name)
        if not version:
            raise ValueError("No version provided for '%s'" %
                             artifact_coords.to_str())
        artifact_coords.version = version
        if not artifact_coords.is_valid():
            raise NotABugTaskError("Invalid Maven coordinates: {a}".format(
                a=artifact_coords.to_str()))

        maven_url = ecosystem.fetch_url
        artifact_url = urljoin(maven_url, artifact_coords.to_repo_url())
        local_filepath = IndianaJones.download_file(artifact_url, target_dir)
        if local_filepath is None:
            raise NotABugTaskError("Unable to download: %s" % artifact_url)

        local_filename = os.path.split(local_filepath)[1]
        artifact_path = os.path.join(target_dir, local_filename)
        digest = compute_digest(artifact_path)
        if artifact_coords.packaging != 'pom':
            Archive.extract(artifact_path, target_dir)
            if artifact_coords.packaging == 'aar':
                # 'aar' archive contains classes.jar, extract it too into target_dir
                classes_jar_path = os.path.join(target_dir, "classes.jar")
                if os.path.isfile(classes_jar_path):
                    Archive.extract(classes_jar_path, target_dir)
                    os.remove(classes_jar_path)

        git.add_and_commit_everything()
        return digest, artifact_path
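
A rough sketch of how the artifact URL above gets assembled. The repository base and the relative path are made-up examples of the standard Maven layout, standing in for `ecosystem.fetch_url` and `MavenCoordinates.to_repo_url()`.

from urllib.parse import urljoin

# Hypothetical values -- in the task above the base comes from ecosystem.fetch_url
# and the relative path from MavenCoordinates.to_repo_url().
maven_url = 'https://repo1.maven.org/maven2/'
relative_path = 'org/example/my-lib/1.0.0/my-lib-1.0.0.jar'

artifact_url = urljoin(maven_url, relative_path)
# 'https://repo1.maven.org/maven2/org/example/my-lib/1.0.0/my-lib-1.0.0.jar'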
Example #3
    def _get_downstream_mvn_pkgs(self, eco, pkg):
        packages = []
        self.log.info('Searching for {pkg} in maven repo {repo}...'.format(
            pkg=pkg, repo=RH_MVN_GA_REPO))
        ga = MavenCoordinates.from_str(pkg).to_repo_url(ga_only=True)
        result = requests.get('{repo}/{pkg}'.format(repo=RH_MVN_GA_REPO,
                                                    pkg=ga))
        if result.status_code != 200:
            self.log.info(
                'Package {pkg} not found in {repo} (status code {code})'.
                format(pkg=pkg, repo=RH_MVN_GA_REPO, code=result.status_code))
        else:
            self.log.info('Found {pkg} in {repo}'.format(pkg=pkg,
                                                         repo=RH_MVN_GA_REPO))
            packages.append(pkg)
        return RH_MVN_DISTRO_NAME, packages
    def _get_dependency_data(self, dependencies, ecosystem):
        dependency_data_list = list()
        self.log.debug("Dependencies are: {}".format(dependencies))
        for dependency in dependencies:
            self.log.info("Analyzing dependency: {}".format(dependency))
            artifact_coords = MavenCoordinates.from_str(dependency)
            qstring = ("g.V().has('pecosystem','" + ecosystem +
                       "').has('pname','" + artifact_coords.groupId + ":" +
                       artifact_coords.artifactId + "')"
                       ".has('version','" + artifact_coords.version + "').")
            qstring += (
                "as('version').in('has_version').as('package').dedup()." +
                "select('version','package').by(valueMap());")
            payload = {'gremlin': qstring}
            try:
                graph_req = get_session_retry().post(GREMLIN_SERVER_URL_REST,
                                                     data=json.dumps(payload))
                if graph_req.status_code == 200:
                    graph_resp = graph_req.json()
                    data = graph_resp.get('result', {}).get('data')
                    if data:
                        version_data = self.parse_version_data(
                            data[0].get('version'))
                        package_data = self.parse_package_data(
                            data[0].get('package'))
                        dependency_data = version_data.copy()
                        dependency_data.update(package_data)
                        dependency_data_list.append(dependency_data)
                else:
                    self.log.error("Failed retrieving dependency data.")
                    continue
            except Exception:
                self.log.exception("Error retrieving dependency data.")
                continue

        self.log.debug(
            "Dependency data list is: {}".format(dependency_data_list))
        return dependency_data_list
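
    # For reference, the payload posted above has the shape
    #     {"gremlin": "g.V().has('pecosystem','<ecosystem>')"
    #                 ".has('pname','<groupId>:<artifactId>')"
    #                 ".has('version','<version>')"
    #                 ".as('version').in('has_version').as('package').dedup()"
    #                 ".select('version','package').by(valueMap());"}
    # and a successful reply is expected to look roughly like
    #     {"result": {"data": [{"version": {...}, "package": {...}}]}},
    # which is what parse_version_data()/parse_package_data() receive.
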
    def _add_mvn_results(self, result_summary, anitya_mvn_names, version):
        def _compare_version(downstream, upstream):
            dv = downstream
            if 'redhat' in dv:
                # remove ".redhat-X" or "-redhat-X" suffix
                dv = dv[:dv.find('redhat') - 1]
            return dv == upstream

        downstream_rebuilds = []

        for name in anitya_mvn_names:
            ga = MavenCoordinates.from_str(name).to_repo_url(ga_only=True)
            metadata_url = '{repo}/{pkg}/maven-metadata.xml'.format(repo=RH_MVN_GA_REPO,
                                                                    pkg=ga)
            res = requests.get(metadata_url)
            if res.status_code != 200:
                self.log.info('Metadata for package {pkg} not found in {repo} (status {code})'.
                              format(pkg=name, repo=RH_MVN_GA_REPO, code=res.status_code))
                continue
            versions = anymarkup.parse(res.text)['metadata']['versioning']['versions']['version']
            # make sure 'versions' is a list (it's a string if there is just one version)
            if not isinstance(versions, list):
                versions = [versions]
            self.log.info('Found versions {v} for package {p}'.format(v=versions, p=name))
            for v in versions:
                if _compare_version(v, version):
                    downstream_rebuilds.append(v)

        result_summary['rh_mvn_matched_versions'] = downstream_rebuilds
        if downstream_rebuilds:
            # For now, we don't distinguish products, we just use general "Middleware"
            #  for all Maven artifacts
            result_summary['all_rhsm_product_names'].append('Middleware')
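
For a quick sanity check of the suffix stripping in `_compare_version`, with made-up version strings:

# Illustrative values only -- not taken from the project's data.
dv = '1.2.3.redhat-00001'
dv = dv[:dv.find('redhat') - 1]   # drop the '.redhat-00001' (or '-redhat-N') suffix
assert dv == '1.2.3'              # now comparable to the upstream version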