def check_sourceforge(info, clean_tag=True):
    """
    Check python module version info via sourceforge url

    Scrapes the sourceforge "best_release" style payload for entries of the
    form "download_url": "<url>/<tag>/download" and extracts <tag>.

    :param info: package yaml info dict (consumed by yaml2url / clean_tags)
    :param clean_tag: normalize tags via clean_tags before returning
    :returns: list of tags, or "" when no response is available
    """
    resp = load_last_query_result(info)
    tags = []
    if resp == "":
        headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64)'}
        url = yaml2url.yaml2url(info)
        print("check_sourceforge, url = " + url)
        resp = get_resp(url, headers=headers)
        if not resp:
            return ""
        data = resp.text
        lines = data.splitlines()
        filter_condition = "\"download_url\": \"" + url
        suffix = "/download\""
        for line in lines:
            if filter_condition in line:
                tag_infos = line.split(',')
                for tag_info in tag_infos:
                    if filter_condition in tag_info:
                        tag = tag_info.strip()
                        tag = tag.replace(filter_condition, "")
                        # BUGFIX: str.strip() removes a *character set*, so
                        # the original strip("/download\"") could also eat
                        # leading/trailing tag characters that happen to be
                        # in {/, d, o, w, n, l, a, "}.  Remove the literal
                        # suffix instead, then drop the leading separator.
                        if tag.endswith(suffix):
                            tag = tag[:-len(suffix)]
                        tag = tag.lstrip("/")
                        tags.append(tag)
    if clean_tag:
        tags = clean_tags(tags, info)
    return tags
def test_get_rubygem_url():
    """Verify that a rubygem package maps to the rubygems versions API url."""
    yaml_text = YAML_DOC.format(version_control="rubygem", src_repo="path")
    info = yaml.load(yaml_text, Loader=yaml.Loader)
    assert yaml2url.yaml2url(info) == "https://rubygems.org/api/v1/versions/path.json"
def test_get_metacpan_url():
    """Verify that a metacpan package maps to its release page url."""
    yaml_text = YAML_DOC.format(version_control="metacpan", src_repo="File-Which")
    info = yaml.load(yaml_text, Loader=yaml.Loader)
    assert yaml2url.yaml2url(info) == "https://metacpan.org/release/File-Which"
def test_get_ftp_url():
    """Verify that a plain ftp repo url gains a trailing slash."""
    yaml_text = YAML_DOC.format(version_control="ftp",
                                src_repo="https://ftp.gnu.org/pub/gnu/mailman")
    info = yaml.load(yaml_text, Loader=yaml.Loader)
    assert yaml2url.yaml2url(info) == "https://ftp.gnu.org/pub/gnu/mailman/"
def test_get_pypi_url():
    """Verify that a pypi package maps to the pypi JSON API url."""
    yaml_text = YAML_DOC.format(version_control="pypi", src_repo="pygments")
    info = yaml.load(yaml_text, Loader=yaml.Loader)
    assert yaml2url.yaml2url(info) == "https://pypi.org/pypi/pygments/json"
def test_get_gitee_url():
    """Verify that a gitee repo maps to its .git clone url."""
    yaml_text = YAML_DOC.format(version_control="gitee", src_repo="openEuler/lcr")
    info = yaml.load(yaml_text, Loader=yaml.Loader)
    assert yaml2url.yaml2url(info) == "https://gitee.com/openEuler/lcr.git"
def test_get_hg_raw_url():
    """Verify that an hg-raw repo maps to its raw-tags endpoint."""
    yaml_text = YAML_DOC.format(version_control="hg-raw",
                                src_repo="http://hg.libsdl.org/SDL")
    info = yaml.load(yaml_text, Loader=yaml.Loader)
    assert yaml2url.yaml2url(info) == "http://hg.libsdl.org/SDL/raw-tags"
def test_get_gnome_url():
    """Verify that a gitlab.gnome repo maps to the GNOME gitlab clone url."""
    yaml_text = YAML_DOC.format(version_control="gitlab.gnome", src_repo="gdm")
    info = yaml.load(yaml_text, Loader=yaml.Loader)
    assert yaml2url.yaml2url(info) == "https://gitlab.gnome.org/GNOME/gdm.git"
def test_get_github_url():
    """Verify that a github repo maps to its .git clone url."""
    yaml_text = YAML_DOC.format(version_control="github", src_repo="pixel/hexedit")
    info = yaml.load(yaml_text, Loader=yaml.Loader)
    assert yaml2url.yaml2url(info) == "https://github.com/pixel/hexedit.git"
def test_get_git_url():
    """Verify that a plain git repo url passes through unchanged."""
    yaml_text = YAML_DOC.format(version_control="git",
                                src_repo="git://sourceware.org/git/glibc.git")
    info = yaml.load(yaml_text, Loader=yaml.Loader)
    assert yaml2url.yaml2url(info) == "git://sourceware.org/git/glibc.git"
def test_get_svn_url():
    """Verify that an svn repo maps to its tags directory url."""
    yaml_text = YAML_DOC.format(version_control="svn",
                                src_repo="https://svn.apache.org/repos/asf/apr/apr")
    info = yaml.load(yaml_text, Loader=yaml.Loader)
    assert yaml2url.yaml2url(info) == "https://svn.apache.org/repos/asf/apr/apr/tags"
def test_get_hg_url():
    """Verify that an hg repo maps to its json-tags endpoint."""
    yaml_text = YAML_DOC.format(version_control="hg",
                                src_repo="https://hg.mozilla.org/projects/python-nss")
    info = yaml.load(yaml_text, Loader=yaml.Loader)
    assert yaml2url.yaml2url(info) == "https://hg.mozilla.org/projects/python-nss/json-tags"
def test_get_gnu_ftp_url():
    """Verify that a gnu-ftp package maps to its gnu ftp directory url."""
    yaml_text = YAML_DOC.format(version_control="gnu-ftp", src_repo="bc")
    info = yaml.load(yaml_text, Loader=yaml.Loader)
    assert yaml2url.yaml2url(info) == "https://ftp.gnu.org/gnu/bc/"
def check_svn(info, clean_tag=True):
    """
    Check version info via svn

    Reuses a cached query result when one exists; otherwise queries the
    svn repo and records the raw response on info["last_query"].

    :param info: package yaml info dict
    :param clean_tag: normalize tags via clean_tags before returning
    :returns: list of tags
    """
    cached = load_last_query_result(info)
    repo_url = yaml2url.yaml2url(info)
    if cached == "":
        cached = __check_svn_helper(repo_url)
        info["last_query"] = {
            "time_stamp": datetime.now(),
            "raw_data": cached,
        }
    tags = __svn_resp_to_tags(cached)
    return clean_tags(tags, info) if clean_tag else tags
def _clone_repo(pkg_info):
    """
    Clone repo to local

    Only git-based repos are supported; any pre-existing checkout directory
    is removed first.

    :param pkg_info: package yaml info dict
    :returns: the checkout directory name, or None on failure
    """
    repo_url = yaml2url.yaml2url(pkg_info)
    if not (repo_url and pkg_info["version_control"].startswith("git")):
        print("WARNING: Patch matching only support for git repo.")
        return None
    # BUGFIX: the original split(".")[0] truncated repo names containing a
    # dot (e.g. "SDL2.0.git" -> "SDL2"); strip only the ".git" suffix.
    dir_pkg = os.path.basename(repo_url)
    if dir_pkg.endswith(".git"):
        dir_pkg = dir_pkg[:-len(".git")]
    if os.path.exists(dir_pkg):
        shutil.rmtree(dir_pkg, ignore_errors=True)
    print("git clone {url}".format(url=repo_url))
    # Pass an argument list without a shell: avoids shell injection through
    # repo_url and quoting issues.
    subprocess.call(["git", "clone", repo_url])
    if os.path.exists(dir_pkg):
        return dir_pkg
    # Also fixes the "WARING" typo in the original message.
    print("WARNING: Clone failed, {} not exist.".format(dir_pkg))
    return None
def check_gnu_ftp(info, clean_tag=True):
    """
    Check version info via compare ftp release tar file for gnu

    Lists the gnu ftp directory page and collects anchor entries whose href
    equals the link text.

    :param info: package yaml info dict
    :param clean_tag: normalize tags via clean_tags before returning
    :returns: list of tags, or "" when the directory cannot be fetched
    """
    headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64)'}
    url = yaml2url.yaml2url(info)
    eprint("{repo} > List ftp directory".format(repo=url))
    resp = get_resp(url, headers=headers)
    if not resp:
        return ""
    anchor_pattern = re.compile("href=\"(.*)\">(\\1)</a>")
    tags = []
    for line in resp.text.splitlines():
        match = anchor_pattern.search(line)
        if match:
            tags.append(match[1])
    return clean_tags(tags, info) if clean_tag else tags
def get_url(repo_file):
    """
    Get url of given package

    Loads the package yaml file and converts it to its upstream url.

    :param repo_file: path to the package yaml file
    :returns: upstream url string, or None when the file is missing or empty
    """
    try:
        # BUGFIX: the original left the file handle open; a context manager
        # closes it deterministically.  The original "if repo_yaml:" branch
        # was dead code — open() either succeeds or raises.
        with open(repo_file) as repo_yaml:
            pkg_info = yaml.load(repo_yaml, Loader=yaml.Loader)
    except FileNotFoundError:
        print("WARNING: {} can't be found in local path.".format(repo_file))
        return None
    if not pkg_info:
        print("WARNING: load {} yaml fail".format(repo_file))
        return None
    return yaml2url.yaml2url(pkg_info)
def check_metacpan(info, clean_tag=True):
    """
    Check perl module version info via metacpan api

    Scrapes the release page: a line containing value="/release marks the
    next line as a tag value.  DEV releases are skipped.

    :param info: package yaml info dict
    :param clean_tag: normalize tags via clean_tags before returning
    :returns: list of tags, or "" on failure / no tags found
    """
    resp = load_last_query_result(info)
    if resp == "":
        headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64)'}
        url = yaml2url.yaml2url(info)
        print(url)
        resp = get_resp(url, headers=headers)
        if not resp:
            return ""
        resp = resp.text
    tags = []
    tag_list = resp.splitlines()
    condition = "value=\"/release"
    last_index = len(tag_list) - 1
    index = 0
    # BUGFIX: the original used "for index in range(...)" and then wrote
    # "index = index + 1" after consuming the following line; reassigning
    # the loop variable of a range-for has no effect, so the consumed value
    # line was never actually skipped.  A while loop makes the skip real.
    while index < last_index:
        if condition in tag_list[index]:
            tag = tag_list[index + 1]
            index += 2  # the value line has been consumed
            if 'DEV' in tag:
                continue
            tags.append(tag.strip())
        else:
            index += 1
    if not tags:
        eprint("{repo} found unsorted on cpan.metacpan.org".format(
            repo=info["src_repo"]))
        return ""
    last_query = {"time_stamp": datetime.now(), "raw_data": resp}
    info["last_query"] = last_query
    if clean_tag:
        tags = clean_tags(tags, info)
    return tags
def check_github(info, clean_tag=True):
    """
    Check version info via github api

    A cached result is reused only when the previous query was also a
    git-ls query; otherwise the repo is queried afresh and the raw response
    is cached on info["last_query"].

    :param info: package yaml info dict
    :param clean_tag: normalize tags via clean_tags before returning
    :returns: list of tags
    """
    cached = load_last_query_result(info)
    if info.get("query_type", "git-ls") != "git-ls":
        cached = ""
    repo_url = yaml2url.yaml2url(info)
    if cached == "":
        cached = __check_git_helper(repo_url)
        info["last_query"] = {
            "time_stamp": datetime.now(),
            "raw_data": cached,
        }
        info["query_type"] = "git-ls"
    tags = __git_resp_to_tags(cached)
    return clean_tags(tags, info) if clean_tag else tags
def check_hg(info, clean_tag=True):
    """
    Check hg version info via json

    Fetches the repo's json-tags endpoint (following the site's cookie
    redirect trick when needed) and returns tags sorted newest-first by
    their date field.

    :param info: package yaml info dict
    :param clean_tag: normalize tags via clean_tags before returning
    :returns: list of tags, or "" on failure
    """
    eprint("{repo} > Using hg json-tags".format(repo=info["src_repo"] + "/json-tags"))
    resp = load_last_query_result(info)
    if resp == "":
        headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64)'}
        url = yaml2url.yaml2url(info)
        resp = get_resp(url, headers=headers)
        if not resp:
            return ""
        resp = resp.text
        need_trick, url, cookies = dirty_redirect_tricks(url, resp)
        if need_trick:
            # I dont want to introduce another dependency on requests
            # but urllib handling cookie is outragely complex
            c_dict = {}
            for cookie in cookies:
                # BUGFIX: maxsplit=1 — cookie *values* may legally contain
                # '=' (e.g. base64 padding), which made the 2-tuple unpack
                # raise ValueError.
                key, value = cookie.split('=', 1)
                c_dict[key] = value
            resp = get_resp(url, headers=headers, cookies=c_dict)
            if not resp:
                return ""
            resp = resp.text
    last_query = {"time_stamp": datetime.now(), "raw_data": resp}
    info["last_query"] = last_query
    # try and except ?
    tags_json = json.loads(resp)
    sort_tags = tags_json["tags"]
    # sort by the first element of the date field, newest first
    sort_tags.sort(reverse=True, key=lambda x: x['date'][0])
    result_list = [tag['tag'] for tag in sort_tags]
    if clean_tag:
        result_list = clean_tags(result_list, info)
    return result_list
def get_oe_repo_dict(cwd_path, use_cache, sig):
    """
    get oe repo list from sigs.yaml

    Builds a {repo_name: upstream_url} mapping for src-openeuler repos,
    optionally restricted to one sig and optionally reusing last run's
    recorded urls.

    :param cwd_path: working directory passed to get_pkg_info
    :param use_cache: reuse urls recorded by the previous run when True
    :param sig: restrict to this sig's repos; all sigs when falsy
    :returns: dict mapping repo name to upstream url ('none' when unknown)
    """
    logging.debug("begin to query oe.")
    my_gitee = gitee.Gitee()
    if sig:
        repo_names = my_gitee.get_repos_by_sig(sig)
    else:
        repo_names = []
        logging.info('start to get sigs info.')
        for sig_name in my_gitee.get_sigs().keys():
            sig_repos = my_gitee.get_repos_by_sig(sig_name)
            repo_names.extend(sig_repos)
            logging.info('sig: %s -> repo: %s', sig_name, sig_repos)
    logging.info("repo need to check: %s", repo_names)
    cached_urls = {}
    if use_cache:
        cached_urls = read_pkginfo_lasttime()
        if len(cached_urls) == 0:
            logging.info("last recorder not exist.")
    oe_repo_dict = {}
    for name in repo_names:
        repo_url = cached_urls.get(name, None)
        if repo_url:
            logging.info("%s has record.", name)
        else:
            pkginfo = get_pkg_info(my_gitee, name, cwd_path)
            if pkginfo:
                repo_url = yaml2url.yaml2url(pkginfo)
        if not repo_url:
            repo_url = 'none'
        oe_repo_dict[name] = repo_url
    logging.info("total %d repositories in src-openeuler", len(oe_repo_dict))
    record_pkginfo(oe_repo_dict)
    return oe_repo_dict
def check_rubygem(info, clean_tag=True):
    """
    Check ruby module version info via rubygem api

    Fetches the gem's versions JSON and collects each release's "number"
    field as a tag.

    :param info: package yaml info dict
    :param clean_tag: normalize tags via clean_tags before returning
    :returns: list of tags, or "" on failure
    """
    resp = load_last_query_result(info)
    tags = []
    if resp == "":
        headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64)'}
        resp = get_resp(yaml2url.yaml2url(info), headers=headers)
        if not resp:
            return ""
        tags = [release["number"] for release in resp.json()]
    if not tags:
        eprint("{repo} > No Response or JSON parse failed".format(
            repo=info["src_repo"]))
        return ""
    return clean_tags(tags, info) if clean_tag else tags
def check_hg_raw(info, clean_tag=True):
    """
    Check hg version info via raw-tags

    Fetches the repo's raw-tags endpoint (following the site's cookie
    redirect trick when needed); each non-empty line's first field is a tag.

    :param info: package yaml info dict
    :param clean_tag: normalize tags via clean_tags before returning
    :returns: list of tags, or "" on failure
    """
    eprint("{repo} > Using hg raw-tags".format(repo=info["src_repo"] + "/raw-tags"))
    resp = load_last_query_result(info)
    if resp == "":
        headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64)'}
        url = yaml2url.yaml2url(info)
        resp = get_resp(url, headers=headers)
        if not resp:
            return ""
        resp = resp.text
        need_trick, url, cookies = dirty_redirect_tricks(url, resp)
        if need_trick:
            # I dont want to introduce another dependency on requests
            # but urllib handling cookie is outragely complex
            c_dict = {}
            for cookie in cookies:
                # BUGFIX: maxsplit=1 — cookie values may legally contain
                # '=' (e.g. base64 padding), which made the 2-tuple unpack
                # raise ValueError.
                key, value = cookie.split('=', 1)
                c_dict[key] = value
            resp = get_resp(url, headers=headers, cookies=c_dict)
            if not resp:
                return ""
            resp = resp.text
    last_query = {"time_stamp": datetime.now(), "raw_data": resp}
    info["last_query"] = last_query
    tags = []
    for line in resp.splitlines():
        fields = line.split()
        # guard against blank lines, which made line.split()[0] raise
        if fields:
            tags.append(fields[0])
    if clean_tag:
        tags = clean_tags(tags, info)
    return tags