def _get_debian_distnames(unstable=True, obsolete=False, generic=False):
    ref_site = u'https://wiki.debian.org/DebianReleases'

    # Names added statically are continually in use by the Debian project
    dist_names = []

    if generic:
        if unstable:
            dist_names.append(u'unstable')

        dist_names.append(u'testing')
        dist_names.append(u'stable')

        if obsolete:
            dist_names.append(u'oldstable')

    # NOTE: 'stretch' & 'sid' names are currently used for testing & unstable,
    #       but this could change in the future.
    if unstable:
        dist_names.append(u'sid')
        dist_names.append(u'stretch')

    page_html = GetRemotePageText(ref_site).split(u'\n')

    if page_html:
        # Only add up to max_dists to list
        max_dists = 6
        dists_added = 0

        for INDEX in range(len(page_html)):
            LINE = page_html[INDEX].lower()

            if u'<p class="line862">' in LINE and LINE.strip().endswith(u'</td>'):
                stable_version = LINE.split(u'</td>')[0].split(u'>')[-1].strip()

                if StringIsVersioned(stable_version):
                    dist_names.append(
                        page_html[INDEX + 1].split(u'</a>')[0].split(u'>')[-1].lower().strip())
                    dists_added += 1

                    if dists_added >= max_dists:
                        break

                    # First name found should be current stable version
                    if not obsolete:
                        break

    return dist_names
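

# The scrapers in this module assume project helpers named GetRemotePageText
# (fetch a page and return its text) and StringIsVersioned (test whether a
# string looks like a version number). The function below is only a minimal
# sketch of what a fetch helper might look like, built on the standard library;
# the name _example_fetch_page_text and the 10-second timeout are assumptions
# for illustration, not part of the project's API.
def _example_fetch_page_text(url, timeout=10):
    try:
        # Python 3 location of urlopen
        from urllib.request import urlopen
    except ImportError:
        # Python 2 fallback
        from urllib2 import urlopen

    try:
        response = urlopen(url, timeout=timeout)
        # Decode to text so callers can split on u'\n' as the scrapers do
        return response.read().decode(u'utf-8', u'replace')
    except Exception:
        # Return an empty string on failure, which the scrapers appear to expect
        return u''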


def _get_mint_distnames():
    ref_site = u'https://www.linuxmint.com/download_all.php'

    page_html = GetRemotePageText(ref_site).split(u'\n')

    dist_names = []

    if page_html:
        for INDEX in range(len(page_html)):
            LINE = page_html[INDEX].lower()

            if u'href="release.php?id=' in LINE:
                name = LINE.split(u'</a>')[0].split(u'>')[-1].strip()

                if name and not StringIsVersioned(name) and name not in dist_names:
                    dist_names.append(name)

    return dist_names
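

# Usage sketch for the Mint scraper above: each call performs an HTTP request,
# so a caller may want to cache the result for the session. The wrapper name
# _example_cached_mint_names and the module-level cache list are assumptions
# for illustration, not existing project code.
_example_mint_cache = []

def _example_cached_mint_names():
    # Scrape once, then serve the cached copy on later calls
    if not _example_mint_cache:
        _example_mint_cache.extend(_get_mint_distnames())

    return list(_example_mint_cache)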


def _get_ubuntu_distnames(unstable=True, obsolete=False):
    ref_site = u'https://wiki.ubuntu.com/Releases'

    page_html = GetRemotePageText(ref_site).split(u'\n')

    dist_names = []
    current = []

    if unstable:
        future = []

    if obsolete:
        eol = []

    if page_html:
        for INDEX in range(len(page_html)):
            LINE = page_html[INDEX].lower()

            if u'id="current"' in LINE and len(current) < 2:
                current.append(INDEX + 8)
                continue

            if u'id="future"' in LINE:
                if len(current) < 2:
                    current.append(INDEX)

                if unstable and len(future) < 2:
                    future.append(INDEX + 8)

                continue

            if u'id="end_of_life"' in LINE:
                if unstable and len(future) < 2:
                    future.append(INDEX)

                if obsolete and len(eol) < 2:
                    eol.append(INDEX + 8)
                    eol.append(len(page_html) - 1)

                break

    # Lines containing these strings will be ignored
    skip_lines = (
        u'releasenotes',
        u'class="http',
        )

    # Add names in order of newest first
    if unstable and len(future) > 1:
        future = page_html[future[0]:future[1]]

        for LINE in future:
            LINE = LINE.lower()

            if u'class="line891"' in LINE and not Contains(LINE, skip_lines):
                name = LINE.split(u'</a>')[0].split(u'>')[-1].strip().split(u' ')[0]

                if name and name not in dist_names:
                    dist_names.append(name)

    if len(current) > 1:
        current = page_html[current[0]:current[1]]

        for LINE in current:
            LINE = LINE.lower()

            if u'class="line891"' in LINE and not Contains(LINE, skip_lines):
                name = LINE.split(u'</a>')[0].split(u'>')[-1].strip().split(u' ')[0]

                if name and name not in dist_names:
                    dist_names.append(name)

    if obsolete and len(eol) > 1:
        eol = page_html[eol[0]:eol[1]]

        # Maximum number of obsolete dists that will be added
        eol_max = 6
        eol_added = 0

        for LINE in eol:
            LINE = LINE.lower()

            if u'class="line891"' in LINE and not Contains(LINE, skip_lines):
                name = LINE.split(u'</a>')[0].split(u'>')[-1].strip().split(u' ')[0]

                if name and name not in dist_names:
                    dist_names.append(name)
                    eol_added += 1

                    if eol_added >= eol_max:
                        break

    return dist_names
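

# The three private scrapers are typically most useful combined into one
# de-duplicated list. The function below is a sketch of such a wrapper; the
# name _example_get_all_distnames and the ordering choice (Debian, then Ubuntu,
# then Mint) are assumptions for illustration and do not describe an existing
# public function in the project.
def _example_get_all_distnames(unstable=True, obsolete=False):
    names = []
    names.extend(_get_debian_distnames(unstable, obsolete))
    names.extend(_get_ubuntu_distnames(unstable, obsolete))
    names.extend(_get_mint_distnames())

    # Drop duplicates while preserving the order the scrapers produced
    unique_names = []
    for NAME in names:
        if NAME not in unique_names:
            unique_names.append(NAME)

    return unique_names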