def Parse(self, path):
    """Parse a JSON query-result dump into a list of packages.

    One Package is generated per (package name, version) pair; duplicate
    entries are expected to be merged downstream. Versions carry
    pipe-separated flags: 'U' marks a devel/unstable release, 'O'/'L'
    mark OS applicability (non-linux releases are skipped).
    NOTE(review): relies on external SimplifyResult helper — field
    semantics are inferred from this code only.
    """
    result = []
    jsondata = None
    with open(path, 'r', encoding='utf-8') as jsonfile:
        jsondata = json.load(jsonfile)
    for packagedata in SimplifyResult(jsondata):
        entity = packagedata['project'].rsplit('/', 1)[-1]  # this is URL, take only the ID from it

        # use Arch and AUR package names as a name, as they are most non-ambigous
        names = []
        for field in ['arch_packages', 'aur_packages']:
            if packagedata[field]:
                names = packagedata[field].split(', ')
                break

        # generate a package for each package name; these will be merged anyway
        for name in set(names):
            # generate a package for each version
            for version in packagedata['versions'].split(', '):
                # version string is 'version|FLAG|FLAG|...'
                version, *flags = version.split('|')
                is_devel = 'U' in flags
                is_foreign_os_release = 'O' in flags and 'L' not in flags

                if is_foreign_os_release:
                    print(
                        'WARNING: {} ({}) version {} skipped as non-linux release'
                        .format(packagedata['projectLabel'], entity, version),
                        file=sys.stderr)
                    continue

                pkg = Package()

                pkg.SetFlag(PackageFlags.devel, is_devel)

                pkg.name = entity
                pkg.effname = name
                pkg.version = version

                # prefer description over label when available
                if 'projectDescription' in packagedata:
                    pkg.comment = packagedata['projectDescription']
                else:
                    pkg.comment = packagedata['projectLabel']

                if packagedata['licenses']:
                    pkg.licenses = packagedata['licenses'].split(', ')

                if packagedata['websites']:
                    pkg.homepage = packagedata['websites'].split(', ')[0]  # XXX: use all websites when supported

                result.append(pkg)

    return result
def Parse(self, path):
    """Parse a 12-field pipe-separated INDEX file into a list of packages."""
    packages = []

    with open(path, encoding='utf-8') as index:
        for rawline in index:
            cols = rawline.strip().split('|')
            if len(cols) != 12:
                print('WARNING: package {} skipped, incorrect number of fields in INDEX'.format(cols[0]), file=sys.stderr)
                continue

            pkg = Package()
            pkg.name, rawversion = SplitPackageNameVersion(cols[0])
            pkg.version, pkg.origversion = SanitizeVersion(rawversion)
            pkg.comment = cols[3]
            if cols[11]:
                pkg.homepage = cols[11]

            # sometimes OWNER variable is used in which case
            # there's no MAINTAINER; OWNER doesn't get to INDEX
            pkg.maintainers = GetMaintainers(cols[5])
            pkg.category = cols[6].split(' ')[0]

            packages.append(pkg)

    return packages
def Parse(self, path):
    """Parse a Ravenports JSON dump into a list of packages."""
    with open(path, 'r', encoding='utf-8') as jsonfile:
        data = json.load(jsonfile)

    packages = []
    for port in data['ravenports']:
        # only the first variant contributes metadata
        variant = port['variants'][0]

        pkg = Package()
        pkg.name = port['namebase']
        pkg.version = port['version']
        pkg.category = port['keywords'][0]
        if 'homepage' in port:
            pkg.homepage = port['homepage']
        pkg.downloads = port['distfile']
        pkg.comment = variant['sdesc']
        pkg.extrafields['bucket'] = port['bucket']
        pkg.extrafields['variant'] = variant['label']

        packages.append(pkg)

    return packages
def ParsePackage(self, fmri, pkgdata):
    """Build a Package from IPS manifest 'set' actions.

    Each action looks like 'set name=KEY value=V [value=V ...]'; the
    collected variables are mapped onto Package fields. Returns the
    Package, or None when mandatory fields are missing.
    """
    variables = {}
    for action in pkgdata['actions']:
        tokens = shlex.split(action)

        if not tokens or tokens.pop(0) != 'set':
            print('WARNING: unrecognized action ' + action, file=sys.stderr)
            continue

        key = None
        value = []

        for token in tokens:
            if token.startswith('name='):
                key = token[5:]
            elif token.startswith('value='):
                value.append(token[6:])
            elif token.startswith('last-fmri='):
                pass  # deliberately ignored
            else:
                print('WARNING: unrecognized token ' + token, file=sys.stderr)
                continue  # NOTE(review): continues the token loop, not the action loop

        if key and value:
            variables[key] = value

    pkg = Package()
    pkg.extrafields['fmri'] = fmri

    if 'com.oracle.info.name' in variables:
        pkg.name = variables['com.oracle.info.name'][0]
    if 'com.oracle.info.version' in variables:
        pkg.version = variables['com.oracle.info.version'][0]
    if 'pkg.summary' in variables:
        pkg.comment = variables['pkg.summary'][0]
    if 'info.classification' in variables:
        pkg.category = variables['info.classification'][0]
        # strip the 2008 category scheme prefix
        if pkg.category.startswith('org.opensolaris.category.2008:'):
            pkg.category = pkg.category.split(':', 1)[1]
    if 'info.upstream-url' in variables:
        pkg.homepage = variables['info.upstream-url'][0]
    if 'info.source-url' in variables:
        pkg.downloads = variables['info.source-url']

    # Regarding comment requirement: there are some packages which lack it,
    # however for ALL of them is a counterpart with comment and some
    # additional fields (category, homepage, downloads). Packages without
    # comment look like legacy, and it's OK and desirable to drop them here
    if pkg.name and pkg.version and pkg.comment:
        return pkg

    return None
def Parse(self, path):
    """Parse a 13-field pipe-separated INDEX file into a list of packages."""
    packages = []

    with open(path, encoding='utf-8') as index:
        for rawline in index:
            cols = rawline.strip().split('|')
            if len(cols) != 13:
                print(
                    'WARNING: package {} skipped, incorrect number of fields in INDEX'
                    .format(cols[0]),
                    file=sys.stderr)
                continue

            pkg = Package()
            pkg.name, rawversion = SplitPackageNameVersion(cols[0])
            pkg.version, pkg.origversion = SanitizeVersion(rawversion)
            pkg.comment = cols[3]
            pkg.maintainers = GetMaintainers(cols[5])
            pkg.category = cols[6].split(' ')[0]
            if cols[9]:
                pkg.homepage = cols[9]

            packages.append(pkg)

    return packages
def Parse(self, path):
    """Parse AUR RPC JSON page dumps from a directory."""
    packages = []

    for filename in os.listdir(path):
        if not filename.endswith('.json'):
            continue

        with open(os.path.join(path, filename), 'r') as jsonfile:
            entries = json.load(jsonfile)['results']

        for entry in entries:
            pkg = Package()
            pkg.name = entry['Name']
            pkg.version, pkg.origversion = SanitizeVersion(entry['Version'])
            pkg.comment = entry['Description']
            pkg.homepage = entry['URL']

            if 'License' in entry:
                pkg.licenses.extend(entry['License'])

            # AUR maintainers get an @aur suffix
            if 'Maintainer' in entry and entry['Maintainer']:
                pkg.maintainers += extract_maintainers(entry['Maintainer'] + '@aur')

            packages.append(pkg)

    return packages
def Parse(self, path):
    """Parse ports described by CONTROL files under ports/<name>/.

    Date-like versions (YYYY.MM.DD style) are kept verbatim but marked
    with ignoreversion so they are not compared as real upstream versions.
    """
    packages = []

    for pkgdir in os.listdir(os.path.join(path, 'ports')):
        controlpath = os.path.join(path, 'ports', pkgdir, 'CONTROL')

        if not os.path.exists(controlpath):
            continue

        pkg = Package(name=pkgdir)

        with open(controlpath, 'r', encoding='utf-8', errors='ignore') as controlfile:
            for line in controlfile:
                line = line.strip()

                if line.startswith('Version:'):
                    version = line[8:].strip()
                    # date used as a version, e.g. 2018.05.01 or 2018-5-1
                    match = re.match('[0-9]{4}[.-][0-9]{1,2}[.-][0-9]{1,2}', version)
                    if match:
                        pkg.version = version
                        pkg.ignoreversion = True
                    else:
                        pkg.version, pkg.origversion = SanitizeVersion(line[8:].strip())
                elif line.startswith('Description:'):
                    comment = line[12:].strip()
                    if comment:
                        pkg.comment = comment

        if not pkg.version:
            print('WARNING: unable to parse port {}: no version'.format(pkgdir), file=sys.stderr)
            continue

        packages.append(pkg)

    return packages
def Parse(self, path):
    """Parse a JSON dump with a top-level 'packages' array."""
    with open(path, 'r', encoding='utf-8') as jsonfile:
        data = json.load(jsonfile)

    packages = []
    for entry in data['packages']:
        pkg = Package()
        pkg.name = entry['name']

        # entries without a version cannot be used
        if entry['version'] is None:
            print('no version: {}'.format(pkg.name), file=sys.stderr)
            continue

        pkg.version, _ = SanitizeVersion(entry['version'])
        pkg.origversion = entry['full_version']
        pkg.category = entry['pkg_section'] or entry['section']
        pkg.comment = entry['description']
        pkg.maintainers = GetMaintainers(entry['committer'])

        if pkg.version == '999':
            pkg.SetFlag(PackageFlags.ignore)  # XXX: rolling? revisit

        packages.append(pkg)

    return packages
def Parse(self, path):
    """Parse repodata primary.xml metadata into a list of packages."""
    # hoist the repeated Clark-notation namespace prefixes
    NS_COMMON = '{http://linux.duke.edu/metadata/common}'
    NS_RPM = '{http://linux.duke.edu/metadata/rpm}'

    result = []
    tree = xml.etree.ElementTree.parse(path)

    for entry in tree.findall(NS_COMMON + 'package'):
        pkg = Package()
        pkg.name = entry.find(NS_COMMON + 'name').text
        pkg.version, pkg.origversion = SanitizeVersion(
            entry.find(NS_COMMON + 'version').attrib['ver'])
        pkg.comment = entry.find(NS_COMMON + 'summary').text
        pkg.homepage = entry.find(NS_COMMON + 'url').text
        pkg.category = entry.find(NS_COMMON + 'format/' + NS_RPM + 'group').text
        pkg.licenses.append(entry.find(NS_COMMON + 'format/' + NS_RPM + 'license').text)

        packager = entry.find(NS_COMMON + 'packager').text
        if packager:
            pkg.maintainers = GetMaintainers(packager)

        result.append(pkg)

    return result
def Parse(self, path):
    """Parse an Alpine-style APKINDEX file.

    APKINDEX is a sequence of 'X:value' line records separated by blank
    lines, each record describing one package. Records whose package
    name (P) differs from origin (o) are subpackages and are skipped.

    Bugfix: the original only flushed a record on a blank line, so the
    last package was silently dropped when the file did not end with a
    trailing blank line; the accumulated record is now flushed after
    the loop as well.
    """
    def _make_package(state):
        # Build a Package from one accumulated record;
        # returns None for empty records and subpackages.
        if not state:
            return None
        if state['P'] != state['o']:
            return None

        pkg = Package()
        pkg.name = state['P']
        pkg.version, pkg.origversion = SanitizeVersion(state['V'])
        pkg.comment = state['T']
        pkg.homepage = state['U']  # XXX: switch to homepages, split
        pkg.licenses = [state['L']]
        if 'm' in state:
            pkg.maintainers = GetMaintainers(state['m'])
        return pkg

    packages = []

    with open(os.path.join(path, 'APKINDEX'), 'r', encoding='utf-8') as apkindex:
        state = {}
        for line in apkindex:
            line = line.strip()
            if line:
                # 'X:value' -> state['X'] = 'value'
                state[line[0]] = line[2:].strip()
                continue

            pkg = _make_package(state)
            state = {}
            if pkg is not None:
                packages.append(pkg)

        # flush the final record (file may lack a trailing blank line)
        pkg = _make_package(state)
        if pkg is not None:
            packages.append(pkg)

    return packages
def Parse(self, path):
    """Parse an rpm header list file (via the rpm module) into packages."""
    result = []

    for header in rpm.readHeaderListFromFile(path):
        # decode the needed header fields from bytes using the
        # parser-configured encoding; missing fields stay None
        fields = {
            key: str(header[key], self.encoding) if header[key] is not None else None
            for key in ['name', 'version', 'release', 'packager', 'group', 'summary']
        }

        pkg = Package()

        pkg.name = fields['name']
        pkg.version = fields['version']  # XXX: handle release

        if fields['packager']:
            pkg.maintainers = extract_maintainers(fields['packager'])  # XXX: may have multiple maintainers

        pkg.category = fields['group']
        pkg.comment = fields['summary']

        result.append(pkg)

    return result
def iter_parse(self, path):
    """Yield packages from a 12-field pipe-separated INDEX file.

    Generator variant: packages are yielded one at a time instead of
    being collected into a list.
    """
    with open(path, encoding='utf-8') as indexfile:
        for line in indexfile:
            fields = line.strip().split('|')
            if len(fields) != 12:
                print(
                    'WARNING: package {} skipped, incorrect number of fields in INDEX'
                    .format(fields[0]),
                    file=sys.stderr)
                continue
            if not fields[0]:
                print('WARNING: line {} bogus, critical fields are empty'.
                      format(line.strip()),
                      file=sys.stderr)
                continue

            pkg = Package()
            # PKGNAME is 'name-version'; split at the last dash
            pkg.name, version = fields[0].rsplit('-', 1)
            pkg.version, pkg.origversion = SanitizeVersion(version)
            pkg.comment = fields[3]
            if fields[11]:
                pkg.homepage = fields[11]

            # sometimes OWNER variable is used in which case
            # there's no MAINTAINER; OWNER doesn't get to INDEX
            pkg.maintainers = extract_maintainers(fields[5])
            pkg.category = fields[6].split(' ')[0]

            pkg.extrafields['portname'] = fields[1].split('/')[-1]
            pkg.extrafields['origin'] = fields[1]

            yield pkg
def Parse(self, path):
    """Parse a Termux packages JSON dump into a list of packages."""
    with open(path, 'r', encoding='utf-8') as jsonfile:
        entries = json.load(jsonfile)

    packages = []
    for entry in entries:
        pkg = Package()
        pkg.name = entry['name']
        pkg.version, pkg.origversion = SanitizeVersion(entry['version'])
        pkg.comment = entry['description']
        pkg.homepage = entry['homepage']

        if 'srcurl' in entry:
            pkg.downloads = [entry['srcurl']]

        # a trailing ' @handle' in the maintainer field is converted
        # into a handle in the @termux namespace
        handle = re.search(' @([^ ]+)$', entry['maintainer'])
        if handle:
            pkg.maintainers = [handle.group(1).lower() + '@termux']
        else:
            pkg.maintainers = extract_maintainers(entry['maintainer'])

        packages.append(pkg)

    return packages
def Parse(self, path):
    """Parse a pipe-delimited CSV index into a list of packages.

    NOTE(review): row layout (0=pkgname, 1=origin, 3=comment,
    5=maintainer, 6=categories) is inferred from usage — confirm
    against the producing fetcher.
    """
    result = []

    with open(path, encoding='utf-8') as file:
        reader = csv.reader(file, delimiter='|')
        for row in reader:
            pkg = Package()

            pkgname = row[0]

            # cut away string suffixes which come after version
            # (e.g. trailing '-flavor1' style components)
            match = re.match('(.*?)(-[a-z_]+[0-9]*)+$', pkgname)
            if match is not None:
                pkgname = match.group(1)

            pkg.name, version = SplitPackageNameVersion(pkgname)
            pkg.version, pkg.origversion = SanitizeVersion(version)
            pkg.comment = row[3]
            pkg.maintainers = GetMaintainers(row[5])
            pkg.category = row[6].split(' ')[0].strip()

            # origin may carry a ',flavor' suffix; keep only the origin
            origin = row[1].rsplit(',', 1)[0]
            pkg.extrafields['portname'] = origin.split('/')[1]
            pkg.extrafields['origin'] = origin

            result.append(pkg)

    return result
def Parse(self, path):
    """Parse crates.io API page dumps from a directory."""
    packages = []

    for filename in os.listdir(path):
        if not filename.endswith('.json'):
            continue

        filepath = os.path.join(path, filename)
        with open(filepath, 'r', encoding='utf-8', errors='ignore') as pagefile:
            crates = json.load(pagefile)['crates']

        for crate in crates:
            pkg = Package()
            pkg.name = crate['id']
            pkg.version = crate['max_version']

            if crate['description']:
                pkg.comment = crate['description'].strip()

            # prefer the explicit homepage, fall back to the repository URL
            if crate['homepage']:
                pkg.homepage = crate['homepage']
            elif crate['repository']:
                pkg.homepage = crate['repository']

            packages.append(pkg)

    return packages
def Parse(self, path):
    """Parse package-preview blocks from saved HTML pages in a directory."""
    result = []

    for filename in os.listdir(path):
        if not filename.endswith('.html'):
            continue

        root = None
        with open(os.path.join(path, filename), encoding='utf-8') as htmlfile:
            root = lxml.html.document_fromstring(htmlfile.read())

        for row in root.xpath('.//div[@class="package-preview"]'):
            pkg = Package()

            # header: '<name> <version>' in the h3, synopsis in a nested span
            cell = row.xpath('./h3[@class="package-name"]')[0]
            pkg.name, version = cell.text.split(' ', 1)
            pkg.version, pkg.origversion = SanitizeVersion(version.strip())
            # strip whitespace and the em-dash separator; empty synopsis -> None
            pkg.comment = cell.xpath('./span[@class="package-synopsis"]')[0].text.strip().strip('—').strip() or None

            # details: key/value pairs in the package-info list
            for cell in row.xpath('./ul[@class="package-info"]/li'):
                key = cell.xpath('./b')[0].text
                if key == 'License:':
                    pkg.licenses = [a.text for a in cell.xpath('./a')]
                elif key == 'Website:':
                    pkg.homepage = cell.xpath('./a')[0].attrib['href']
                elif key == 'Package source:':
                    pkg.extrafields['source'] = cell.xpath('./a')[0].text

            result.append(pkg)

    return result
def Parse(self, path):
    """Parse a PyPI HTML listing by regex over table cells.

    Packages are collected in a dict keyed by name, so a later
    occurrence of the same name overwrites an earlier one.

    Idiom fix: return list(dict.values()) instead of a pass-through
    list comprehension.
    """
    packages = {}

    with open(path, 'r', encoding='utf-8') as htmlfile:
        for match in re.findall(
                '<td><a href="/pypi/([^"]+)/([^"]+)">[^<>]*</a></td>[ \n]*<td>([^<>]*)</td>',
                htmlfile.read(), flags=re.MULTILINE):
            pkg = Package()
            pkg.name = match[0]
            pkg.version = match[1]

            # accept only sane single-line, non-empty summaries
            comment = match[2].strip()
            if comment == '':
                print('{}: summary is empty'.format(pkg.name), file=sys.stderr)
            elif '\n' in comment:
                print('{}: summary is multiline'.format(pkg.name), file=sys.stderr)
            else:
                pkg.comment = comment

            pkg.homepage = 'https://pypi.python.org/pypi/{}/{}'.format(match[0], match[1])

            packages[pkg.name] = pkg

    return list(packages.values())
def Parse(self, path):
    """Parse an app catalog JSON reply.

    Raises RuntimeError when the reply does not report success.
    """
    with open(path, 'r', encoding='utf-8') as jsonfile:
        jsondata = json.load(jsonfile)

    if not jsondata['success']:
        raise RuntimeError('non-success json reply, cannot parse')

    result = []
    for app in jsondata['data']['apps']:
        pkg = Package()
        pkg.name = app['title']
        pkg.version = app['version']
        pkg.licenses = [app['license']]

        if 'tagline' in app:
            pkg.comment = app['tagline']
        if 'support' in app:
            pkg.homepage = app['support']

        result.append(pkg)

    return result
def Parse(self, path):
    """Parse a 13-field pipe-separated INDEX file with origin info."""
    packages = []

    with open(path, encoding='utf-8') as index:
        for rawline in index:
            cols = rawline.strip().split('|')
            if len(cols) != 13:
                print(
                    'WARNING: package {} skipped, incorrect number of fields in INDEX'
                    .format(cols[0]),
                    file=sys.stderr)
                continue

            pkg = Package()
            # PKGNAME is 'name-version'; split at the last dash
            pkg.name, rawversion = cols[0].rsplit('-', 1)
            pkg.version, pkg.origversion = SanitizeVersion(rawversion)
            pkg.comment = cols[3]
            pkg.maintainers = extract_maintainers(cols[5])
            pkg.category = cols[6].split(' ')[0]
            if cols[12]:
                pkg.homepage = cols[12]

            origin_parts = cols[1].split('/')
            pkg.extrafields['portname'] = origin_parts[-1]
            pkg.extrafields['origin'] = '/'.join(origin_parts[-2:])

            packages.append(pkg)

    return packages
def Parse(self, path):
    """Parse NuGet OData Atom feed page dumps from a directory."""
    # hoist the repeated Clark-notation namespace prefixes
    ATOM = '{http://www.w3.org/2005/Atom}'
    META = '{http://schemas.microsoft.com/ado/2007/08/dataservices/metadata}'
    DATA = '{http://schemas.microsoft.com/ado/2007/08/dataservices}'

    result = []

    for pagename in os.listdir(path):
        if not pagename.endswith('.xml'):
            continue

        tree = xml.etree.ElementTree.parse(os.path.join(path, pagename))

        for entry in tree.findall(ATOM + 'entry'):
            pkg = Package()
            pkg.name = entry.find(ATOM + 'title').text
            pkg.version = entry.find(META + 'properties/' + DATA + 'Version').text
            pkg.homepage = entry.find(META + 'properties/' + DATA + 'ProjectUrl').text
            pkg.comment = entry.find(ATOM + 'summary').text
            result.append(pkg)

    return result
def Parse(self, path): result = {} # note that we actually parse database prepared by # fetcher, not the file we've downloaded with open(path, 'r', encoding='utf-8') as jsonfile: for entry in json.load(jsonfile)['releases']: pkg = Package() pkg.name = entry['name'] pkg.version = entry['version'] if not pkg.name or not pkg.version: continue homepage = entry.get('homepage') summary = entry.get('summary') description = entry.get('description') #submitter = entry.get('submitter') #download = entry.get('download') license_ = entry.get('license') if homepage: pkg.homepage = homepage if summary: pkg.comment = summary elif description: pkg.comment = description # multiline if license_: pkg.licenses = [license_] # unfiltered garbage #if submitter: # pkg.maintainers = [submitter + '@freshcode'] # ignore for now, may contain download page urls instead of file urls #if download # pkg.downloads = [download] if pkg.name not in result or version_compare( pkg.version, result[pkg.name].version) > 0: result[pkg.name] = pkg return result.values()
def Parse(self, path):
    """Parse the MacPorts index via an external Tcl helper that emits JSON."""
    result = []

    with subprocess.Popen([repology.config.TCLSH, self.helperpath, path],
                          errors='ignore',
                          stdout=subprocess.PIPE,
                          universal_newlines=True) as macportsjson:
        for pkgdata in json.load(macportsjson.stdout):
            pkg = Package()

            pkg.name = pkgdata['name']
            pkg.version = pkgdata['version']

            # drop obsolete ports (see #235)
            if 'replaced_by' in pkgdata:
                continue

            if 'description' in pkgdata:
                pkg.comment = pkgdata['description']

            if 'homepage' in pkgdata:
                pkg.homepage = pkgdata['homepage']

            if 'categories' in pkgdata:
                pkg.category = pkgdata['categories'].split()[0]

            if 'license' in pkgdata:
                pkg.licenses = [pkgdata['license']]  # XXX: properly handle braces

            if 'maintainers' in pkgdata:
                # maintainers arrive as a Tcl list; strip braces, lowercase,
                # then classify each entry
                for maintainer in pkgdata['maintainers'].replace('{', '').replace('}', '').lower().split():
                    if maintainer.startswith('@'):
                        # @foo means github user foo
                        pkg.maintainers.append(maintainer[1:] + '@github')
                    elif '@' in maintainer:
                        # plain email
                        pkg.maintainers.append(maintainer)
                    elif ':' in maintainer:
                        # foo.com:bar means [email protected]
                        host, user = maintainer.split(':', 1)
                        pkg.maintainers.append(user + '@' + host)
                    elif maintainer == 'openmaintainer':
                        # ignore, this is a flag that minor changes to a port
                        # are allowed without involving the maintainer
                        pass
                    else:
                        # otherwise it's [email protected]
                        pkg.maintainers.append(maintainer + '@macports.org')

            pkg.extrafields['portdir'] = pkgdata['portdir']
            pkg.extrafields['portname'] = pkgdata['portdir'].split('/')[1]

            result.append(pkg)

    return result
def Parse(self, path):
    """Parse a Hackage index tree: one directory per module containing
    one directory per version, each with a .cabal file inside.

    The highest version directory wins; cabal metadata is used only when
    its name/version agree with the directory layout.
    """
    packages = []

    for moduledir in os.listdir(path):
        modulepath = os.path.join(path, moduledir)

        cabalpath = None
        maxversion = None

        # pick the highest version directory
        for versiondir in os.listdir(modulepath):
            if versiondir == 'preferred-versions':
                continue

            if maxversion is None or version_compare(versiondir, maxversion) > 0:
                maxversion = versiondir
                cabalpath = os.path.join(path, moduledir, maxversion, moduledir + '.cabal')

        if maxversion is None:
            print('WARNING: cannot determine max version for {}'.format(moduledir), file=sys.stderr)
            continue

        pkg = Package()
        pkg.name = moduledir
        pkg.version = maxversion
        pkg.homepage = 'http://hackage.haskell.org/package/' + moduledir

        # enrich with cabal data only when it passes a sanity check
        # against the directory-derived name and version
        cabaldata = self.ParseCabal(cabalpath)
        if cabaldata['name'] == pkg.name and version_compare(cabaldata['version'], pkg.version) == 0:
            if 'synopsis' in cabaldata and cabaldata['synopsis']:
                pkg.comment = cabaldata['synopsis'].strip()
            if 'maintainer' in cabaldata:
                pkg.maintainers = extract_maintainers(cabaldata['maintainer'])
            if 'license' in cabaldata:
                pkg.licenses = [cabaldata['license']]
            if 'homepage' in cabaldata and (cabaldata['homepage'].startswith('http://') or cabaldata['homepage'].startswith('https://')):
                pkg.homepage = cabaldata['homepage']
            if 'category' in cabaldata:
                pkg.category = cabaldata['category']
        else:
            print(
                'WARNING: cabal data sanity check failed for {}, ignoring cabal data'
                .format(cabalpath),
                file=sys.stderr)

        packages.append(pkg)

    return packages
def ParsePackage(self, pkgpath, pkgdata):
    """Build a Package from IPS manifest 'set' actions.

    Variant keyed by package path instead of fmri; unlike the fmri
    variant, a comment is not required for the package to be accepted.
    Returns the Package, or None when name or version is missing.
    """
    variables = {}
    for action in pkgdata['actions']:
        tokens = shlex.split(action)

        if not tokens or tokens.pop(0) != 'set':
            print('WARNING: unrecognized action ' + action, file=sys.stderr)
            continue

        key = None
        value = []

        for token in tokens:
            if token.startswith('name='):
                key = token[5:]
            elif token.startswith('value='):
                value.append(token[6:])
            elif token.startswith('last-fmri='):
                pass  # deliberately ignored
            else:
                print('WARNING: unrecognized token ' + token, file=sys.stderr)
                continue  # NOTE(review): continues the token loop, not the action loop

        if key and value:
            variables[key] = value

    pkg = Package()
    pkg.extrafields['path'] = pkgpath

    if 'com.oracle.info.name' in variables:
        pkg.name = variables['com.oracle.info.name'][0]
    if 'com.oracle.info.version' in variables:
        pkg.version = variables['com.oracle.info.version'][0]
    if 'pkg.summary' in variables:
        pkg.comment = variables['pkg.summary'][0]
    if 'info.classification' in variables:
        pkg.category = variables['info.classification'][0]
        # strip the 2008 category scheme prefix
        if pkg.category.startswith('org.opensolaris.category.2008:'):
            pkg.category = pkg.category.split(':', 1)[1]
    if 'info.upstream-url' in variables:
        pkg.homepage = variables['info.upstream-url'][0]
    if 'info.source-url' in variables:
        pkg.downloads = variables['info.source-url']

    if pkg.name and pkg.version:
        return pkg

    return None
def Parse(self, path):
    """Parse vcpkg-style ports: CONTROL metadata under ports/<name>/.

    Date-like versions are kept but flagged ignore; ports vendoring
    libimobiledevice-win32 sources are flagged untrusted.
    """
    packages = []

    for pkgdir in os.listdir(os.path.join(path, 'ports')):
        controlpath = os.path.join(path, 'ports', pkgdir, 'CONTROL')

        if not os.path.exists(controlpath):
            continue

        pkg = Package(name=pkgdir)

        with open(controlpath, 'r', encoding='utf-8', errors='ignore') as controlfile:
            for line in controlfile:
                line = line.strip()

                if line.startswith('Version:'):
                    version = line[8:].strip()
                    # date used as a version, e.g. 2018.05.01 or 2018-5-1
                    match = re.match(
                        '[0-9]{4}[.-][0-9]{1,2}[.-][0-9]{1,2}', version)
                    if match:
                        pkg.version = version
                        pkg.SetFlag(PackageFlags.ignore)
                    else:
                        pkg.version, pkg.origversion = SanitizeVersion(
                            line[8:].strip())
                elif line.startswith('Description:'):
                    comment = line[12:].strip()
                    if comment:
                        pkg.comment = comment

        # pretty much a hack to shut a bunch of fake versions up
        portfilepath = os.path.join(path, 'ports', pkgdir, 'portfile.cmake')
        if os.path.exists(portfilepath):
            with open(portfilepath, 'r', encoding='utf-8', errors='ignore') as portfile:
                for line in portfile:
                    if 'libimobiledevice-win32' in line:
                        print(
                            'WARNING: marking version for {} as untrusted, https://github.com/libimobiledevice-win32 accused of version faking'
                            .format(pkg.name),
                            file=sys.stderr)
                        pkg.SetFlag(PackageFlags.untrusted)
                        break

        if not pkg.version:
            print('WARNING: unable to parse port {}: no version'.format(
                pkgdir),
                file=sys.stderr)
            continue

        packages.append(pkg)

    return packages
def parse_package(fields):
    """Build a Package from one CPAN metadata record."""
    pkg = Package()

    pkg.name = ensure_str(fields['distribution'])
    pkg.version = ensure_str(fields['version'])

    # CPAN author id becomes a maintainer in the @cpan namespace
    author = ensure_str(fields['author'])
    pkg.maintainers = [author.lower() + '@cpan']

    pkg.licenses = ensure_list(fields['license'])
    pkg.comment = ensure_str(fields.get('abstract'))
    pkg.homepage = ensure_str(fields.get('resources.homepage'))
    pkg.downloads = ensure_list(fields.get('download_url'))

    return pkg
def Parse(self, path):
    """Parse pacman-style sync database 'desc' files, one per package dir.

    desc format: a '%KEY%' header line, value lines, blank-line
    terminator. Split packages (NAME != BASE) are skipped.

    Bugfix: the original only stored a section on its terminating blank
    line, so the final section was lost when the file did not end with
    a blank line; it is now flushed after the loop as well.
    """
    packages = []

    for packagedir in os.listdir(path):
        with open(os.path.join(path, packagedir, 'desc'), 'r', encoding='utf-8') as descfile:
            key = None
            value = []
            data = {}
            for line in descfile:
                line = line.strip()
                if line.startswith('%') and line.endswith('%'):
                    key = line[1:-1]
                    value = []
                elif line == '':
                    data[key] = value
                else:
                    value.append(line)

            # flush the final section (file may lack a trailing blank line)
            if key is not None and key not in data:
                data[key] = value

        # skip subpackages; only the base package is wanted
        if 'BASE' in data and data['NAME'][0] != data['BASE'][0]:
            print('{} skipped, subpackage'.format(data['NAME'][0]), file=sys.stderr)
            continue

        pkg = Package()

        pkg.name = data['NAME'][0]
        pkg.version, pkg.origversion = SanitizeVersion(data['VERSION'][0])

        if 'DESC' in data:
            pkg.comment = data['DESC'][0]

        if 'URL' in data:
            pkg.homepage = data['URL'][0]

        if 'LICENSE' in data:
            pkg.licenses = data['LICENSE']

        pkg.maintainers = sum(map(extract_maintainers, data['PACKAGER']), [])

        if 'GROUPS' in data:
            pkg.category = data['GROUPS'][0]

        packages.append(pkg)

    return packages
def Parse(self, path):
    """Parse the first HTML table on the page; columns are
    name / version / comment / license."""
    result = []

    doc = lxml.html.parse(path).getroot()
    table = doc.xpath('.//table')[0]

    for row in table.xpath('./tbody/tr'):
        pkg = Package()
        pkg.name = row.xpath('./td[1]/a')[0].text
        pkg.version = row.xpath('./td[2]')[0].text
        pkg.comment = row.xpath('./td[3]')[0].text
        pkg.licenses = [row.xpath('./td[4]')[0].text]
        result.append(pkg)

    return result
def Parse(self, path):
    """Parse CRUX-style ports, one Pkgfile per port directory.

    Metadata comes from '# Description:' / '# URL:' / '# Maintainer:'
    comment headers and from 'name='/'version=' shell assignments.
    """
    packages = []

    for pkgdir in os.listdir(path):
        pkgpath = os.path.join(path, pkgdir, 'Pkgfile')

        if not os.path.exists(pkgpath):
            continue

        with open(pkgpath, 'r', encoding='utf-8', errors='ignore') as pkgfile:
            pkg = Package()

            for line in pkgfile:
                line = line.strip()

                if line.startswith('# Description:'):
                    if not pkg.comment:
                        pkg.comment = line[14:].strip()
                    else:
                        print('WARNING: duplicate Description for {}'.format(pkgdir), file=sys.stderr)

                if line.startswith('# URL:'):
                    if not pkg.homepage:
                        pkg.homepage = line[6:].strip()
                    else:
                        print('WARNING: duplicate URL for {}'.format(pkgdir), file=sys.stderr)

                if line.startswith('# Maintainer:'):
                    # expected format: 'Name, email'
                    maintainer = line[13:].strip()
                    if ',' in maintainer:
                        _, email = line[13:].strip().split(',', 1)
                        pkg.maintainers += extract_maintainers(email)
                    else:
                        print('WARNING: bad Maintainer format for {}'.format(pkgdir), file=sys.stderr)

                # shell variable assignments; first occurrence wins
                if line.startswith('name=') and not pkg.name:
                    pkg.name = line[5:]

                if line.startswith('version=') and not pkg.version:
                    pkg.version = line[8:]

            if not pkg.name or not pkg.version:
                print('WARNING: unable to parse port form {}: no name or version'.format(pkgdir), file=sys.stderr)
                continue

            # values still containing shell variables are unusable
            if '$' in pkg.name or '$' in pkg.version:
                print('WARNING: unable to parse port form {}: name or version contain variables'.format(pkgdir), file=sys.stderr)
                continue

            packages.append(pkg)

    return packages
def Parse(self, path):
    """Parse per-package 'desc' files with '%TAG%' sections.

    Robustness fix: the '%TAG%' header branch is now checked before the
    tag-content branches. In the original it came last, so a header
    line immediately following content (with no blank separator) was
    consumed as content for the previous tag instead of switching tags.
    Well-formed files (blank line between sections) behave identically.
    """
    result = []

    for package in os.listdir(path):
        desc_path = os.path.join(path, package, 'desc')
        if not os.path.isfile(desc_path):
            continue

        with open(desc_path, encoding='utf-8') as file:
            pkg = Package()
            tag = None
            for line in file:
                line = line.strip()
                if line == '':
                    # blank line terminates the current section
                    tag = None
                elif line.startswith('%') and line.endswith('%'):
                    # section header: recognized even without a preceding
                    # blank line (see docstring)
                    tag = line[1:-1]
                elif tag == 'NAME':
                    pkg.name = line
                elif tag == 'VERSION':
                    pkg.version, pkg.origversion = SanitizeVersion(line)
                elif tag == 'DESC':
                    # multi-line description: join lines with newlines
                    if pkg.comment is None:
                        pkg.comment = ''
                    if pkg.comment != '':
                        pkg.comment += '\n'
                    pkg.comment += line
                elif tag == 'URL':
                    pkg.homepage = line
                elif tag == 'LICENSE':
                    pkg.licenses.append(line)
                elif tag == 'PACKAGER':
                    pkg.maintainers += extract_maintainers(line)
                elif tag == 'BASE':
                    pkg.extrafields['base'] = line

            if pkg.name is not None and pkg.version is not None:
                result.append(pkg)
            else:
                print(
                    'WARNING: %s skipped, likely due to parsing problems' % package,
                    file=sys.stderr)

    return result