def get(package_id, package_version, urls, download_location):
    """Download a package from a package list given as a URL or file path.

    :param package_id: id of the package to fetch (surrounding whitespace is
        stripped before comparison).
    :param package_version: exact version to fetch, or ``None`` for any.
    :param urls: either a URL (contains ``://``) or a local path to the
        tab-separated package list consumed by ``yield_packages``.
    :param download_location: directory to store the download in; created if
        it does not exist.
    :raises Exception: when *urls* is neither a URL nor an existing path.
    """
    package_found = False
    log.info("Searching for package: "+str(package_id)+" in "+str(urls))
    if not os.path.exists(download_location):
        os.makedirs(download_location)
    handle = None
    if '://' in urls:
        with urllib.request.urlopen(urls) as uf:
            # BUG FIX: urlopen() yields bytes under Python 3, but StringIO
            # only accepts text -- decode before wrapping.
            handle = StringIO(uf.read().decode('utf-8'))
    elif os.path.exists(urls):
        handle = open(urls, 'r')
    else:
        raise Exception("--urls option does not look like a url or a file path")
    for ld in yield_packages(handle):
        # TODO: check platform/architecture, failover to all if available?
        if ld['id'] == package_id.strip() and (package_version is None or ld['version'] == package_version):
            package_found = True
            # I worry about this being unreliable. TODO: add target filename column?
            pkg_name = package_name(ld)
            storage_path = os.path.join(download_location, pkg_name)
            url = get_url(ld)
            urllib.request.urlretrieve(url, storage_path)
            # BUG FIX: read via a context manager so the handle is closed.
            with open(storage_path, 'rb') as downloaded:
                download_checksum = hashlib.sha256(downloaded.read()).hexdigest()
            if ld['sha256sum'] != download_checksum:
                # BUG FIX: .format() used to be applied to log.error()'s
                # return value (None), raising AttributeError; format the
                # message string before logging it.
                log.error('Checksum does not match, something seems to be wrong.\n'
                          '{expected}\t(expected)\n{actual}\t(downloaded)'.format(
                              expected=ld['sha256sum'], actual=download_checksum))
            else:
                log.info('Download successful for %s.' % (pkg_name))
    if not package_found:
        log.warning('Package (%s) could not be found in this server.' % (package_id))
def get(package_id, package_version, download_location):
    """Download the 'src' platform build of a package from PACKAGE_SERVER.

    :param package_id: id of the package to fetch (surrounding whitespace is
        stripped before comparison).
    :param package_version: exact version to fetch, or ``None`` for any.
    :param download_location: directory to store the download in; created if
        it does not exist.
    """
    package_found = False
    database = PACKAGE_SERVER + 'urls.tsv'
    log.info("Searching for package: "+str(package_id)+" in "+str(database))
    if not os.path.exists(download_location):
        os.makedirs(download_location)
    for ld in yield_packages(urllib2.urlopen(database)):
        # TODO: check platform/architecture, failover to all if available?
        if ld['id'] == package_id.strip() and ld['platform'] == 'src' and (package_version is None or ld['version'] == package_version):
            package_found = True
            # I worry about this being unreliable. TODO: add target filename column?
            pkg_name = package_name(ld)
            storage_path = os.path.join(download_location, pkg_name)
            url = get_url(ld)
            urllib.urlretrieve(url, storage_path)
            # BUG FIX: read via a context manager so the handle is closed.
            with open(storage_path, 'rb') as downloaded:
                download_checksum = hashlib.sha256(downloaded.read()).hexdigest()
            if ld['sha256sum'] != download_checksum:
                # BUG FIX: .format() used to be applied to log.error()'s
                # return value (None), raising AttributeError; format the
                # message string before logging it.
                log.error('Checksum does not match, something seems to be wrong.\n'
                          '{expected}\t(expected)\n{actual}\t(downloaded)'.format(
                              expected=ld['sha256sum'], actual=download_checksum))
            else:
                log.info('Download successful for %s.' % (pkg_name))
    if not package_found:
        log.warning('Package (%s) could not be found in this server.' % (package_id))
def main():
    """Fetch and verify every package piped in on stdin (diff format).

    Ids arrive with a leading '+' (diff added-line marker); header lines
    start with '+#' and are skipped.  Exits 1 if any download or
    verification fails, 0 otherwise.
    """
    status = 0
    for pkg in yield_packages(sys.stdin):
        # For piped in diff, header can appear as +# Id ...
        if pkg['id'].startswith("+#"):
            continue
        print(pkg)
        # Remove the '+' at the beginning
        pkg['id'] = pkg['id'][1:]
        dest = package_to_path(**pkg) + pkg['ext']
        failure = download_url(pkg['url'], dest)
        if failure is not None:
            log.error("Could not download file: %s", failure)
            status = 1
            continue
        log.info("%s downloaded successfully", dest)
        failure = verify_file(dest, pkg['sha256sum'])
        if failure is not None:
            log.error("Could not verify file: %s", failure)
            status = 1
            continue
        log.info("%s verified successfully with hash %s", dest, pkg['sha256sum'])
    exit(status)
def main():
    """Fetch and verify every package piped in on stdin (diff format).

    Ids arrive with a leading '+' (diff added-line marker); header lines
    start with '+#' and are skipped.  Exits 1 if any download or
    verification fails, 0 otherwise.
    """
    retcode = 0
    for package in yield_packages(sys.stdin):
        # For piped in diff, header can appear as +# Id ...
        if package['id'].startswith("+#"):
            continue
        # BUG FIX: 'print package' is Python 2-only statement syntax;
        # print() with a single argument works on both 2 and 3.
        print(package)
        # Remove the '+' at the beginning
        package['id'] = package['id'][1:]
        output_package_path = package_to_path(**package) + package['ext']
        err = download_url(package['url'], output_package_path)
        if err is not None:
            log.error("Could not download file: %s", err)
            retcode = 1
        else:
            log.info("%s downloaded successfully", output_package_path)
            err = verify_file(output_package_path, package['sha256sum'])
            if err is not None:
                log.error("Could not verify file: %s", err)
                retcode = 1
            else:
                log.info("%s verified successfully with hash %s", output_package_path, package['sha256sum'])
    # sys.exit instead of the site-provided exit() builtin, which may be
    # absent when run with -S.
    sys.exit(retcode)
def main(galaxy_package_file):
    """Mirror every package listed in *galaxy_package_file* to local storage.

    For each row the expected on-disk path is computed; missing files are
    downloaded (or symlinked for '/'-prefixed depot-local sources) and
    verified against their recorded checksum.  Results accumulate into an
    xUnit report written to 'report.xml', and a JSON description of every
    processed row is printed to stdout.
    """
    visited_paths = []
    api_data = {'data': []}
    with open(galaxy_package_file, 'r') as handle:
        # NOTE(review): retcode is never set to anything but 0 in this
        # function, so the process always exits 0 -- confirm that is intended.
        retcode = 0
        xunit = XUnitReportBuilder()
        for ld in yield_packages(handle):
            nice_name = package_to_path(**ld)
            # Each package id gets its own directory.
            if not os.path.exists(ld['id']):
                os.makedirs(ld['id'])
            output_package_path = os.path.join(ld['id'], nice_name) + ld['ext']
            visited_paths.append(os.path.abspath(output_package_path))
            # Copy the row and record the generated path for the JSON dump.
            tmpld = {}
            tmpld.update(ld)
            tmpld['_gen'] = output_package_path
            api_data['data'].append(tmpld)
            # A zero-byte file is a failed earlier download: remove it so it
            # gets re-fetched below.
            if os.path.exists(output_package_path) and os.path.getsize(output_package_path) == 0:
                log.error("Empty download, removing %s %s", ld['url'], output_package_path)
                cleanup_file(output_package_path)
            if os.path.exists(output_package_path):
                # Already mirrored; record as skipped.
                log.debug("URL exists %s", ld['url'])
                xunit.skip(nice_name)
            else:
                log.info("URL missing, downloading %s to %s", ld['url'], output_package_path)
                # A leading '/' marks a depot-local source, so a symlink
                # suffices instead of an HTTP download.
                if ld['url'].startswith('/'):
                    err = symlink_depot(ld['url'], output_package_path)
                else:
                    err = download_url(ld['url'], output_package_path)
                if err is not None:
                    xunit.failure(nice_name, "DownloadError", err)
                    cleanup_file(output_package_path)
                    continue
                # Check sha256sum of download
                err = verify_file(output_package_path, ld['sha'].strip())
                if err is not None:
                    xunit.error(nice_name, "Sha256sumError", err)
                    cleanup_file(output_package_path)
                    continue
                xunit.ok(nice_name)
        with open('report.xml', 'w') as xunit_handle:
            xunit_handle.write(xunit.serialize())
    print(json.dumps(api_data, indent=2))
    sys.exit(retcode)
def main(galaxy_package_file):
    """Validate every row of the package list and report all problems found.

    Checks required columns, platform/arch values, sha256sum shape and
    casing, the Upstream-first flag, and (id, version, platform, arch)
    uniqueness.  Exits 0 when clean, 1 when any rule fails.
    """
    with open(galaxy_package_file, 'r') as handle:
        retcode = 0
        identifiers = set()
        for ld, lineno, line, extraret in yield_packages(handle, retcode=retcode, meta=True):
            if extraret > 0:
                retcode = extraret
            try:
                # Skip extension, as it is OK to be empty.
                # NOTE(review): HEADER_KEYS[6:6] is an empty slice, so only
                # the first five columns are actually checked -- looks like a
                # typo for [6:7]; left as-is pending confirmation.
                for x in HEADER_KEYS[0:5] + HEADER_KEYS[6:6]:
                    if ld.get(x, '').strip() == '':
                        log.error("[%s] Empty %s", lineno, x)
                        retcode = 1
                if ld['platform'] not in ('linux', 'windows', 'darwin', 'src'):
                    log.error("[%s] Unknown platform %s", lineno, ld['platform'])
                    retcode = 1
                if ld['arch'] not in ('x32', 'x64', 'all'):
                    log.error("[%s] Unknown architecture %s", lineno, ld['arch'])
                    retcode = 1
                # BUG FIX: the length-64 check appeared twice with different
                # messages, logging the same defect twice; merged into one.
                if len(ld['sha256sum']) != 64:
                    log.error("[%s] sha256sum is not 64 characters long. Typo? %s", lineno, ld['sha256sum'])
                    retcode = 1
                if ld['sha256sum'] != ld['sha256sum'].lower():
                    log.error("[%s] Uppercase checksum needs to be lower case %s", lineno, ld['sha256sum'])
                    retcode = 1
                if any(c not in '0123456789abcdef' for c in ld['sha256sum']):
                    log.error("[%s] sha256sum contains non-hexidecimal characters. Typo? %s", lineno, ld['sha256sum'])
                    retcode = 1
                if ld['upstream_first'] not in ('True', 'False'):
                    log.error("[%s] Upstream first must be 'True' or 'False', was '%s'", lineno, ld['upstream_first'])
                    retcode = 1
                platform_id = (ld['id'], ld['version'], ld['platform'], ld['arch'])
                if platform_id in identifiers:
                    log.error("[%s] identifier is not unique: '%s'", lineno, platform_id)
                    retcode = 1
                else:
                    identifiers.add(platform_id)
            # BUG FIX: 'except Exception, e' is Python 2-only syntax and a
            # SyntaxError under Python 3; 'as' works on 2.6+ and 3.
            except Exception as e:
                log.error("[%s] Line (probably) not tabbed properly: %s", lineno, e)
                retcode = 1
    sys.exit(retcode)
def main(galaxy_package_file, id, version=None):
    """Print a Galaxy <action> download stanza for each matching package.

    :param galaxy_package_file: path to the tab-separated package list.
    :param id: package id to match, case-insensitively.  (Parameter name
        shadows the builtin but is kept for caller compatibility.)
    :param version: optional version filter, case-insensitive.
    """
    with open(galaxy_package_file, 'r') as handle:
        for ld in yield_packages(handle):
            if ld['id'].lower() != id.lower():
                continue
            if version is not None and ld['version'].lower() != version.lower():
                continue
            # BUG FIX: bare 'print """...' is Python 2-only statement syntax;
            # the print() function works on both 2 and 3.
            # NOTE(review): the 'sha' key differs from the 'sha256sum' key
            # used by the validator variants -- confirm against yield_packages.
            print("""<action type="download_by_url" sha256sum="{0[sha]}">
    {1}
</action>""".format(ld, get_url(ld)))
def main(galaxy_package_file, id, version=None):
    """Print a Galaxy <action> download stanza for each matching package.

    :param galaxy_package_file: path to the tab-separated package list.
    :param id: package id to match, case-insensitively.
    :param version: optional version filter, case-insensitive.
    """
    wanted_id = id.lower()
    with open(galaxy_package_file, 'r') as handle:
        for entry in yield_packages(handle):
            id_matches = entry['id'].lower() == wanted_id
            version_matches = version is None or entry['version'].lower() == version.lower()
            if id_matches and version_matches:
                print("""<action type="download_by_url" sha256sum="{0[sha]}">
    {1}
</action>""".format(entry, get_url(entry)))
def main(galaxy_package_file):
    """Validate every row of the package list and report all problems found.

    Checks required columns, platform/arch values, sha256sum length and
    casing, the Upstream-first flag, and (id, version, platform, arch)
    uniqueness.  Exits 0 when clean, 1 when any rule fails.
    """
    with open(galaxy_package_file, "r") as handle:
        retcode = 0
        identifiers = set()
        for ld, lineno, line, extraret in yield_packages(handle, retcode=retcode, meta=True):
            if extraret > 0:
                retcode = extraret
            try:
                # Skip extension, as it is OK to be empty.
                # NOTE(review): HEADER_KEYS[6:6] is an empty slice, so only
                # the first five columns are actually checked -- looks like a
                # typo for [6:7]; left as-is pending confirmation.
                for x in HEADER_KEYS[0:5] + HEADER_KEYS[6:6]:
                    if ld.get(x, "").strip() == "":
                        log.error("[%s] Empty %s", lineno, x)
                        retcode = 1
                if ld["platform"] not in ("linux", "windows", "darwin", "src"):
                    log.error("[%s] Unknown platform %s", lineno, ld["platform"])
                    retcode = 1
                if ld["arch"] not in ("x32", "x64", "all"):
                    log.error("[%s] Unknown architecture %s", lineno, ld["arch"])
                    retcode = 1
                if len(ld["sha256sum"]) != 64:
                    log.error("[%s] Bad checksum %s", lineno, ld["sha256sum"])
                    retcode = 1
                if ld["sha256sum"] != ld["sha256sum"].lower():
                    log.error("[%s] Uppercase checksum needs to be lower case %s", lineno, ld["sha256sum"])
                    retcode = 1
                if ld["upstream_first"] not in ("True", "False"):
                    log.error("[%s] Upstream first must be 'True' or 'False', was '%s'", lineno, ld["upstream_first"])
                    retcode = 1
                platform_id = (ld["id"], ld["version"], ld["platform"], ld["arch"])
                if platform_id in identifiers:
                    log.error("[%s] identifier is not unique: '%s'", lineno, platform_id)
                    retcode = 1
                else:
                    identifiers.add(platform_id)
            # BUG FIX: 'except Exception, e' is Python 2-only syntax and a
            # SyntaxError under Python 3; 'as' works on 2.6+ and 3.
            except Exception as e:
                log.error("[%s] Line (probably) not tabbed properly: %s", lineno, e)
                retcode = 1
    sys.exit(retcode)
def main(galaxy_package_file):
    """Validate every row of the package list and report all problems found.

    Checks required columns, platform/arch values, sha256sum length and
    casing, the Upstream-first flag, and (id, version, platform, arch)
    uniqueness.  Exits 0 when clean, 1 when any rule fails.
    """
    with open(galaxy_package_file, 'r') as handle:
        retcode = 0
        identifiers = set()
        for ld, lineno, line, extraret in yield_packages(handle, retcode=retcode, meta=True):
            if extraret > 0:
                retcode = extraret
            try:
                # Skip extension, as it is OK to be empty.
                # NOTE(review): HEADER_KEYS[6:6] is an empty slice, so only
                # the first five columns are actually checked -- looks like a
                # typo for [6:7]; left as-is pending confirmation.
                for x in HEADER_KEYS[0:5] + HEADER_KEYS[6:6]:
                    if ld.get(x, '').strip() == '':
                        log.error("[%s] Empty %s", lineno, x)
                        retcode = 1
                if ld['platform'] not in ('linux', 'windows', 'darwin', 'src'):
                    log.error("[%s] Unknown platform %s", lineno, ld['platform'])
                    retcode = 1
                if ld['arch'] not in ('x32', 'x64', 'all'):
                    log.error("[%s] Unknown architecture %s", lineno, ld['arch'])
                    retcode = 1
                if len(ld['sha256sum']) != 64:
                    log.error("[%s] Bad checksum %s", lineno, ld['sha256sum'])
                    retcode = 1
                if ld['sha256sum'] != ld['sha256sum'].lower():
                    log.error("[%s] Uppercase checksum needs to be lower case %s", lineno, ld['sha256sum'])
                    retcode = 1
                if ld['upstream_first'] not in ('True', 'False'):
                    log.error("[%s] Upstream first must be 'True' or 'False', was '%s'", lineno, ld['upstream_first'])
                    retcode = 1
                platform_id = (ld['id'], ld['version'], ld['platform'], ld['arch'])
                if platform_id in identifiers:
                    log.error("[%s] identifier is not unique: '%s'", lineno, platform_id)
                    retcode = 1
                else:
                    identifiers.add(platform_id)
            # BUG FIX: 'except Exception, e' is Python 2-only syntax and a
            # SyntaxError under Python 3; 'as' works on 2.6+ and 3.
            except Exception as e:
                log.error("[%s] Line (probably) not tabbed properly: %s", lineno, e)
                retcode = 1
    sys.exit(retcode)
def main(galaxy_package_file):
    """Re-emit the package list deduplicated and sorted, with a header row.

    Rows are grouped on id, version, platform, arch plus the HEADER_KEYS[6]
    column; only the first row of each group is printed.
    """
    with open(galaxy_package_file, 'r') as handle:
        # BUG FIX: bare 'print ...' is Python 2-only statement syntax; the
        # print() function works on both 2 and 3 for a single argument.
        print('# ' + '\t'.join(['Id', 'Version', 'Platform', 'Architecture', 'Upstream Url', 'Extension', 'sha256sum', 'Use upstream']))
        res = {}
        for ld in yield_packages(handle):
            # Group key: id, version, platform, arch (+ checksum column).
            key = '_'.join([ld[x] for x in HEADER_KEYS[0:4] + HEADER_KEYS[6:7]])
            res.setdefault(key, []).append(ld)
        for x in sorted(res):
            # Emit only the first record of each duplicate group.
            print('\t'.join([res[x][0][key] for key in HEADER_KEYS]).rstrip("\n"))
def main(galaxy_package_file, dryrun=False):
    """Audit already-mirrored packages for empty, corrupt, or mistyped files.

    For every listed package whose file exists locally, checks the recorded
    sha256sum and the file type, collecting failures into an xUnit report
    written to 'report.xml'.  Entries with no local file are ignored.
    """
    visited_paths = []
    api_data = {'data': []}
    with open(galaxy_package_file, 'r') as handle:
        retcode = 0
        report = XUnitReportBuilder()
        # Sentinel entry so the report is never empty.
        report.ok("I.Am.Alive")
        for record in yield_packages(handle):
            display_name = package_to_path(**record)
            if not os.path.exists(record['id']):
                continue
            local_path = os.path.join(record['id'], display_name) + record['ext']
            if not os.path.exists(local_path):
                continue
            visited_paths.append(os.path.abspath(local_path))
            if os.path.exists(local_path) and os.path.getsize(local_path) == 0:
                log.error("Empty download, removing %s %s", record['url'], local_path)
                cleanup_file(local_path)
                report.failure(display_name, "EmptyFile", "%s was found to be empty" % local_path)
            problem = verify_file(local_path, record['sha256sum'].strip())
            if problem is not None:
                report.failure(display_name, "ValidationError", problem)
            problem = verify_filetype(local_path, record['ext'].strip(), dryrun=dryrun)
            if problem is not None:
                report.failure(display_name, "ValidationError", problem)
        with open('report.xml', 'w') as xunit_handle:
            xunit_handle.write(report.serialize())
    sys.exit(retcode)
def main(galaxy_package_file):
    """Re-emit the package list deduplicated and sorted, with a header row.

    Rows are grouped on id, version, platform, arch plus the HEADER_KEYS[6]
    column; only the first row of each group is printed.
    """
    with open(galaxy_package_file, 'r') as handle:
        # BUG FIX: the Python 2 'print' here ended with a trailing comma,
        # which suppressed the newline so the header ran into the first data
        # row; also converted to the print() function for Python 3.
        print('# ' + '\t'.join(['Id', 'Version', 'Platform', 'Architecture', 'Upstream Url', 'Extension', 'sha256sum', 'Alternate Url']))
        res = {}
        for ld in yield_packages(handle):
            # Group key: id, version, platform, arch (+ checksum column).
            key = '_'.join([ld[x] for x in HEADER_KEYS[0:4] + HEADER_KEYS[6:7]])
            res.setdefault(key, []).append(ld)
        for x in sorted(res):
            # Emit only the first record of each duplicate group.
            print('\t'.join([res[x][0][key] for key in HEADER_KEYS]).rstrip("\n"))
def main(galaxy_package_file):
    """Re-emit the package list deduplicated and sorted, with a header row.

    Rows are grouped on id, version, platform, arch plus the HEADER_KEYS[6]
    column; only the first row of each group is printed.
    """
    with open(galaxy_package_file, "r") as handle:
        # BUG FIX: bare 'print ...' is Python 2-only statement syntax; the
        # print() function works on both 2 and 3 for a single argument.
        print("# " + "\t".join(
            ["Id", "Version", "Platform", "Architecture", "Upstream Url", "Extension", "sha256sum", "Use upstream"]
        ))
        res = {}
        for ld in yield_packages(handle):
            # Group key: id, version, platform, arch (+ checksum column).
            key = "_".join([ld[x] for x in HEADER_KEYS[0:4] + HEADER_KEYS[6:7]])
            res.setdefault(key, []).append(ld)
        for x in sorted(res):
            # Emit only the first record of each duplicate group.
            print("\t".join([res[x][0][key] for key in HEADER_KEYS]).rstrip("\n"))