def fetch_distribution(self, requirement, tmpdir, force_scan=False,
                       source=False, develop_ok=False, local_index=None):
    distribute_req = pkg_resources.Requirement.parse("distribute>=0.6.14")
    if pkg_resources.get_distribution("distribute") in distribute_req:
        # The local_index parameter is only in distribute>=0.6.14
        dist = PackageIndex.fetch_distribution(
            self, requirement, tmpdir, force_scan, source, develop_ok,
            local_index)
    else:
        dist = PackageIndex.fetch_distribution(
            self, requirement, tmpdir, force_scan, source, develop_ok)
    if dist:
        log.info("Using %s from %s" % (dist, dist.location))
    return dist
def fetch_requirement(req, dest_dir, force_download):
    from setuptools.package_index import PackageIndex  # @Reimport
    from pkg_resources import working_set  # @Reimport # NOQA
    i = PackageIndex()
    if force_download:
        [i.remove(i[req.key][0]) for _ in xrange(len(i[req.key]))]
        d = i.download(req, dest_dir)
    else:
        d = i.fetch_distribution(req, dest_dir, force_scan=True)
    d = getattr(d, 'location', d) if d else ''
    return (d if d else working_set.resolve([req])[0].location)
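# A minimal, self-contained sketch of the call the surrounding examples are
# built around: PackageIndex.fetch_distribution() from
# setuptools.package_index. It assumes a setuptools release that still ships
# the legacy package_index module, and "requests>=2.0" is only an
# illustrative placeholder requirement.
import tempfile

from pkg_resources import Requirement
from setuptools.package_index import PackageIndex

tmpdir = tempfile.mkdtemp(prefix="fetch-example-")
index = PackageIndex()  # defaults to the public PyPI simple index
req = Requirement.parse("requests>=2.0")  # hypothetical requirement
# source=True asks for an sdist; force_scan=True rescans the index page.
dist = index.fetch_distribution(req, tmpdir, force_scan=True, source=True)
if dist is not None:
    print("Fetched %s to %s" % (dist, dist.location))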
    , 'Flask-OpenID>=1.0.1'
    , 'webassets>=0.7.1'
]

import os, shutil, sys

from pkg_resources import Requirement
from setuptools.package_index import PackageIndex


def die(message):
    sys.stderr.write(message)
    sys.exit(1)


if __name__ == '__main__':
    if len(sys.argv) < 2:
        print "Please supply a destination directory for the fetched packages!"
        sys.exit(1)

    dest_dir = os.path.abspath(sys.argv[1])

    print "Fetching packages:"
    pkg_index = PackageIndex()
    with open(os.path.join(dest_dir, 'install_order'), 'w') as fd:
        for pkg in required_packages:
            print "(--- Processing requirement '{0}'".format(pkg)
            dist = pkg_index.fetch_distribution(Requirement.parse(pkg), dest_dir, True, True)
            if dist is None:
                die("Couldn't find package satisfying '{0}'!".format(pkg))
            print " ---) Fetched {0} {1}".format(dist.project_name, dist.version)
            fd.write(os.path.basename(dist.location) + '\n')
class SetupToolsCommand(Command):
    """setuptools Command"""
    description = "Setuptools WSS plugin"

    user_options = [
        ("pathConfig=", "p", "Configuration file path"),
        ("debug=", "d", "Show debugging output"),
    ]

    def initialize_options(self):
        self.debug = None
        self.proxySetting = None
        self.service = None
        self.configDict = None
        self.pathConfig = None
        self.token = None
        self.userEnvironment = None
        self.distDepend = None
        self.pkgIndex = PackageIndex()
        self.dependencyList = []
        self.projectCoordinates = None
        self.tmpdir = tempfile.mkdtemp(prefix="wss_python_plugin-")

    def finalize_options(self):
        # log file activation and config
        if self.debug == "y":
            logging.basicConfig(format="%(asctime)s%(levelname)s:%(message)s",
                                level=logging.DEBUG,
                                filename="wss_plugin.log")

        # load and import config file
        try:
            sys.path.append(self.pathConfig)
            self.configDict = __import__("config_file").config_info
            logging.info("Loading config_file was successful")
        except Exception as err:
            sys.exit("Can't import the config file." + err.message)

        # load proxy setting if exist
        if "proxy" in self.configDict:
            self.proxySetting = self.configDict["proxy"]
        if "index_url" in self.configDict:
            self.pkgIndex = PackageIndex(index_url=self.configDict["index_url"])

        self.projectCoordinates = Coordinates.create_project_coordinates(self.distribution)
        self.userEnvironment = pk_res.Environment(get_python_lib(), platform=None, python=None)
        distribution_specification = self.distribution.get_name() + "==" + self.distribution.get_version()
        distribution_requirement = pk_res.Requirement.parse(distribution_specification)

        # resolve all dependencies
        try:
            self.distDepend = pk_res.working_set.resolve([distribution_requirement], env=self.userEnvironment)
            self.distDepend.pop(0)
            logging.info("Finished resolving dependencies")
        except Exception as err:
            print "distribution was not found on this system, and is required by this application", err.message

    def run(self):
        self.validate_config_file()
        self.scan_modules()
        self.create_service()
        self.run_plugin()

    def validate_config_file(self):
        """ Validate content of config file params """
        # org token
        if "org_token" in self.configDict:
            if self.configDict["org_token"] == "":
                sys.exit("Organization token is empty")
        else:
            sys.exit("No organization token option exists")
        logging.info("Validation of config file was successful")
        # Todo: check existence of other keys in dict

    def scan_modules(self):
        """ Downloads all the dependencies calculates their sha1 and creates a list of dependencies info"""
        if self.distDepend is not None:
            for dist in self.distDepend:
                try:
                    # create a dist instance from requirement instance
                    current_requirement = dist.as_requirement()
                    current_distribution = self.pkgIndex.fetch_distribution(
                        current_requirement, self.tmpdir, force_scan=True, source=True, develop_ok=True)
                    # create dep. root
                    if current_distribution is not None:
                        self.dependencyList.append(create_dependency_record(current_distribution))
                except Exception as err:
                    print "Error in fetching dists " + dist.key + " " + dist.version
            logging.info("Finished calculation for all dependencies")
        else:
            logging.info("No dependencies were found")
        shutil.rmtree(self.tmpdir)

    def create_service(self):
        """ Creates a WssServiceClient with the destination url"""
        if ("url_destination" in self.configDict) and (self.configDict["url_destination"] != ""):
            self.service = WssServiceClient(self.configDict["url_destination"], self.proxySetting)
        else:
            self.service = WssServiceClient("https://saas.whitesourcesoftware.com/agent", self.proxySetting)
        logging.debug("The destination url is set to: " + self.service.to_string())

    def run_plugin(self):
        """ Initializes the plugin requests"""
        org_token = self.configDict["org_token"]
        project = self.create_project_obj()
        product = ""
        product_version = ""

        if "product_name" in self.configDict:
            product = self.configDict["product_name"]
        if "product_version" in self.configDict:
            product_version = self.configDict["product_version"]

        self.check_policies(project, org_token, product, product_version)
        self.update_inventory(project, org_token, product, product_version)

    def create_project_obj(self):
        """ create the actual project """
        project_token = None
        if "project_token" in self.configDict:
            project_token = self.configDict["project_token"]
            if project_token == "":
                project_token = None
        return AgentProjectInfo(self.projectCoordinates, self.dependencyList, project_token)

    def check_policies(self, project_info, token, product_name, product_version):
        """ Sends the check policies request to the agent according to the request type """
        if ("check_policies" in self.configDict) and (self.configDict["check_policies"]):
            logging.debug("Checking policies")
            projects = [project_info]
            request = CheckPoliciesRequest(token, product_name, product_version, projects)
            result = self.service.check_policies(request)
            try:
                self.handle_policies_result(result)
            except Exception as err:
                sys.exit("Some dependencies do not conform with open source policies")

    def handle_policies_result(self, result):
        """ Checks if any policies rejected if so stops """
        logging.debug("Creating policies report")
        if result.has_rejections():
            print_policies_rejection(result)
            logging.info("Some dependencies do not conform with open source policies")
            raise
        else:
            logging.debug("All dependencies conform with open source policies")

    def update_inventory(self, project_info, token, product_name, product_version):
        """ Sends the update request to the agent according to the request type """
        logging.debug("Updating White Source")
        projects = [project_info]
        request = UpdateInventoryRequest(token, product_name, product_version, projects)
        result = self.service.update_inventory(request)
        print_update_result(result)
class SetupToolsCommand(Command):
    """setuptools Command"""
    description = "Setuptools WSS plugin"

    user_options = [
        ('offline=', 'o', 'Offline flag'),
        ('pathConfig=', 'p', 'Configuration file path'),
        ('debug=', 'd', 'Show debugging output'),
    ]

    def initialize_options(self):
        self.offline = None
        self.debug = None
        self.proxySetting = None
        self.service = None
        self.configDict = None
        self.pathConfig = None
        self.token = None
        self.userEnvironment = None
        self.distDepend = None
        self.pkgIndex = PackageIndex()
        self.dependencyList = []
        self.projectCoordinates = None
        self.tmpdir = tempfile.mkdtemp(prefix="wss_python_plugin-")

    def finalize_options(self):
        # log file activation and config
        if self.debug == 'y':
            logging.basicConfig(format='%(asctime)s%(levelname)s:%(message)s',
                                level=logging.DEBUG,
                                filename='wss_plugin.log')

        # load and import config file
        try:
            sys.path.append(self.pathConfig)
            if sys.version_info.major >= 3:
                config_file_spec = importlib.util.spec_from_file_location('config_file', self.pathConfig)
                config_file_module = importlib.util.module_from_spec(config_file_spec)
                config_file_spec.loader.exec_module(config_file_module)
                self.configDict = config_file_module.config_info
            else:
                self.configDict = imp.load_source('config_file', self.pathConfig).config_info
            logging.info('Loading config_file was successful')
        except Exception as err:
            print("Can't import the config file.")
            sys.exit(err)

        # load proxy setting if exist
        if 'proxy' in self.configDict:
            self.proxySetting = self.configDict['proxy']
        if 'index_url' in self.configDict:
            self.pkgIndex = PackageIndex(index_url=self.configDict['index_url'])

        self.projectCoordinates = Coordinates.create_project_coordinates(self.distribution)
        self.userEnvironment = pk_res.Environment([get_python_lib()], platform=None, python=None)
        distribution_specification = self.distribution.get_name() + "==" + self.distribution.get_version()
        distribution_requirement = pk_res.Requirement.parse(distribution_specification)

        # resolve all dependencies
        try:
            self.distDepend = pk_res.working_set.resolve([distribution_requirement], env=self.userEnvironment)
            self.distDepend.pop(0)
            logging.info("Finished resolving dependencies")
        except Exception as err:
            print("distribution was not found on this system, and is required by this application", err)

    def run(self):
        self.validate_config_file()
        self.scan_modules()
        self.create_service()
        self.run_plugin()

    def validate_config_file(self):
        """ Validate content of config file params """
        # org token
        if 'org_token' in self.configDict:
            if self.configDict['org_token'] == '':
                sys.exit("Organization token is empty")
        else:
            sys.exit("No organization token option exists")
        logging.info("Validation of config file was successful")
        # Todo: check existence of other keys in dict

    def scan_modules(self):
        """ Downloads all the dependencies calculates their sha1 and creates a list of dependencies info"""
        if self.distDepend is not None:
            for dist in self.distDepend:
                try:
                    # create a dist instance from requirement instance
                    current_requirement = dist.as_requirement()
                    current_distribution = self.pkgIndex.fetch_distribution(
                        current_requirement, self.tmpdir, force_scan=True, source=True, develop_ok=True)
                    # create dep. root
                    if current_distribution is not None:
                        self.dependencyList.append(create_dependency_record(current_distribution))
                except Exception as err:
                    print("Error in fetching dists " + dist.key + " " + dist.version)
            logging.info("Finished calculation for all dependencies")
        else:
            logging.info("No dependencies were found")
        shutil.rmtree(self.tmpdir)

    def create_service(self):
        """ Creates a WssServiceClient with the destination url"""
        if ('url_destination' in self.configDict) and (self.configDict['url_destination'] != ''):
            self.service = WssServiceClient(self.configDict['url_destination'], self.proxySetting)
        else:
            self.service = WssServiceClient("https://saas.whitesourcesoftware.com/agent", self.proxySetting)
        logging.debug("The destination url is set to: " + self.service.to_string())

    def run_plugin(self):
        """ Initializes the plugin requests"""
        org_token = self.configDict['org_token']
        user_key = ''
        project = self.create_project_obj()
        product = ''
        product_version = ''
        self.connection_retries = 1
        self.connection_retries_interval = 3
        self.policy_violation = False

        if 'product_name' in self.configDict:
            product = self.configDict['product_name']
        if 'user_key' in self.configDict:
            user_key = self.configDict['user_key']
        if 'product_version' in self.configDict:
            product_version = self.configDict['product_version']
        if 'connection_retries' in self.configDict:
            self.connection_retries = self.configDict['connection_retries']
        if 'connection_retries_interval' in self.configDict:
            self.connection_retries_interval = self.configDict['connection_retries_interval']

        if self.configDict.get('offline') or self.offline:
            logging.debug("Offline request")
            offline_request(project, org_token, user_key, product, product_version)
        else:
            if self.configDict.get('check_policies'):
                logging.debug("Checking policies")
                self.check_policies(project, org_token, user_key, product, product_version)
                # no policy violations => send update and pass build
                if not self.policy_violation:
                    logging.debug("Updating inventory")
                    self.update_inventory(project, org_token, user_key, product, product_version)
                # policy violation AND force_update
                elif self.configDict.get('force_update'):
                    print("However all dependencies will be force updated to project inventory.")
                    logging.debug("Updating inventory")
                    self.update_inventory(project, org_token, user_key, product, product_version)
                    # fail the build
                    if self.configDict.get('fail_on_error'):
                        print("Build failure due to policy violation (fail_on_error = True)")
                        sys.exit(1)
                # policy violation AND (NOT force_update)
                elif self.configDict.get('fail_on_error'):
                    # fail the build
                    print("Build failure due to policy violation (fail_on_error = True)")
                    sys.exit(1)

    def create_project_obj(self):
        """ create the actual project """
        project_token = None
        if 'project_token' in self.configDict:
            project_token = self.configDict['project_token']
            if project_token == '':
                project_token = None
        return AgentProjectInfo(coordinates=self.projectCoordinates,
                                dependencies=self.dependencyList,
                                project_token=project_token)

    def check_policies(self, project_info, token, user_key, product_name, product_version):
        """ Sends the check policies request to the agent according to the request type """
        projects = [project_info]
        force_check_all_dependencies = self.configDict.get('force_check_all_dependencies')
        request = CheckPoliciesRequest(token, user_key, product_name, product_version, projects,
                                       force_check_all_dependencies)
        result = self.service.check_policies(request, self.connection_retries, self.connection_retries_interval)
        try:
            self.handle_policies_result(result)
        except Exception:
            logging.warning("Some dependencies do not conform with open source policies")
            sys.exit(1)

    def handle_policies_result(self, result):
        """ Checks if any policies rejected if so stops """
        logging.debug("Creating policies report")
        if result.has_rejections():
            self.policy_violation = True
            print("Some dependencies do not conform with open source policies:")
            print_policies_rejection(result)
        else:
            logging.debug("All dependencies conform with open source policies!")

    def update_inventory(self, project_info, token, user_key, product_name, product_version):
        """ Sends the update request to the agent according to the request type """
        logging.debug("Updating White Source")
        projects = [project_info]
        request = UpdateInventoryRequest(token, user_key, product_name, product_version, projects)
        result = self.service.update_inventory(request, self.connection_retries, self.connection_retries_interval)
        print_update_result(result)
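# Both plugin variants above hand each fetched distribution to
# create_dependency_record(), which is not part of this listing; the
# scan_modules() docstring only says that a sha1 is calculated per downloaded
# dependency. A rough, hypothetical sketch of that hashing step (the real
# record format is not shown here) could look like this:
import hashlib


def sha1_of_file(path, block_size=65536):
    """Return the hex SHA-1 digest of a file, reading it in blocks."""
    digest = hashlib.sha1()
    with open(path, 'rb') as handle:
        while True:
            block = handle.read(block_size)
            if not block:
                break
            digest.update(block)
    return digest.hexdigest()

# e.g. sha1_of_file(current_distribution.location) would give the checksum
# that a dependency record is likely to carry alongside the archive name.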
class Command(LabelCommand):
    option_list = LabelCommand.option_list + (
        make_option("-o", "--owner", help="add packages as OWNER",
                    metavar="OWNER", default=None),
    )
    help = """Add one or more packages to the repository. Each argument can
be a package name or a URL to an archive or egg. Package names honour the
same rules as easy_install with regard to indicating versions etc.

If a version of the package exists, but is older than what we want to
install, the owner remains the same.

For new packages there needs to be an owner. If the --owner option is
present we use that value. If not, we try to match the maintainer of the
package, from the metadata, with a user in our database, based on the email
address. If it's a new package and the maintainer email matches someone in
our user list, we use that. If not, the package cannot be added"""

    def __init__(self, *args, **kwargs):
        self.pypi = PackageIndex()
        LabelCommand.__init__(self, *args, **kwargs)

    def handle_label(self, label, **options):
        with tempdir() as tmp:
            reqs = pkg_resources.parse_requirements(label)
            for req in reqs:
                try:
                    package = self.pypi.fetch_distribution(req, tmp, source=True)
                except Exception as err:
                    print "Could not add %s: %s." % (req, err)
                else:
                    self._save_package(package.location, options["owner"])

    def _save_package(self, path, ownerid):
        meta = self._get_meta(path)

        try:
            # can't use get_or_create as that demands there be an owner
            package = Package.objects.get(name=meta.name)
            isnewpackage = False
        except Package.DoesNotExist:
            package = Package(name=meta.name)
            isnewpackage = True

        release = package.get_release(meta.version)
        if not isnewpackage and release and release.version == meta.version:
            print "%s-%s already added" % (meta.name, meta.version)
            return

        # algorithm as follows: If owner is given, try to grab user with that
        # username from db. If doesn't exist, bail. If no owner set look at
        # mail address from metadata and try to get that user. If it exists
        # use it. If not, bail.
        owner = None

        if ownerid:
            try:
                if "@" in ownerid:
                    owner = User.objects.get(email=ownerid)
                else:
                    owner = User.objects.get(username=ownerid)
            except User.DoesNotExist:
                pass
        else:
            try:
                owner = User.objects.get(email=meta.author_email)
            except User.DoesNotExist:
                pass

        if not owner:
            print "No owner defined. Use --owner to force one"
            return

        # at this point we have metadata and an owner, can safely add it.
        package.owner = owner
        # Some packages don't have proper licence, seems to be a problem
        # with setup.py upload. Use "UNKNOWN"
        package.license = meta.license or "Unknown"
        package.metadata_version = meta.metadata_version
        package.author = meta.author
        package.home_page = meta.home_page
        package.download_url = meta.download_url
        package.summary = meta.summary
        package.description = meta.description
        package.author_email = meta.author_email
        package.save()
        # TODO: Do I need to add classifiers objects???
        # for classifier in meta.classifiers:
        #     package.classifiers.add(
        #         Classifier.objects.get_or_create(name=classifier)[0])

        release = Release()
        release.version = meta.version
        release.package = package
        release.package_info = self._get_pkg_info(meta)

        # Classifiers is processed separately since it is a list and must be
        # properly set so getlist returns the right result
        for cs in meta.classifiers:
            release.package_info.update({'classifiers': cs})
        release.save()

        dis = Distribution()
        dis.release = release
        dis.content.file = open(path, 'rb')
        dis.content.name = settings.DJANGOPYPI_RELEASE_UPLOAD_TO + '/' + \
            path.split('/')[-1]
        # TODO: Very bad hack here, how can I fix it?
        shutil.copy(path, settings.MEDIA_ROOT + '/' + dis.content.name)
        dis.md5_digest = self._get_md5(path)
        dis.filetype = self._get_filetype(path)
        dis.uploader = owner
        dis.comment = ''
        dis.pyversion = meta.requires_python or ''
        dis.signature = ''
        dis.save()

        print "%s-%s added" % (meta.name, meta.version)

    def _get_filetype(self, filename):
        "Returns the package file type, sdist or bdist"
        # TODO: review this, very empirical rules
        if filename.endswith('.zip') or filename.endswith('.tar.gz'):
            return 'sdist'
        raise TypeError(
            "The download resource:{filename} is not a source file".format(
                filename=filename))

    def _get_md5(self, filename):
        "Returns md5 sum for a given file"
        md5 = hashlib.md5()
        with open(filename, 'rb') as content:
            while(1):
                block = content.read(md5.block_size)
                if not block:
                    break
                md5.update(block)
        return md5.hexdigest()

    def _get_pkg_info(self, meta):
        """ Transforms metadata from a package to dict usable for
        MultiValueDict instances. """
        meta_version = meta.metadata_version
        if hasattr(meta, 'classifiers') or hasattr(meta, 'download_url') and meta_version == '1.0':
            meta_version = '1.1'
        fields = conf.METADATA_FIELDS[meta_version]
        metadict = dict([(key, [getattr(meta, key), ])
                         for key in dir(meta)
                         if key in fields and not key.startswith('_') and key != 'classifiers'])
        return metadict

    def _get_meta(self, path):
        data = pkginfo.get_metadata(path)
        if data:
            return data
        else:
            print "Couldn't get metadata from %s. Not added to chishop" % (
                os.path.basename(path))
            return None
def run(self):
    # Prepare for iterations.
    pkgreqmap = reqmap()
    for reqarg in self.reqarglist:
        pkgreqmap.append_arg(reqarg)
    pkgreqmap.resolve_matchlist(self.logobj, self.options['--url'],
                                self.options['--skip-logged'])

    pkgidx = PackageIndex(index_url=self.options['--url'])
    show_sepline = False

    # Main loop.
    distlist = []
    ok_packages = []
    while len(pkgreqmap) > 0:
        new_pkgreqmap = reqmap()
        for idx, total, pkgreqobj in pkgreqmap.reqobj_seq():
            pkgname = pkgreqobj.project_name
            if pkgname in ok_packages:
                continue
            ok_packages.append(pkgname)
            reqstr = str(pkgreqobj)

            if show_sepline:
                self.pkgsys.sepline()
            else:
                show_sepline = True
            self.pkgsys.info('======== %s: %d/%d ========' % \
                             (pkgname, idx + 1, total))

            if self.options['--skip-broken']:
                try:
                    self.logobj.check_broken(pkgname)
                except:
                    continue

            # Collect values into args step by step.
            args = copy.copy(self.options)
            args['self'] = self.arg0

            self.pkgsys.begin('Downloading %s' % reqstr)
            try:
                dist = pkgidx.fetch_distribution(pkgreqobj,
                                                 self.options['--download-dir'],
                                                 source=True)
                if dist is None:
                    raise RuntimeError, 'None'
            except:
                self.pkgsys.end(False)
                self.logobj.in_except(pkgname, 'Download %s failed' % reqstr)
                continue
            else:
                self.pkgsys.end(True)

            self.pkgsys.begin('Unpacking %s' % dist.location)
            try:
                smart_archive(args, dist, self.options['--unpack-dir'])
            except:
                self.pkgsys.end(False)
                self.logobj.in_except(pkgname, 'Unpack %s failed' % reqstr)
                continue
            else:
                self.pkgsys.end(True)
            unpackpath = args['unpackpath']

            config_secs = ['%s-%s' % (dist.project_name, dist.version),
                           dist.project_name]
            for secname in config_secs:
                for name, value in config.items(secname):
                    if name not in args:
                        args[name] = value
            if not 'patches' in args:
                args['patches'] = []
            else:
                args['patches'] = args['patches'].split()

            # Apply patches.
            for patch in config.patches(config_secs):
                self.pkgsys.begin('Applying %s' % os.path.basename(patch))
                os.system('(cd %s; patch -p0 < %s) > /dev/null' % \
                          (unpackpath, patch))
                self.pkgsys.end(True)

            if os.path.isfile(os.path.join(unpackpath, 'fixsetup.py')):
                os.system('(cd %s; python fixsetup.py)' % unpackpath)

            self.pkgsys.begin('Get package args')
            try:
                get_package_args(args, dist)
            except:
                self.pkgsys.end(False)
                self.logobj.in_except(pkgname, 'Get package args failed')
                continue
            else:
                self.pkgsys.end(True)

            self.pkgsys.begin('Setup args')
            try:
                self.pkgsys.setup_args(args)
            except:
                self.pkgsys.end(False)
                self.logobj.in_except(pkgname, 'pkgsys.setup_args failed')
                continue
            else:
                self.pkgsys.end(True)

            self.pkgsys.begin('Writing %s' % args['output'])
            try:
                ensure_dir(os.path.dirname(args['output']))
                if smart_write(args['output'],
                               os.path.join(pkgroot, args['template']),
                               args):
                    updated = True
                if smart_symlink(args['pkgpath'],
                                 os.path.join(args['filedir'], args['pkgfile'])):
                    updated = True
                if args['patches'] != []:
                    ensure_dir(args['patchdir'])
                    for patch in config.patches(config_secs):
                        tgtpatch = os.path.join(args['patchdir'],
                                                os.path.basename(patch))
                        if smart_symlink(patch, tgtpatch):
                            updated = True
            except:
                self.pkgsys.end(False)
                self.logobj.in_except(pkgname, 'write failed')
                continue
            else:
                self.pkgsys.end(True)

            self.pkgsys.begin('Postprocess %s' % args['output'])
            try:
                self.pkgsys.process(args)
            except:
                self.pkgsys.end(False)
                self.logobj.in_except(pkgname, 'process failed')
                continue
            else:
                self.pkgsys.end(True)

            if self.options['--deps']:
                reqstrlist = args['install_requires']
                for k in args['extras_require'].keys():
                    reqstrlist.extend(args['extras_require'][k])
                for reqstr in reqstrlist:
                    new_pkgreqmap.add(reqstr2obj(reqstr))

            self.logobj.pkgname_ok(pkgname)
            if self.options['--cache-root'] != '':
                distlist.append(dist)

            # Process of a single package is finished.

        pkgreqmap = new_pkgreqmap

    if self.options['--cache-root']:
        cache = pypicache(self.pkgsys, self.options['--cache-root'],
                          self.options['--cache-url'])
        cache.add_packages(distlist)
        del(cache)
def runit():
    # process command-line options
    bool_opts = map(translate_longopt, stdeb_cmd_bool_opts)
    bool_opts.append("process-dependencies")
    parser = FancyGetopt(stdeb_cmdline_opts +
                         [("help", "h", "show detailed help message")] +
                         EXTRA_OPTS)
    optobj = OptObj()
    args = parser.getopt(object=optobj)
    idx = PackageIndex()
    for option in optobj.__dict__:
        value = getattr(optobj, option)
        is_string = type(value) == str
        if option in bool_opts and is_string:
            setattr(optobj, option, strtobool(value))

    if hasattr(optobj, "help"):
        print USAGE
        parser.set_option_table(stdeb_cmdline_opts + EXTRA_OPTS)
        parser.print_help("Options:")
        return 0

    if len(args) != 1:
        log.error("not given single argument (distfile), args=%r", args)
        print USAGE
        return 1

    sdist_file = args[0]
    package = None

    final_dist_dir = optobj.__dict__.get("dist_dir", "deb_dist")
    tmp_dist_dir = os.path.join(final_dist_dir, "tmp_py2dsc")
    if os.path.exists(tmp_dist_dir):
        shutil.rmtree(tmp_dist_dir)
    os.makedirs(tmp_dist_dir)

    if not os.path.isfile(sdist_file):
        for ext in EXTENSIONS:
            if sdist_file.endswith(ext):
                raise IOError, "File not found"
        package = Requirement.parse(sdist_file)
        log.info("Package %s not found, trying PyPI..." % sdist_file)
        dist = idx.fetch_distribution(package, final_dist_dir,
                                      force_scan=True, source=True)
        if hasattr(dist, "location"):
            sdist_file = dist.location
        else:
            raise Exception, "Distribution not found on PyPI"
        log.info("Got %s", sdist_file)

    dist = list(distros_for_filename(sdist_file))[0]
    idx.scan_egg_links(dist.location)
    package = idx.obtain(Requirement.parse(dist.project_name))

    if hasattr(optobj, "process_dependencies"):
        if bool(int(getattr(optobj, "process_dependencies"))):
            backup_argv = sys.argv[:]
            oldargv = sys.argv[:]
            oldargv.pop(-1)

            if package.requires():
                log.info("Processing package dependencies for %s", package)
            for req in package.requires():
                # print >> sys.stderr
                new_argv = oldargv + ["%s" % req]
                log.info("Building dependency package %s", req)
                log.info(" running '%s'", " ".join(new_argv))
                sys.argv = new_argv
                runit()
                # print >> sys.stderr
            if package.requires():
                log.info("Completed building dependencies "
                         "for %s, continuing...", package)

            sys.argv = backup_argv

    if package is not None and hasattr(optobj, "extra_cfg_file"):
        # Allow one to have patch-files setup on config file for example
        local_parser = SafeConfigParser()
        local_parser.readfp(open(optobj.__dict__.get("extra_cfg_file")))
        if local_parser.has_section(package.project_name):
            for opt in local_parser.options(package.project_name):
                _opt = opt.replace("_", "-")
                if parser.has_option(_opt) or parser.has_option(_opt + "="):
                    setattr(optobj, opt,
                            local_parser.get(package.project_name, opt))

    patch_file = optobj.__dict__.get("patch_file", None)
    patch_level = int(optobj.__dict__.get("patch_level", 0))
    patch_posix = int(optobj.__dict__.get("patch_posix", 0))

    expand_dir = os.path.join(tmp_dist_dir, "stdeb_tmp")
    if os.path.exists(expand_dir):
        shutil.rmtree(expand_dir)
    if not os.path.exists(tmp_dist_dir):
        os.mkdir(tmp_dist_dir)
    os.mkdir(expand_dir)

    expand_sdist_file(os.path.abspath(sdist_file), cwd=expand_dir)

    # now the sdist package is expanded in expand_dir
    expanded_root_files = os.listdir(expand_dir)
    assert len(expanded_root_files) == 1
    repackaged_dirname = expanded_root_files[0]
    fullpath_repackaged_dirname = os.path.join(tmp_dist_dir, repackaged_dirname)
    base_dir = os.path.join(expand_dir, expanded_root_files[0])
    if os.path.exists(fullpath_repackaged_dirname):
        # prevent weird build errors if this dir exists
        shutil.rmtree(fullpath_repackaged_dirname)
    os.renames(base_dir, fullpath_repackaged_dirname)
    del base_dir  # no longer useful

    ##############################################
    if patch_file is not None:
        log.info("py2dsc applying patch %s", patch_file)
        apply_patch(patch_file, posix=patch_posix,
                    level=patch_level, cwd=fullpath_repackaged_dirname)
        patch_already_applied = 1
    else:
        patch_already_applied = 0
    ##############################################

    abs_dist_dir = os.path.abspath(final_dist_dir)

    extra_args = []
    for long in parser.long_opts:
        if long in ["dist-dir=", "patch-file=", "process-dependencies"]:
            continue  # dealt with by this invocation
        attr = parser.get_attr_name(long).rstrip("=")
        if hasattr(optobj, attr):
            val = getattr(optobj, attr)
            if attr == "extra_cfg_file":
                val = os.path.abspath(val)
            if long in bool_opts or long.replace("-", "_") in bool_opts:
                extra_args.append("--%s" % long)
            else:
                extra_args.append("--" + long + str(val))

    if patch_already_applied == 1:
        extra_args.append("--patch-already-applied")

    args = [
        sys.executable, "-c",
        "import stdeb, sys; f='setup.py'; " +
        "sys.argv[0]=f; execfile(f,{'__file__':f,'__name__':'__main__'})",
        "sdist_dsc",
        "--dist-dir=%s" % abs_dist_dir,
        "--use-premade-distfile=%s" % os.path.abspath(sdist_file),
    ] + extra_args

    log.info("-=" * 35 + "-")
    # print >> sys.stderr, '-='*20
    # print >> sys.stderr, "Note that the .cfg file(s), if present, have not "\
    #     "been read at this stage. If options are necessary, pass them from "\
    #     "the command line"
    log.info("running the following command in directory: %s\n%s",
             fullpath_repackaged_dirname, " ".join(args))
    log.info("-=" * 35 + "-")

    try:
        returncode = subprocess.call(args, cwd=fullpath_repackaged_dirname)
    except:
        log.error("ERROR running: %s", " ".join(args))
        log.error("ERROR in %s", fullpath_repackaged_dirname)
        raise

    if returncode:
        log.error("ERROR running: %s", " ".join(args))
        log.error("ERROR in %s", fullpath_repackaged_dirname)
        # log.error(' stderr: %s'res.stderr.read())
        # print >> sys.stderr, 'ERROR running: %s'%(' '.join(args),)
        # print >> sys.stderr, res.stderr.read()
        return returncode
        # raise RuntimeError('returncode %d'%returncode)
    # result = res.stdout.read().strip()

    shutil.rmtree(tmp_dist_dir)
    return returncode