def xpand(myid, mydest):
	"""Unpack an xpak (index, data) pair into the directory mydest.

	@param myid: (index_bytes, data_bytes) pair; the index is a sequence of
		records of the form (4-byte namelen, name, 4-byte datapos,
		4-byte datalen), with offsets relative to the data segment.
	@param mydest: destination directory; files are created relative to it.
	"""
	myindex = myid[0]
	mydata = myid[1]
	try:
		origdir = os.getcwd()
	except OSError:
		# The current directory may no longer exist; fall back to root so
		# we have somewhere valid to return to.
		os.chdir("/")
		origdir = "/"
	os.chdir(mydest)
	try:
		myindexlen = len(myindex)
		startpos = 0
		# Need at least 8 more bytes (namelen + one offset) for a record.
		while (startpos + 8) < myindexlen:
			namelen = decodeint(myindex[startpos:startpos + 4])
			datapos = decodeint(
				myindex[startpos + 4 + namelen:startpos + 8 + namelen])
			datalen = decodeint(
				myindex[startpos + 8 + namelen:startpos + 12 + namelen])
			myname = myindex[startpos + 4:startpos + 4 + namelen]
			dirname = os.path.dirname(myname)
			if dirname and not os.path.exists(dirname):
				os.makedirs(dirname)
			# Context manager guarantees the file is closed even if the
			# write raises (the original leaked the handle in that case).
			with open(_unicode_encode(myname,
				encoding=_encodings['fs'], errors='strict'), 'wb') as mydat:
				mydat.write(mydata[datapos:datapos + datalen])
			startpos = startpos + namelen + 12
	finally:
		# Always restore the caller's working directory.
		os.chdir(origdir)
def FindVCS(cwd=None):
	"""
	Try to figure out in what VCS' working tree we are.

	@param cwd: working directory (default is os.getcwd())
	@type cwd: str
	@return: list of strings describing the discovered vcs types
	@rtype: list
	"""
	if cwd is None:
		cwd = os.getcwd()

	outvcs = []

	def seek(depth=None):
		"""Seek for VCSes that have a top-level data directory only."""
		retvcs = []
		pathprep = cwd

		while depth is None or depth > 0:
			for vcs_type in _FindVCS_data:
				vcs_dir = os.path.join(pathprep, vcs_type.dir_name)
				if os.path.isdir(vcs_dir):
					logging.debug(
						'FindVCS: found %(name)s dir: %(vcs_dir)s' % {
							'name': vcs_type.name,
							'vcs_dir': os.path.abspath(vcs_dir)})
					retvcs.append(vcs_type.name)

			if retvcs:
				break
			pathprep = os.path.join(pathprep, '..')
			if os.path.realpath(pathprep).strip('/') == '':
				break
			if depth is not None:
				depth = depth - 1

		return retvcs

	# Level zero VCS-es: they keep a data directory in *every* tree
	# directory, so only the current directory needs checking.
	if os.path.isdir(os.path.join(cwd, 'CVS')):
		outvcs.append('cvs')
	# BUG FIX: the original tested '.svn' relative to the process cwd
	# (ignoring the cwd argument) and appended a *path* instead of the
	# vcs type name, contradicting the documented return value.
	if os.path.isdir(os.path.join(cwd, '.svn')):  # <1.7
		outvcs.append('svn')

	# If we already found one of 'level zeros', just take a quick look
	# at the current directory. Otherwise, seek parents till we get
	# something or reach root.
	if outvcs:
		outvcs.extend(seek(1))
	else:
		outvcs = seek()

	if len(outvcs) > 1:
		# eliminate duplicates, like for svn in bug #391199
		outvcs = list(set(outvcs))

	return outvcs
def xpand(myid, mydest):
	"""Unpack an xpak (index, data) pair into the directory mydest.

	@param myid: (index_bytes, data_bytes) pair; the index is a sequence of
		records of the form (4-byte namelen, name, 4-byte datapos,
		4-byte datalen), with offsets relative to the data segment.
	@param mydest: destination directory; files are created relative to it.
	"""
	myindex = myid[0]
	mydata = myid[1]
	try:
		origdir = os.getcwd()
	except OSError:
		# The current directory may no longer exist; fall back to root so
		# we have somewhere valid to return to.
		os.chdir("/")
		origdir = "/"
	os.chdir(mydest)
	try:
		myindexlen = len(myindex)
		startpos = 0
		# Need at least 8 more bytes (namelen + one offset) for a record.
		while (startpos + 8) < myindexlen:
			namelen = decodeint(myindex[startpos:startpos + 4])
			datapos = decodeint(
				myindex[startpos + 4 + namelen:startpos + 8 + namelen])
			datalen = decodeint(
				myindex[startpos + 8 + namelen:startpos + 12 + namelen])
			myname = myindex[startpos + 4:startpos + 4 + namelen]
			dirname = os.path.dirname(myname)
			if dirname and not os.path.exists(dirname):
				os.makedirs(dirname)
			# Context manager guarantees the file is closed even if the
			# write raises (the original leaked the handle in that case).
			with open(_unicode_encode(myname,
				encoding=_encodings['fs'], errors='strict'), 'wb') as mydat:
				mydat.write(mydata[datapos:datapos + datalen])
			startpos = startpos + namelen + 12
	finally:
		# Always restore the caller's working directory.
		os.chdir(origdir)
def FindVCS(cwd=None):
	"""
	Try to figure out in what VCS' working tree we are.

	@param cwd: working directory (default is os.getcwd())
	@type cwd: str
	@return: list of strings describing the discovered vcs types
	@rtype: list
	"""
	if cwd is None:
		cwd = os.getcwd()

	outvcs = []

	def seek(depth=None):
		"""Seek for VCSes that have a top-level data directory only."""
		retvcs = []
		pathprep = cwd

		while depth is None or depth > 0:
			for vcs_type in _FindVCS_data:
				vcs_dir = os.path.join(pathprep, vcs_type.dir_name)
				if os.path.isdir(vcs_dir):
					logging.debug(
						'FindVCS: found %(name)s dir: %(vcs_dir)s' % {
							'name': vcs_type.name,
							'vcs_dir': os.path.abspath(vcs_dir)})
					retvcs.append(vcs_type.name)

			if retvcs:
				break
			pathprep = os.path.join(pathprep, '..')
			if os.path.realpath(pathprep).strip('/') == '':
				break
			if depth is not None:
				depth = depth - 1

		return retvcs

	# Level zero VCS-es: they keep a data directory in *every* tree
	# directory, so only the current directory needs checking.
	if os.path.isdir(os.path.join(cwd, 'CVS')):
		outvcs.append('cvs')
	# BUG FIX: the original tested '.svn' relative to the process cwd
	# (ignoring the cwd argument) and appended a *path* instead of the
	# vcs type name, contradicting the documented return value.
	if os.path.isdir(os.path.join(cwd, '.svn')):  # <1.7
		outvcs.append('svn')

	# If we already found one of 'level zeros', just take a quick look
	# at the current directory. Otherwise, seek parents till we get
	# something or reach root.
	if outvcs:
		outvcs.extend(seek(1))
	else:
		outvcs = seek()

	if len(outvcs) > 1:
		# eliminate duplicates, like for svn in bug #391199
		outvcs = list(set(outvcs))

	return outvcs
def unpackinfo(self, mydest):
	"""Unpacks all the files from the dataSegment into 'mydest'.

	@param mydest: destination directory; created if it does not exist.
	@return: 1 on success, 0 if scan() fails.
	"""
	if not self.scan():
		return 0
	try:
		origdir = os.getcwd()
	except OSError:
		# The current directory may no longer exist; fall back to root so
		# we have somewhere valid to return to.
		os.chdir("/")
		origdir = "/"
	# Open the archive *before* chdir: self.file may be a relative path.
	with open(_unicode_encode(self.file,
		encoding=_encodings['fs'], errors='strict'), 'rb') as a:
		if not os.path.exists(mydest):
			os.makedirs(mydest)
		os.chdir(mydest)
		try:
			startpos = 0
			# Each index record: 4-byte namelen, name, 4-byte datapos,
			# 4-byte datalen (offsets relative to self.datapos).
			while (startpos + 8) < self.indexsize:
				namelen = decodeint(self.index[startpos:startpos + 4])
				datapos = decodeint(self.index[
					startpos + 4 + namelen:startpos + 8 + namelen])
				datalen = decodeint(self.index[
					startpos + 8 + namelen:startpos + 12 + namelen])
				myname = self.index[startpos + 4:startpos + 4 + namelen]
				myname = _unicode_decode(myname,
					encoding=_encodings['repo.content'], errors='replace')
				dirname = os.path.dirname(myname)
				if dirname and not os.path.exists(dirname):
					os.makedirs(dirname)
				# Context manager closes the output file even if the
				# write raises (the original leaked it in that case).
				with open(_unicode_encode(myname,
					encoding=_encodings['fs'], errors='strict'),
					'wb') as mydat:
					a.seek(self.datapos + datapos)
					mydat.write(a.read(datalen))
				startpos = startpos + namelen + 12
		finally:
			# Always restore the caller's working directory.
			os.chdir(origdir)
	return 1
def unpackinfo(self, mydest):
	"""Unpacks all the files from the dataSegment into 'mydest'.

	@param mydest: destination directory; created if it does not exist.
	@return: 1 on success, 0 if scan() fails.
	"""
	if not self.scan():
		return 0
	try:
		origdir = os.getcwd()
	except OSError:
		# The current directory may no longer exist; fall back to root so
		# we have somewhere valid to return to.
		os.chdir("/")
		origdir = "/"
	# Open the archive *before* chdir: self.file may be a relative path.
	with open(_unicode_encode(self.file,
		encoding=_encodings['fs'], errors='strict'), 'rb') as a:
		if not os.path.exists(mydest):
			os.makedirs(mydest)
		os.chdir(mydest)
		try:
			startpos = 0
			# Each index record: 4-byte namelen, name, 4-byte datapos,
			# 4-byte datalen (offsets relative to self.datapos).
			while (startpos + 8) < self.indexsize:
				namelen = decodeint(self.index[startpos:startpos + 4])
				datapos = decodeint(self.index[
					startpos + 4 + namelen:startpos + 8 + namelen])
				datalen = decodeint(self.index[
					startpos + 8 + namelen:startpos + 12 + namelen])
				myname = self.index[startpos + 4:startpos + 4 + namelen]
				myname = _unicode_decode(myname,
					encoding=_encodings['repo.content'], errors='replace')
				dirname = os.path.dirname(myname)
				if dirname and not os.path.exists(dirname):
					os.makedirs(dirname)
				# Context manager closes the output file even if the
				# write raises (the original leaked it in that case).
				with open(_unicode_encode(myname,
					encoding=_encodings['fs'], errors='strict'),
					'wb') as mydat:
					a.seek(self.datapos + datapos)
					mydat.write(a.read(datalen))
				startpos = startpos + namelen + 12
		finally:
			# Always restore the caller's working directory.
			os.chdir(origdir)
	return 1
def FindPortdir(settings):
	"""
	Try to figure out what repo we are in and whether we are in a regular
	tree or an overlay.

	Basic logic is:

	1. Determine what directory we are in (supports symlinks).
	2. Build a list of directories from / to our current location.
	3. Iterate over PORTDIR_OVERLAY; if we find a match, search for a
	   profiles directory in the overlay. If it has one, make it portdir,
	   otherwise make it portdir_overlay.
	4. If we didn't find an overlay in PORTDIR_OVERLAY, see if we are in
	   PORTDIR; if so, set portdir_overlay to PORTDIR. If we aren't in
	   PORTDIR, see if PWD has a profiles dir; if so, set portdir_overlay
	   and portdir to PWD, else make them False.
	5. If we haven't found portdir_overlay yet, it means the user is doing
	   something odd; report an error.
	6. If we haven't found a portdir yet, set portdir to PORTDIR.

	@param settings: portage.config instance, preferably repoman_settings
	@return: list of [portdir, portdir_overlay, location], or
		(None, None, None) when neither can be determined
	"""
	portdir = None
	portdir_overlay = None
	location = os.getcwd()
	pwd = _unicode_decode(os.environ.get('PWD', ''), encoding=_encodings['fs'])
	if pwd and pwd != location and os.path.realpath(pwd) == location:
		# getcwd() returns the canonical path but that makes it hard for repoman to
		# orient itself if the user has symlinks in their portage tree structure.
		# We use os.environ["PWD"], if available, to get the non-canonical path of
		# the current working directory (from the shell).
		location = pwd

	location = normalize_path(location)

	# Map (st_dev, st_ino) of every ancestor of location back to its path,
	# so an overlay can be matched by filesystem identity even when the
	# configured overlay path and our location differ through symlinks.
	path_ids = {}
	p = location
	s = None
	while True:
		s = os.stat(p)
		path_ids[(s.st_dev, s.st_ino)] = p
		if p == "/":
			break
		p = os.path.dirname(p)
	if location[-1] != "/":
		location += "/"

	for overlay in portage.util.shlex_split(settings["PORTDIR_OVERLAY"]):
		overlay = os.path.realpath(overlay)
		try:
			s = os.stat(overlay)
		except OSError:
			# Nonexistent overlay entry; skip it.
			continue
		overlay = path_ids.get((s.st_dev, s.st_ino))
		if overlay is None:
			# This overlay is not an ancestor of our location.
			continue
		if overlay[-1] != "/":
			overlay += "/"
		# Dead "if True:" wrapper from the original removed; its body ran
		# unconditionally at this point.
		portdir_overlay = overlay
		subdir = location[len(overlay):]
		if subdir and subdir[-1] != "/":
			subdir += "/"
		if have_profile_dir(location, subdir.count("/")):
			portdir = portdir_overlay
		break

	# Couldn't match location with anything from PORTDIR_OVERLAY,
	# so fall back to have_profile_dir() checks alone. Assume that
	# an overlay will contain at least a "repo_name" file while a
	# master repo (portdir) will contain at least a "profiles.desc"
	# file.
	if not portdir_overlay:
		portdir_overlay = have_profile_dir(location, filename="repo_name")
		if not portdir_overlay:
			portdir_overlay = have_ebuild_dir(location)
		if portdir_overlay:
			subdir = location[len(portdir_overlay):]
			if subdir and subdir[-1] != os.sep:
				subdir += os.sep
			if have_profile_dir(location, subdir.count(os.sep)):
				portdir = portdir_overlay

	if not portdir_overlay:
		if (settings["PORTDIR"] + os.path.sep).startswith(location):
			portdir_overlay = settings["PORTDIR"]
		else:
			portdir_overlay = have_profile_dir(location)
		portdir = portdir_overlay

	if not portdir_overlay:
		msg = 'Repoman is unable to determine PORTDIR or PORTDIR_OVERLAY' + \
			' from the current working directory'
		logging.critical(msg)
		return (None, None, None)

	if not portdir:
		portdir = settings["PORTDIR"]

	# Normalize to trailing slashes for consistent prefix comparisons.
	if not portdir_overlay.endswith('/'):
		portdir_overlay += '/'
	if not portdir.endswith('/'):
		portdir += '/'

	return [normalize_path(x) for x in (portdir, portdir_overlay, location)]