def _setitem(self, cpv, values):
    """Write the cache entry for cpv to a temp file in the same
    directory, then atomically rename it into place.

    @param cpv: category/package-version key being written
    @param values: metadata mapping; each key in self.auxdbkey_order is
        written as one line (missing keys become empty lines) and
        values["_mtime_"] is applied to the finished file.
    @raises cache_errors.CacheCorruption: wrapping any OSError/IOError
        from open, mkdir-retry, or rename.
    """
    s = cpv.rfind("/")
    # Temp file lives in the target directory so os.rename() below is an
    # atomic same-filesystem move; pid + package name keep it unique.
    fp = os.path.join(self.location, cpv[:s],
        ".update.%i.%s" % (os.getpid(), cpv[s + 1:]))
    try:
        myf = codecs.open(_unicode_encode(fp,
            encoding=_encodings['fs'], errors='strict'),
            mode='w', encoding=_encodings['repo.content'],
            errors='backslashreplace')
    except (OSError, IOError) as e:
        if errno.ENOENT == e.errno:
            # The category directory may not exist yet; create it and retry.
            try:
                self._ensure_dirs(cpv)
                myf = codecs.open(_unicode_encode(fp,
                    encoding=_encodings['fs'], errors='strict'),
                    mode='w', encoding=_encodings['repo.content'],
                    errors='backslashreplace')
            except (OSError, IOError) as e:
                raise cache_errors.CacheCorruption(cpv, e)
        else:
            raise cache_errors.CacheCorruption(cpv, e)

    # FIX: close the handle even if a write raises, so the descriptor is
    # not leaked; this matches the try/finally pattern used by the other
    # _setitem implementations in this file.
    try:
        for x in self.auxdbkey_order:
            myf.write(values.get(x, "") + "\n")
    finally:
        myf.close()
    self._ensure_access(fp, mtime=values["_mtime_"])

    # Update written; now move it into its final location.
    new_fp = os.path.join(self.location, cpv)
    try:
        os.rename(fp, new_fp)
    except (OSError, IOError) as e:
        # Best effort: drop the orphaned temp file before reporting.
        os.remove(fp)
        raise cache_errors.CacheCorruption(cpv, e)
def _setitem(self, cpv, values):
    """Write the cache entry for cpv atomically via a named temp file.

    The temp file is created with delete=False in self.location (same
    filesystem, so the final os.rename() is atomic) and closed
    immediately -- only its name is used afterwards.

    @param cpv: category/package-version key being written
    @param values: metadata mapping; keys from self._write_keys with
        truthy values are written as "key=value" lines
    @raises cache_errors.CacheCorruption: on any environment error
    """
    with tempfile.NamedTemporaryFile(delete=False, dir=self.location,
        prefix=cpv.replace('/', '_')) as temp:
        # Close right away; the file stays on disk (delete=False) and is
        # re-opened below with the proper text encoding.
        temp.close()
        try:
            with io.open(temp.name, mode='w',
                encoding=_encodings['repo.content'],
                errors='backslashreplace') as myf:
                for k in self._write_keys:
                    v = values.get(k)
                    if not v:
                        continue
                    # NOTE: This format string requires unicode_literals, so that
                    # k and v are coerced to unicode, in order to prevent TypeError
                    # when writing raw bytes to TextIOWrapper with Python 2.
                    myf.write("%s=%s\n" % (k, v))
            self._ensure_access(temp.name)
            # Update written, we can move it.
            new_fp = os.path.join(self.location, cpv)
            try:
                os.rename(temp.name, new_fp)
            except OSError as e:
                if e.errno == errno.ENOENT:
                    # Category directory missing: create it and retry once.
                    self._ensure_dirs(cpv)
                    os.rename(temp.name, new_fp)
                else:
                    raise cache_errors.CacheCorruption(cpv, e)
        except EnvironmentError as e:
            # Any OS-level failure above: remove the orphan temp file and
            # report corruption to the caller.
            os.remove(temp.name)
            raise cache_errors.CacheCorruption(cpv, e)
def _delitem(self, cpv):
    """Remove the cache file for cpv.

    Raises KeyError when no entry exists, and wraps any other OS error
    in cache_errors.CacheCorruption.
    """
    target = os.path.join(self.location, cpv)
    try:
        os.remove(target)
    except OSError as e:
        if e.errno == errno.ENOENT:
            raise KeyError(cpv)
        raise cache_errors.CacheCorruption(cpv, e)
def _delitem(self, cpv):
    """Delete the on-disk entry for cpv.

    A missing file surfaces as KeyError (dict-like contract); any other
    OS failure is reported as cache_errors.CacheCorruption.
    """
    entry_path = os.path.join(self.location, cpv)
    try:
        os.remove(entry_path)
    except OSError as err:
        missing = (err.errno == errno.ENOENT)
        if missing:
            raise KeyError(cpv)
        else:
            raise cache_errors.CacheCorruption(cpv, err)
def _setitem(self, cpv, values):
    """Persist the metadata for cpv: write "key=value" lines to a temp
    file in the destination directory, then atomically rename it over
    the final path.

    Raises cache_errors.CacheCorruption for any OS-level failure.
    """
    split = cpv.rfind("/")
    tmp_path = os.path.join(self.location, cpv[:split],
        ".update.%i.%s" % (os.getpid(), cpv[split + 1:]))

    def _open_tmp():
        # Single place for the (verbose) encoded-path open call.
        return io.open(_unicode_encode(tmp_path,
            encoding=_encodings['fs'], errors='strict'),
            mode='w', encoding=_encodings['repo.content'],
            errors='backslashreplace')

    try:
        outfile = _open_tmp()
    except (IOError, OSError) as e:
        if e.errno != errno.ENOENT:
            raise cache_errors.CacheCorruption(cpv, e)
        # Directory missing: create it and retry the open once.
        try:
            self._ensure_dirs(cpv)
            outfile = _open_tmp()
        except (OSError, IOError) as e:
            raise cache_errors.CacheCorruption(cpv, e)

    try:
        for key in self._write_keys:
            value = values.get(key)
            if not value:
                continue
            # NOTE: This format string requires unicode_literals, so that
            # key and value are coerced to unicode, in order to prevent
            # TypeError when writing raw bytes to TextIOWrapper with Python 2.
            outfile.write("%s=%s\n" % (key, value))
    finally:
        outfile.close()
    self._ensure_access(tmp_path)

    # Update written. Now move it into place (atomic on same filesystem).
    final_path = os.path.join(self.location, cpv)
    try:
        os.rename(tmp_path, final_path)
    except (OSError, IOError) as e:
        os.remove(tmp_path)
        raise cache_errors.CacheCorruption(cpv, e)
def _setitem(self, cpv, values):
    """Write the cache entry for cpv: serialize self._write_keys to a
    temp file beside the destination, then atomically rename it into
    place. Raises cache_errors.CacheCorruption on any OS failure.
    """
    s = cpv.rfind("/")
    # Temp name embeds pid + package name; same dir keeps rename atomic.
    fp = os.path.join(
        self.location, cpv[:s], ".update.%i.%s" % (os.getpid(), cpv[s + 1 :])
    )
    try:
        myf = io.open(
            _unicode_encode(fp, encoding=_encodings["fs"], errors="strict"),
            mode="w",
            encoding=_encodings["repo.content"],
            errors="backslashreplace",
        )
    except (IOError, OSError) as e:
        if errno.ENOENT == e.errno:
            # Category directory may not exist yet; create it and retry.
            try:
                self._ensure_dirs(cpv)
                myf = io.open(
                    _unicode_encode(fp, encoding=_encodings["fs"], errors="strict"),
                    mode="w",
                    encoding=_encodings["repo.content"],
                    errors="backslashreplace",
                )
            except (OSError, IOError) as e:
                raise cache_errors.CacheCorruption(cpv, e)
        else:
            raise cache_errors.CacheCorruption(cpv, e)
    # try/finally guarantees the handle is closed even if a write fails.
    try:
        for k in self._write_keys:
            v = values.get(k)
            if not v:
                continue
            # NOTE: This format string requires unicode_literals, so that
            # k and v are coerced to unicode, in order to prevent TypeError
            # when writing raw bytes to TextIOWrapper with Python 2.
            myf.write("%s=%s\n" % (k, v))
    finally:
        myf.close()
    self._ensure_access(fp)

    # update written. now we move it.
    new_fp = os.path.join(self.location, cpv)
    try:
        os.rename(fp, new_fp)
    except (OSError, IOError) as e:
        # Drop the orphaned temp file before reporting corruption.
        os.remove(fp)
        raise cache_errors.CacheCorruption(cpv, e)
def _setitem(self, cpv, values):
    """Write the cache entry for cpv to a temp file in the destination
    directory, then atomically rename it into place.

    @param cpv: category/package-version key being written
    @param values: metadata mapping; keys from self._write_keys with
        truthy values are written as "key=value" lines
    @raises cache_errors.CacheCorruption: wrapping any OSError/IOError
    """
    # FIX: removed dead commented-out debugger invocation
    # ("import pdb;pdb.set_trace()") left over from debugging.
    s = cpv.rfind("/")
    # Temp file in the target directory keeps os.rename() atomic.
    fp = os.path.join(self.location, cpv[:s],
        ".update.%i.%s" % (os.getpid(), cpv[s + 1:]))
    try:
        myf = codecs.open(_unicode_encode(fp,
            encoding=_encodings['fs'], errors='strict'),
            mode='w', encoding=_encodings['repo.content'],
            errors='backslashreplace')
    except (IOError, OSError) as e:
        if errno.ENOENT == e.errno:
            # Category directory may not exist yet; create it and retry.
            try:
                self._ensure_dirs(cpv)
                myf = codecs.open(_unicode_encode(
                    fp, encoding=_encodings['fs'], errors='strict'),
                    mode='w', encoding=_encodings['repo.content'],
                    errors='backslashreplace')
            except (OSError, IOError) as e:
                raise cache_errors.CacheCorruption(cpv, e)
        else:
            raise cache_errors.CacheCorruption(cpv, e)

    # Close the handle even if a write fails.
    try:
        for k in self._write_keys:
            v = values.get(k)
            if not v:
                continue
            myf.write("%s=%s\n" % (k, v))
    finally:
        myf.close()
    self._ensure_access(fp)

    # update written. now we move it.
    new_fp = os.path.join(self.location, cpv)
    try:
        os.rename(fp, new_fp)
    except (OSError, IOError) as e:
        os.remove(fp)
        raise cache_errors.CacheCorruption(cpv, e)
def _setitem(self, cpv, values):
    """Store the metadata for cpv: serialize self._write_keys into a
    mkstemp file inside self.location, then rename it over the final
    path (creating the category directory on demand).

    Raises cache_errors.CacheCorruption for any environment error.
    """
    try:
        fd, tmp_path = tempfile.mkstemp(dir=self.location)
    except EnvironmentError as e:
        raise cache_errors.CacheCorruption(cpv, e)

    with io.open(fd, mode='w',
        encoding=_encodings['repo.content'],
        errors='backslashreplace') as entry_file:
        for key in self._write_keys:
            value = values.get(key)
            if not value:
                continue
            # NOTE: This format string requires unicode_literals, so that
            # key and value are coerced to unicode, in order to prevent
            # TypeError when writing raw bytes to TextIOWrapper with Python 2.
            entry_file.write("%s=%s\n" % (key, value))
    self._ensure_access(tmp_path)

    # Entry written; move it into its final location.
    dest = os.path.join(self.location, cpv)
    try:
        os.rename(tmp_path, dest)
    except EnvironmentError as e:
        moved = False
        try:
            if e.errno != errno.ENOENT:
                raise cache_errors.CacheCorruption(cpv, e)
            # Destination directory missing: create it and retry once.
            try:
                self._ensure_dirs(cpv)
                os.rename(tmp_path, dest)
                moved = True
            except EnvironmentError as e:
                raise cache_errors.CacheCorruption(cpv, e)
        finally:
            # Whatever went wrong, never leave the temp file behind.
            if not moved:
                os.remove(tmp_path)
def _setitem(self, cpv, values):
    """Write the cache entry for cpv via tempfile.mkstemp + atomic
    rename; self._write_keys selects which values are serialized as
    "key=value" lines. Raises cache_errors.CacheCorruption on failure.
    """
    try:
        fd, fp = tempfile.mkstemp(dir=self.location)
    except EnvironmentError as e:
        raise cache_errors.CacheCorruption(cpv, e)

    # Wrap the mkstemp fd in a text stream; closed automatically by 'with'.
    with io.open(fd, mode='w',
        encoding=_encodings['repo.content'],
        errors='backslashreplace') as myf:
        for k in self._write_keys:
            v = values.get(k)
            if not v:
                continue
            # NOTE: This format string requires unicode_literals, so that
            # k and v are coerced to unicode, in order to prevent TypeError
            # when writing raw bytes to TextIOWrapper with Python 2.
            myf.write("%s=%s\n" % (k, v))
    self._ensure_access(fp)

    #update written. now we move it.
    new_fp = os.path.join(self.location, cpv)
    try:
        os.rename(fp, new_fp)
    except EnvironmentError as e:
        success = False
        try:
            if errno.ENOENT == e.errno:
                # Destination directory missing: create it and retry once.
                try:
                    self._ensure_dirs(cpv)
                    os.rename(fp, new_fp)
                    success = True
                except EnvironmentError as e:
                    raise cache_errors.CacheCorruption(cpv, e)
            else:
                raise cache_errors.CacheCorruption(cpv, e)
        finally:
            # On any failure path, remove the orphaned temp file.
            if not success:
                os.remove(fp)
def _setitem(self, cpv, values):
    """Write the cache entry for cpv to a temp file in the destination
    directory, then atomically rename it into place.

    @param cpv: category/package-version key being written
    @param values: metadata mapping; keys from self._write_keys with
        truthy values are written as "key=value" lines
    @raises cache_errors.CacheCorruption: wrapping any OSError/IOError
    """
    # FIX: removed dead commented-out debugger invocation
    # ("import pdb;pdb.set_trace()") left over from debugging.
    s = cpv.rfind("/")
    # Temp file in the target directory keeps os.rename() atomic.
    fp = os.path.join(self.location, cpv[:s],
        ".update.%i.%s" % (os.getpid(), cpv[s + 1:]))
    try:
        myf = codecs.open(_unicode_encode(fp,
            encoding=_encodings['fs'], errors='strict'),
            mode='w', encoding=_encodings['repo.content'],
            errors='backslashreplace')
    except (IOError, OSError) as e:
        if errno.ENOENT == e.errno:
            # Category directory may not exist yet; create it and retry.
            try:
                self._ensure_dirs(cpv)
                myf = codecs.open(_unicode_encode(fp,
                    encoding=_encodings['fs'], errors='strict'),
                    mode='w', encoding=_encodings['repo.content'],
                    errors='backslashreplace')
            except (OSError, IOError) as e:
                raise cache_errors.CacheCorruption(cpv, e)
        else:
            raise cache_errors.CacheCorruption(cpv, e)

    # Close the handle even if a write fails.
    try:
        for k in self._write_keys:
            v = values.get(k)
            if not v:
                continue
            myf.write("%s=%s\n" % (k, v))
    finally:
        myf.close()
    self._ensure_access(fp)

    #update written. now we move it.
    new_fp = os.path.join(self.location, cpv)
    try:
        os.rename(fp, new_fp)
    except (OSError, IOError) as e:
        os.remove(fp)
        raise cache_errors.CacheCorruption(cpv, e)
def _setitem(self, cpv, values):
    """Write the cache entry for cpv via tempfile.mkstemp + atomic
    rename; self._write_keys selects which values are serialized as
    "key=value" lines. Raises cache_errors.CacheCorruption on failure.
    """
    try:
        fd, fp = tempfile.mkstemp(dir=self.location)
    except EnvironmentError as e:
        raise cache_errors.CacheCorruption(cpv, e)

    # Wrap the mkstemp fd in a text stream; closed automatically by 'with'.
    with io.open(fd, mode="w",
        encoding=_encodings["repo.content"],
        errors="backslashreplace") as myf:
        for k in self._write_keys:
            v = values.get(k)
            if not v:
                continue
            myf.write("%s=%s\n" % (k, v))
    self._ensure_access(fp)

    # update written. now we move it.
    new_fp = os.path.join(self.location, cpv)
    try:
        os.rename(fp, new_fp)
    except EnvironmentError as e:
        success = False
        try:
            if errno.ENOENT == e.errno:
                # Destination directory missing: create it and retry once.
                try:
                    self._ensure_dirs(cpv)
                    os.rename(fp, new_fp)
                    success = True
                except EnvironmentError as e:
                    raise cache_errors.CacheCorruption(cpv, e)
            else:
                raise cache_errors.CacheCorruption(cpv, e)
        finally:
            # On any failure path, remove the orphaned temp file.
            if not success:
                os.remove(fp)
def UpdateChangeLog(pkgdir, user, msg, skel_path, category, package,
    new=(), removed=(), changed=(), pretend=False, quiet=False):
    """
    Write an entry to an existing ChangeLog, or create a new one.
    Updates copyright year on changed files, and updates the header of
    ChangeLog with the contents of skel.ChangeLog.

    Returns True when a brand-new ChangeLog was created, False when an
    existing one was updated, and None on error (root user or I/O
    failure).
    """

    # Refuse to attribute entries to root; ECHANGELOG_USER must be set.
    if '<root@' in user:
        if not quiet:
            logging.critical('Please set ECHANGELOG_USER or run as non-root')
        return None

    # ChangeLog times are in UTC
    gmtime = time.gmtime()
    year = time.strftime('%Y', gmtime)
    date = time.strftime('%d %b %Y', gmtime)

    # check modified files and the ChangeLog for copyright updates
    # patches and diffs (identified by .patch and .diff) are excluded
    for fn in chain(new, changed):
        if fn.endswith('.diff') or fn.endswith('.patch'):
            continue
        update_copyright(os.path.join(pkgdir, fn), year, pretend=pretend)

    cl_path = os.path.join(pkgdir, 'ChangeLog')
    clold_lines = []
    clnew_lines = []
    old_header_lines = []
    header_lines = []

    # Existing ChangeLog is optional; a missing file means we create one.
    clold_file = None
    try:
        clold_file = io.open(_unicode_encode(cl_path,
            encoding=_encodings['fs'], errors='strict'),
            mode='r', encoding=_encodings['repo.content'],
            errors='replace')
    except EnvironmentError:
        pass

    # NOTE(review): `f` starts as the raw mkstemp fd and is later rebound
    # to the io.open() wrapper around that same fd.
    f, clnew_path = mkstemp()

    # construct correct header first
    try:
        if clold_file is not None:
            # retain header from old ChangeLog
            first_line = True
            for line in clold_file:
                line_strip = line.strip()
                if line_strip and line[:1] != "#":
                    # First non-comment, non-blank line: body begins; save
                    # it for later and stop collecting header lines.
                    clold_lines.append(line)
                    break
                # always make sure cat/pkg is up-to-date in case we are
                # moving packages around, or copied from another pkg, or ...
                if first_line:
                    if line.startswith('# ChangeLog for'):
                        line = '# ChangeLog for %s/%s\n' % (category, package)
                    first_line = False
                old_header_lines.append(line)
                header_lines.append(_update_copyright_year(year, line))
                if not line_strip:
                    break

        clskel_file = None
        if not header_lines:
            # delay opening this until we find we need a header
            try:
                clskel_file = io.open(_unicode_encode(skel_path,
                    encoding=_encodings['fs'], errors='strict'),
                    mode='r', encoding=_encodings['repo.content'],
                    errors='replace')
            except EnvironmentError:
                pass

        if clskel_file is not None:
            # read skel.ChangeLog up to first empty line
            for line in clskel_file:
                line_strip = line.strip()
                if not line_strip:
                    break
                line = line.replace('<CATEGORY>', category)
                line = line.replace('<PACKAGE_NAME>', package)
                line = _update_copyright_year(year, line)
                header_lines.append(line)
            header_lines.append('\n')
            clskel_file.close()

        # write new ChangeLog entry
        clnew_lines.extend(header_lines)
        newebuild = False
        for fn in new:
            if not fn.endswith('.ebuild'):
                continue
            # Strip the ".ebuild" suffix (7 chars) to get the version name.
            ebuild = fn.split(os.sep)[-1][0:-7]
            clnew_lines.append('*%s (%s)\n' % (ebuild, date))
            newebuild = True
        if newebuild:
            clnew_lines.append('\n')
        trivial_files = ('ChangeLog', 'Manifest')
        display_new = ['+' + elem for elem in new
            if elem not in trivial_files]
        display_removed = ['-' + elem for elem in removed]
        display_changed = [elem for elem in changed
            if elem not in trivial_files]
        if not (display_new or display_removed or display_changed):
            # If there's nothing else to display, show one of the
            # trivial files.
            for fn in trivial_files:
                if fn in new:
                    display_new = ['+' + fn]
                    break
                elif fn in changed:
                    display_changed = [fn]
                    break

        display_new.sort()
        display_removed.sort()
        display_changed.sort()

        mesg = '%s; %s %s:' % (date, user, ', '.join(chain(
            display_new, display_removed, display_changed)))
        for line in textwrap.wrap(mesg, 80,
            initial_indent=' ', subsequent_indent=' ',
            break_on_hyphens=False):
            clnew_lines.append('%s\n' % line)
        for line in textwrap.wrap(msg, 80,
            initial_indent=' ', subsequent_indent=' '):
            clnew_lines.append('%s\n' % line)
        # Don't append a trailing newline if the file is new.
        if clold_file is not None:
            clnew_lines.append('\n')

        # Rebind `f` from the raw fd to a text-mode file object.
        f = io.open(f, mode='w', encoding=_encodings['repo.content'],
            errors='backslashreplace')

        for line in clnew_lines:
            f.write(line)

        # append stuff from old ChangeLog
        if clold_file is not None:
            if clold_lines:
                # clold_lines may contain a saved non-header line
                # that we want to write first.
                # Also, append this line to clnew_lines so that the
                # unified_diff call doesn't show it as removed.
                for line in clold_lines:
                    f.write(line)
                    clnew_lines.append(line)
            else:
                # ensure that there is no more than one blank
                # line after our new entry
                for line in clold_file:
                    if line.strip():
                        f.write(line)
                        break

            # Now prepend old_header_lines to clold_lines, for use
            # in the unified_diff call below.
            clold_lines = old_header_lines + clold_lines

            # Trim any trailing newlines.
            lines = clold_file.readlines()
            clold_file.close()
            while lines and lines[-1] == '\n':
                del lines[-1]
            f.writelines(lines)
        f.close()

        # show diff
        if not quiet:
            for line in difflib.unified_diff(clold_lines, clnew_lines,
                fromfile=cl_path, tofile=cl_path, n=0):
                util.writemsg_stdout(line, noiselevel=-1)
            util.writemsg_stdout("\n", noiselevel=-1)

        if pretend:
            # remove what we've done
            os.remove(clnew_path)
        else:
            # rename to ChangeLog, and set permissions
            try:
                clold_stat = os.stat(cl_path)
            except OSError:
                clold_stat = None

            shutil.move(clnew_path, cl_path)

            if clold_stat is None:
                util.apply_permissions(cl_path, mode=0o644)
            else:
                util.apply_stat_permissions(cl_path, clold_stat)

        if clold_file is None:
            return True
        else:
            return False
    except IOError as e:
        err = 'Repoman is unable to create/write to Changelog.new file: %s' % (e,)
        logging.critical(err)
        # try to remove if possible
        try:
            os.remove(clnew_path)
        except OSError:
            pass
        return None
def UpdateChangeLog(
    pkgdir, user, msg, skel_path, category, package,
    new=(), removed=(), changed=(), pretend=False, quiet=False):
    """
    Write an entry to an existing ChangeLog, or create a new one.
    Updates copyright year on changed files, and updates the header of
    ChangeLog with the contents of skel.ChangeLog.

    Returns True when a brand-new ChangeLog was created, False when an
    existing one was updated, and None on error (root user or I/O
    failure).
    """

    # Refuse to attribute entries to root; ECHANGELOG_USER must be set.
    if '<root@' in user:
        if not quiet:
            logging.critical('Please set ECHANGELOG_USER or run as non-root')
        return None

    # ChangeLog times are in UTC
    gmtime = time.gmtime()
    year = time.strftime('%Y', gmtime)
    date = time.strftime('%d %b %Y', gmtime)

    # check modified files and the ChangeLog for copyright updates
    # patches and diffs (identified by .patch and .diff) are excluded
    for fn in chain(new, changed):
        if fn.endswith('.diff') or fn.endswith('.patch'):
            continue
        update_copyright(os.path.join(pkgdir, fn), year, pretend=pretend)

    cl_path = os.path.join(pkgdir, 'ChangeLog')
    clold_lines = []
    clnew_lines = []
    old_header_lines = []
    header_lines = []

    # Existing ChangeLog is optional; a missing file means we create one.
    clold_file = None
    try:
        clold_file = io.open(_unicode_encode(
            cl_path, encoding=_encodings['fs'], errors='strict'),
            mode='r', encoding=_encodings['repo.content'], errors='replace')
    except EnvironmentError:
        pass

    # NOTE(review): `f` starts as the raw mkstemp fd and is later rebound
    # to the io.open() wrapper around that same fd.
    f, clnew_path = mkstemp()

    # construct correct header first
    try:
        if clold_file is not None:
            # retain header from old ChangeLog
            first_line = True
            for line in clold_file:
                line_strip = line.strip()
                if line_strip and line[:1] != "#":
                    # First non-comment, non-blank line: body begins; save
                    # it for later and stop collecting header lines.
                    clold_lines.append(line)
                    break
                # always make sure cat/pkg is up-to-date in case we are
                # moving packages around, or copied from another pkg, or ...
                if first_line:
                    if line.startswith('# ChangeLog for'):
                        line = '# ChangeLog for %s/%s\n' % (category, package)
                    first_line = False
                old_header_lines.append(line)
                header_lines.append(update_copyright_year(year, line))
                if not line_strip:
                    break

        clskel_file = None
        if not header_lines:
            # delay opening this until we find we need a header
            try:
                clskel_file = io.open(_unicode_encode(
                    skel_path, encoding=_encodings['fs'], errors='strict'),
                    mode='r', encoding=_encodings['repo.content'],
                    errors='replace')
            except EnvironmentError:
                pass

        if clskel_file is not None:
            # read skel.ChangeLog up to first empty line
            for line in clskel_file:
                line_strip = line.strip()
                if not line_strip:
                    break
                line = line.replace('<CATEGORY>', category)
                line = line.replace('<PACKAGE_NAME>', package)
                line = update_copyright_year(year, line)
                header_lines.append(line)
            header_lines.append('\n')
            clskel_file.close()

        # write new ChangeLog entry
        clnew_lines.extend(header_lines)
        newebuild = False
        for fn in new:
            if not fn.endswith('.ebuild'):
                continue
            # Strip the ".ebuild" suffix (7 chars) to get the version name.
            ebuild = fn.split(os.sep)[-1][0:-7]
            clnew_lines.append('*%s (%s)\n' % (ebuild, date))
            newebuild = True
        if newebuild:
            clnew_lines.append('\n')
        trivial_files = ('ChangeLog', 'Manifest')
        display_new = [
            '+' + elem
            for elem in new
            if elem not in trivial_files]
        display_removed = [
            '-' + elem
            for elem in removed]
        display_changed = [
            elem for elem in changed
            if elem not in trivial_files]
        if not (display_new or display_removed or display_changed):
            # If there's nothing else to display, show one of the
            # trivial files.
            for fn in trivial_files:
                if fn in new:
                    display_new = ['+' + fn]
                    break
                elif fn in changed:
                    display_changed = [fn]
                    break

        display_new.sort()
        display_removed.sort()
        display_changed.sort()

        mesg = '%s; %s %s:' % (date, user, ', '.join(chain(
            display_new, display_removed, display_changed)))
        for line in textwrap.wrap(
            mesg, 80, initial_indent=' ', subsequent_indent=' ',
            break_on_hyphens=False):
            clnew_lines.append('%s\n' % line)
        for line in textwrap.wrap(
            msg, 80, initial_indent=' ', subsequent_indent=' '):
            clnew_lines.append('%s\n' % line)
        # Don't append a trailing newline if the file is new.
        if clold_file is not None:
            clnew_lines.append('\n')

        # Rebind `f` from the raw fd to a text-mode file object.
        f = io.open(
            f, mode='w', encoding=_encodings['repo.content'],
            errors='backslashreplace')

        for line in clnew_lines:
            f.write(line)

        # append stuff from old ChangeLog
        if clold_file is not None:
            if clold_lines:
                # clold_lines may contain a saved non-header line
                # that we want to write first.
                # Also, append this line to clnew_lines so that the
                # unified_diff call doesn't show it as removed.
                for line in clold_lines:
                    f.write(line)
                    clnew_lines.append(line)
            else:
                # ensure that there is no more than one blank
                # line after our new entry
                for line in clold_file:
                    if line.strip():
                        f.write(line)
                        break

            # Now prepend old_header_lines to clold_lines, for use
            # in the unified_diff call below.
            clold_lines = old_header_lines + clold_lines

            # Trim any trailing newlines.
            lines = clold_file.readlines()
            clold_file.close()
            while lines and lines[-1] == '\n':
                del lines[-1]
            f.writelines(lines)
        f.close()

        # show diff
        if not quiet:
            for line in difflib.unified_diff(
                clold_lines, clnew_lines,
                fromfile=cl_path, tofile=cl_path, n=0):
                util.writemsg_stdout(line, noiselevel=-1)
            util.writemsg_stdout("\n", noiselevel=-1)

        if pretend:
            # remove what we've done
            os.remove(clnew_path)
        else:
            # rename to ChangeLog, and set permissions
            try:
                clold_stat = os.stat(cl_path)
            except OSError:
                clold_stat = None

            shutil.move(clnew_path, cl_path)

            if clold_stat is None:
                util.apply_permissions(cl_path, mode=0o644)
            else:
                util.apply_stat_permissions(cl_path, clold_stat)

        if clold_file is None:
            return True
        else:
            return False
    except IOError as e:
        err = 'Repoman is unable to create/write to Changelog.new file: %s' % (e,)
        logging.critical(err)
        # try to remove if possible
        try:
            os.remove(clnew_path)
        except OSError:
            pass
        return None
def purge(self):
    """Remove the saved tracking file, if one is currently present."""
    if not self.exists():
        return
    os.remove(self._tracking_path)
def quit(self):
    """Shutdown hook: delete the PID file named by the configuration.

    Reads the path from self.conf (presumably a dict-like config --
    a missing 'pid_file' key would make os.remove raise a TypeError).
    """
    pid_file = self.conf.get('pid_file')
    os.remove(pid_file)