def render(self, session, logger, branch, sandbox, bundle, sync, rebase,
           **arguments):
    """Publish a base64-encoded git bundle into template-king.

    The bundle is decoded to a temporary file, verified, pulled into a
    scratch clone of the sandbox's branch and then pushed back to the
    broker's template-king repository.

    Raises ArgumentError if the sandbox does not exist, if --rebase is
    requested for a sandbox that has trackers, or if any git operation
    fails (the git output is wrapped into the error message).
    """
    # Most of the logic here is duplicated in deploy
    if branch:
        sandbox = branch
    dbsandbox = Sandbox.get_unique(session, sandbox, compel=True)

    (handle, filename) = mkstemp()
    # mkstemp() hands back an already-open file descriptor; close it
    # immediately so it does not leak -- write_file() opens the path
    # itself.
    os.close(handle)
    contents = b64decode(bundle)
    write_file(filename, contents, logger=logger)

    if sync and not dbsandbox.is_sync_valid and dbsandbox.trackers:
        # FIXME: Maybe raise an ArgumentError and request that the
        # command run with --nosync? Maybe provide a --validate flag?
        # For now, we just auto-flip anyway (below) making the point moot.
        pass
    if not dbsandbox.is_sync_valid:
        dbsandbox.is_sync_valid = True

    if rebase and dbsandbox.trackers:
        raise ArgumentError("{0} has trackers, rebasing is not allowed."
                            .format(dbsandbox))

    kingdir = self.config.get("broker", "kingdir")
    rundir = self.config.get("broker", "rundir")

    tempdir = mkdtemp(prefix="publish_", suffix="_%s" % dbsandbox.name,
                      dir=rundir)
    # NOTE(review): neither tempdir nor the decoded bundle file is
    # removed in this chunk -- confirm cleanup happens elsewhere.
    try:
        run_git(["clone", "--shared", "--branch", dbsandbox.name,
                 kingdir, dbsandbox.name],
                path=tempdir, logger=logger)
        temprepo = os.path.join(tempdir, dbsandbox.name)
        run_git(["bundle", "verify", filename],
                path=temprepo, logger=logger)
        ref = "HEAD:%s" % dbsandbox.name
        command = ["pull", filename, ref]
        if rebase:
            command.append("--force")
        run_git(command, path=temprepo, logger=logger,
                loglevel=CLIENT_INFO)
        # FIXME: Run tests before pushing back to template-king
        if rebase:
            # A rebase rewrites history, so a forced (non-fast-forward)
            # push is required.
            target_ref = "+" + dbsandbox.name
        else:
            target_ref = dbsandbox.name
        run_git(["push", "origin", target_ref],
                path=temprepo, logger=logger)
    except ProcessException as e:
        # "as e" is valid on Python 2.6+ and Python 3; the old comma
        # form is a SyntaxError on Python 3.
        raise ArgumentError("\n%s%s" % (e.out, e.err))
def render(self, session, logger, branch, sandbox, bundle, sync, rebase,
           **arguments):
    """Publish a git bundle (base64-encoded) to a sandbox branch.

    Decodes the bundle to a temp file, verifies it, pulls it into a
    throwaway shared clone of the sandbox branch, then pushes the
    result back to the template-king repository.

    Raises ArgumentError for an unknown sandbox, for --rebase on a
    sandbox with trackers, or when a git command fails.
    """
    # Most of the logic here is duplicated in deploy
    if branch:
        sandbox = branch
    dbsandbox = Sandbox.get_unique(session, sandbox, compel=True)

    (handle, filename) = mkstemp()
    # Close the fd returned by mkstemp() straight away to avoid a
    # descriptor leak; write_file() re-opens the file by path.
    os.close(handle)
    contents = b64decode(bundle)
    write_file(filename, contents, logger=logger)

    if sync and not dbsandbox.is_sync_valid and dbsandbox.trackers:
        # FIXME: Maybe raise an ArgumentError and request that the
        # command run with --nosync? Maybe provide a --validate flag?
        # For now, we just auto-flip anyway (below) making the point moot.
        pass
    if not dbsandbox.is_sync_valid:
        dbsandbox.is_sync_valid = True

    if rebase and dbsandbox.trackers:
        raise ArgumentError("{0} has trackers, rebasing is not allowed."
                            .format(dbsandbox))

    kingdir = self.config.get("broker", "kingdir")
    rundir = self.config.get("broker", "rundir")

    tempdir = mkdtemp(prefix="publish_", suffix="_%s" % dbsandbox.name,
                      dir=rundir)
    # NOTE(review): tempdir and the decoded bundle are not cleaned up
    # here -- verify that removal happens elsewhere.
    try:
        run_git(["clone", "--shared", "--branch", dbsandbox.name,
                 kingdir, dbsandbox.name],
                path=tempdir, logger=logger)
        temprepo = os.path.join(tempdir, dbsandbox.name)
        run_git(["bundle", "verify", filename],
                path=temprepo, logger=logger)
        ref = "HEAD:%s" % dbsandbox.name
        command = ["pull", filename, ref]
        if rebase:
            command.append("--force")
        run_git(command, path=temprepo, logger=logger,
                loglevel=CLIENT_INFO)
        # FIXME: Run tests before pushing back to template-king
        if rebase:
            # History was rewritten, so force the push.
            target_ref = "+" + dbsandbox.name
        else:
            target_ref = dbsandbox.name
        run_git(["push", "origin", target_ref],
                path=temprepo, logger=logger)
    except ProcessException as e:
        # Python-2/3 compatible exception binding (was "except X, e").
        raise ArgumentError("\n%s%s" % (e.out, e.err))
def write(self, locked=False, content=None):
    """Write out the template.

    If the content is unchanged, then the file will not be modified
    (preserving the mtime).

    Returns the number of files that were written.

    If locked is True then it is assumed that error handling happens
    higher in the call stack.
    """
    # Object plenaries of non-compileable archetypes are never
    # written to disk.
    if self.template_type == "object" and \
       hasattr(self.dbobj, "personality") and \
       self.dbobj.personality and \
       not self.dbobj.personality.archetype.is_compileable:
        return 0

    if content is None:
        if not self.new_content:
            self.new_content = self._generate_content()
        content = self.new_content

    self.stash()
    if self.old_content == content and \
       not self.removed and not self.changed:
        # optimise out the write (leaving the mtime good for ant)
        # if nothing is actually changed
        return 0

    key = None
    try:
        if not locked:
            key = self.get_write_key()
            lock_queue.acquire(key)
        if not os.path.exists(self.plenary_directory):
            os.makedirs(self.plenary_directory)
        write_file(self.plenary_file, content, logger=self.logger)
        self.removed = False
        if self.old_content != content:
            self.changed = True
    except Exception:
        if not locked:
            self.restore_stash()
        # Bare re-raise preserves the original traceback; "raise e"
        # would discard it on Python 2.
        raise
try: mtime = os.path.getmtime(os.path.join(root, profile)) except OSError, e: continue if obj in old_object_index and mtime > old_object_index[obj]: modified_index[obj] = mtime content.append("<profile mtime='%d'>%s%s</profile>" % (mtime, obj, advertise_suffix)) content.append("</profiles>") compress = None if gzip_index: compress = 'gzip' write_file(index_path, "\n".join(content), logger=logger, compress=compress) sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) if config.has_option("broker", "bind_address"): bind_address = socket.gethostbyname(config.get("broker", "bind_address")) if config.has_option("broker", "cdp_send_port"): # pragma: no cover port = config.get_int("broker", "cdp_send_port") else: port = 0 sock.bind((bind_address, port)) if config.has_option("broker", "server_notifications"): service_modules = {} for service in config.get("broker", "server_notifications").split(): if service.strip():
mtime = os.path.getmtime(os.path.join(root, profile)) except OSError, e: continue if obj in old_object_index and mtime > old_object_index[obj]: modified_index[obj] = mtime content.append("<profile mtime='%d'>%s%s</profile>" % (mtime, obj, advertise_suffix)) content.append("</profiles>") compress = None if gzip_index: compress = 'gzip' write_file(index_path, "\n".join(content), logger=logger, compress=compress) sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) if config.has_option("broker", "bind_address"): bind_address = socket.gethostbyname( config.get("broker", "bind_address")) if config.has_option("broker", "cdp_send_port"): # pragma: no cover port = config.get_int("broker", "cdp_send_port") else: port = 0 sock.bind((bind_address, port)) if config.has_option("broker", "server_notifications"): service_modules = {}