def run_cmd(self):
    """Scan two extracted distros for gerrit patches and print the diff.

    Pretty-prints the patch list of each distro, then lists every gerrit
    present in the 'new' distro but absent from the 'old' one.
    """
    query_limit = 100
    num_to_display = 50
    branch = 'master'
    project_names = self.get_project_names()
    locations = {'new': '/tmp/distro_new', 'old': '/tmp/distro_old'}

    def scan(key):
        # Build a Changes scanner for one distro, print and return its projects.
        changes = Changes(branch, locations[key], num_to_display,
                          query_limit, project_names, self.remote_url)
        projects = changes.run_cmd()
        changes.pretty_print_projects(projects)
        return projects

    new_projects = scan('new')
    old_projects = scan('old')

    patchset_diff = []
    print("\nPatch differences:\n------------------")
    for project_name, values in new_projects.items():
        old_includes = old_projects[project_name]['includes']
        for gerrit in values['includes']:
            if gerrit not in old_includes:
                patchset_diff.append(gerrit)
                print('{:<20}{}\t{}'.format(project_name, gerrit['url'], gerrit['subject']))
    print("\n%s different patches between the two distros." % len(patchset_diff))
def run_cmd(branch, distro_patch, limit, qlimit, project_names, remote_url, loglevel=0):
    """Scan a single distro for gerrit changes and pretty-print the result.

    Thin driver: builds a Changes scanner, runs it, and prints what it found.
    """
    scanner = Changes(branch, distro_patch, limit, qlimit,
                      project_names, remote_url, loglevel)
    scanner.pretty_print_projects(scanner.run_cmd())
def test_pretty_print(self):
    """Exercise pretty_print_projects with minimal and gerrit-shaped payloads."""
    project_names = PROJECT_NAMES
    changes = Changes(BRANCH, DISTRO_PATH, LIMIT, QLIMIT, project_names, REMOTE_URL)
    # First pass: degenerate include records.
    projects = {name: {"commit": 1, "includes": [{'a': 1}]} for name in project_names}
    changes.pretty_print_projects(projects)
    # Second pass: realistic gerrit-style records.
    for name in project_names:
        projects[name] = {
            "commit": 1,
            "includes": [
                {"grantedOn": 1, "lastUpdated": 11, "number": "12345",
                 "subject": "This is a test for " + name},
                {"grantedOn": 2, "lastUpdated": 22, "number": "56789",
                 "subject": "This is a test for " + name},
            ],
        }
    changes.pretty_print_projects(projects)
def run_cmd(self):
    """Diff the gerrit patches contained in two extracted distros.

    Each distro's projects are pretty-printed as they are scanned; the
    patches unique to the 'new' distro are then listed and counted.
    """
    query_limit = 100
    num_to_display = 50
    branch = "master"
    project_names = self.get_project_names()

    # Scan both distros in order: new first, then old.
    projects_by_distro = []
    for path in ("/tmp/distro_new", "/tmp/distro_old"):
        changes = Changes(
            branch,
            path,
            num_to_display,
            query_limit,
            project_names,
            self.remote_url,
        )
        projects = changes.run_cmd()
        changes.pretty_print_projects(projects)
        projects_by_distro.append(projects)
    new_projects, old_projects = projects_by_distro

    patchset_diff = []
    print("\nPatch differences:\n------------------")
    for project_name, values in new_projects.items():
        baseline = old_projects[project_name]["includes"]
        for gerrit in values["includes"]:
            if gerrit in baseline:
                continue
            patchset_diff.append(gerrit)
            print(
                "{:<20}{}\t{}".format(
                    project_name, gerrit["url"], gerrit["subject"]
                )
            )
    print("\n%s different patches between the two distros." % len(patchset_diff))
def test_pretty_print(self):
    """Smoke-test pretty_print_projects on two payload shapes."""
    project_names = PROJECT_NAMES
    changes = Changes(BRANCH, DISTRO_PATH, LIMIT, QLIMIT, project_names, REMOTE_URL)

    # Degenerate include dicts first.
    projects = {}
    for name in project_names:
        projects[name] = {"commit": 1, "includes": [{'a': 1}]}
    changes.pretty_print_projects(projects)

    # Then full gerrit-shaped records.
    def record(granted, updated, number, name):
        # One include entry in the shape the printer expects.
        return {
            "grantedOn": granted,
            "lastUpdated": updated,
            "number": number,
            "subject": "This is a test for " + name,
        }

    for name in project_names:
        projects[name] = {
            "commit": 1,
            "includes": [record(1, 11, "12345", name),
                         record(2, 22, "56789", name)],
        }
    changes.pretty_print_projects(projects)
def rollback_files(self):
    """Roll back filesystem changes recorded in the fsdelta file.

    For every recorded change: paths that did not exist before are removed;
    overwritten ('o') or deleted ('d') paths are restored from the saved
    originals; ownership and permission bits are reset to the values stored
    in the dirindex.  Finally the saved passwd/group files are copied back
    to /etc.

    Individual failures are counted and reported at the end rather than
    aborting mid-rollback (fault tolerance).

    Raises:
        Error: if any per-change exception was caught during the rollback.
    """
    if not exists(self.paths.fsdelta):
        return

    changes = Changes.fromfile(self.paths.fsdelta)
    dirindex = DirIndex(self.paths.dirindex)

    exceptions = 0
    for change in changes:
        try:
            # Path was created by the changes being rolled back: delete it.
            if change.path not in dirindex:
                utils.remove_any(change.path)
                continue

            # Overwritten or deleted: restore the saved original copy.
            if change.OP in ('o', 'd'):
                try:
                    self._move_from_originals(change.path)
                except self.Error:
                    continue

            dirindex_rec = dirindex[change.path]
            local_rec = DirIndex.Record.frompath(change.path)

            # Restore recorded ownership if it drifted.
            if dirindex_rec.uid != local_rec.uid or \
               dirindex_rec.gid != local_rec.gid:
                os.lchown(change.path, dirindex_rec.uid, dirindex_rec.gid)

            # Restore recorded permission bits if they drifted.
            if dirindex_rec.mod != local_rec.mod:
                mod = stat.S_IMODE(dirindex_rec.mod)
                os.chmod(change.path, mod)

        # FIX: narrowed from a bare `except:` — a bare except also swallows
        # KeyboardInterrupt/SystemExit, which should abort the rollback.
        except Exception:
            exceptions += 1
            # fault-tolerance: warn and continue, don't die
            traceback.print_exc(file=sys.stderr)

    for fname in ('passwd', 'group'):
        shutil.copy(join(self.paths.etc, fname), "/etc")

    if exceptions:
        raise Error("caught %d exceptions during rollback_files" % exceptions)
def run (self):
    """Build a Debian source package from the distutils build tree.

    Installs the project into a staging directory, generates the files
    under debian/ (rules, compat, dirs, changelog, maintainer scripts,
    control, copyright), archives everything into a .tar.gz, and emits the
    matching .dsc and .changes metadata files into ``self.dist_dir``.

    NOTE(review): Python 2 only — relies on ``unicode`` and the octal
    literal ``0755``; would need porting for Python 3.
    """
    # Create folders and copy sources files
    DEBIAN_DIR = os.path.join(self.build_dir,'debian')
    DATA_DIR = os.path.join(self.build_dir,self.debian_package)
    mkpath(DEBIAN_DIR)
    mkpath(self.dist_dir)
    #mkpath(os.path.join(DATA_DIR,'usr','bin'))
    self.bdist_dir = DATA_DIR
    # Re-run the distutils 'install' command rooted at the staging dir.
    install = self.reinitialize_command('install', reinit_subcommands=1)
    install.root = self.bdist_dir
    if self.install_purelib is not None:
        install.install_purelib = self.install_purelib
    install.skip_build = 0
    install.warn_dir = 1
    self.run_command('install')
    # Create the debian rules file (made executable below)
    rules = Rules(self.debian_package,DATA_DIR)
    dirs = rules.dirs
    open(os.path.join(DEBIAN_DIR,"rules"),"w").write(unicode(rules.getContent()).encode('utf-8'))
    os.chmod(os.path.join(DEBIAN_DIR,"rules"),0755)
    # Create the debian compat file (debhelper compatibility level 5)
    open(os.path.join(DEBIAN_DIR,"compat"),"w").write("5\n")
    # Create the debian dirs file
    open(os.path.join(DEBIAN_DIR,"dirs"),"w").write("\n".join(dirs))
    # Create the debian changelog, stamped with the current build date
    d=datetime.now()
    self.buildDate=d.strftime("%a, %d %b %Y %H:%M:%S +0000")
    clog = Changelog(self.debian_package,self.version,self.buildversion,self.changelog,self.distribution.get_maintainer(),self.distribution.get_maintainer_email(),self.buildDate)
    open(os.path.join(DEBIAN_DIR,"changelog"),"w").write(unicode(clog.getContent()).encode('utf-8'))
    # Create the pre/post inst/rm scripts, only when configured
    if self.preinst is not None:
        self.mkscript(self.preinst ,os.path.join(DEBIAN_DIR,"preinst"))
    if self.postinst is not None:
        self.mkscript(self.postinst,os.path.join(DEBIAN_DIR,"postinst"))
    if self.prere is not None:
        self.mkscript(self.prere ,os.path.join(DEBIAN_DIR,"prerm"))
    if self.postre is not None:
        self.mkscript(self.postre ,os.path.join(DEBIAN_DIR,"postrm"))
    # Create the control file (Maemo/MeeGo extra fields passed as optionnal)
    control = Control(self.debian_package,
                      self.section,
                      self.distribution.get_maintainer(),
                      self.distribution.get_maintainer_email(),
                      self.architecture,
                      self.depends,
                      self.suggests,
                      self.description,
                      self.long_description,
                      self.conflicts,
                      self.replaces,
                      optionnal = {
                          'XB-Maemo-Display-Name':self.Maemo_Display_Name,
                          'XB-Maemo-Upgrade-Description':self.Maemo_Upgrade_Description,
                          'XSBC-Bugtracker':self.Maemo_Bugtracker,
                          'XB-Maemo-Icon-26':self.getIconContent(self.Maemo_Icon_26),
                          'XB-Maemo-Flags':self.Maemo_Flags,
                          'XB-Meego-Desktop-Entry-Filename':self.MeeGo_Desktop_Entry_Filename
                      } )
    open(os.path.join(DEBIAN_DIR,"control"),"w").write(unicode(control.getContent()).encode('utf-8'))
    # Create the debian licence (copyright) file
    licence = Licence(self.copyright,
                      self.distribution.get_maintainer(),
                      self.distribution.get_maintainer_email(),
                      self.buildDate,
                      str(datetime.now().year))
    open(os.path.join(DEBIAN_DIR,"copyright"),"w").write(unicode(licence.getContent()).encode('utf-8'))
    # Delete the tar if it already exists, otherwise tarfile would append
    # into the same archive
    tarpath = os.path.join(self.dist_dir,self.debian_package+'_'+self.version+'-'+self.buildversion+'.tar.gz')
    if os.path.exists(tarpath):
        os.remove(tarpath)
    # Now create the tar.gz
    import tarfile
    def reset(tarinfo):
        # Normalize ownership inside the archive to root:root.
        # NOTE(review): defined but never passed to tar.add() — presumably
        # meant as its `filter=` argument; confirm intent.
        tarinfo.uid = tarinfo.gid = 0
        tarinfo.uname = tarinfo.gname = "root"
        return tarinfo
    tar = tarfile.open(tarpath, 'w:gz')
    #tar.add(self.dist_dir,'.')
    tar.add(self.build_dir,'.')
    tar.close()
    # Clean the build dir
    remove_tree(DEBIAN_DIR)
    remove_tree(DATA_DIR)
    # Create the Dsc file; temporarily force an English locale so the
    # RFC-2822 date is formatted correctly (best effort)
    import locale
    try:
        old_locale,iso=locale.getlocale(locale.LC_TIME)
        locale.setlocale(locale.LC_TIME,'en_US')
    except:
        pass
    dsccontent = Dsc("%s-%s"%(self.version,self.buildversion),
                     self.depends,
                     (os.path.join(self.dist_dir,self.debian_package+'_'+self.version+'-'+self.buildversion+'.tar.gz'),),
                     Format='1.0',
                     Source=self.debian_package,
                     Version="%s-%s"%(self.version,self.buildversion),
                     Maintainer="%s <%s>"%(self.distribution.get_maintainer(),self.distribution.get_maintainer_email()),
                     Architecture="%s"%self.architecture,
                     )
    f = open(os.path.join(self.dist_dir,self.debian_package+'_'+self.version+'-'+self.buildversion+'.dsc'),"wb")
    f.write(unicode(dsccontent._getContent()).encode('utf-8'))
    f.close()
    # Changes file
    changescontent = Changes(
        "%s <%s>"%(self.distribution.get_maintainer(),self.distribution.get_maintainer_email()),
        "%s"%self.description,
        "%s"%self.changelog,
        (
            "%s.tar.gz"%os.path.join(self.dist_dir,self.debian_package+'_'+self.version+'-'+self.buildversion),
            "%s.dsc"%os.path.join(self.dist_dir,self.debian_package+'_'+self.version+'-'+self.buildversion),
        ),
        "%s"%self.section,
        "%s"%self.repository,
        Format='1.7',
        Date=time.strftime("%a, %d %b %Y %H:%M:%S +0000", time.gmtime()),
        Source="%s"%self.debian_package,
        Architecture="%s"%self.architecture,
        Version="%s-%s"%(self.version,self.buildversion),
        Distribution="%s"%self.repository,
        Urgency="%s"%self.urgency,
        Maintainer="%s <%s>"%(self.distribution.get_maintainer(),self.distribution.get_maintainer_email())
        )
    f = open(os.path.join(self.dist_dir,self.debian_package+'_'+self.version+'-'+self.buildversion+'.changes'),"wb")
    f.write(unicode(changescontent.getContent()).encode('utf-8'))
    f.close()
    # Restore the caller's time locale (best effort)
    try:
        locale.setlocale(locale.LC_TIME,old_locale)
    except:
        pass
def test_epoch_to_utc(self):
    """Print the UTC rendering of a fixed epoch timestamp."""
    changes = Changes(BRANCH, DISTRO_PATH, LIMIT, QLIMIT, PROJECT_NAMES, REMOTE_URL)
    print("utc: %s" % changes.epoch_to_utc(1483974872))
def files(self):
    """Restore backed-up files, merging users/groups into the live system.

    Applies the backup's filesystem delta (overlay copy, stat fixes and
    deletions) to '/', remapping UIDs/GIDs via the merged passwd/group
    databases.  Honors ``self.simulate`` (print-only) and records undo
    data via ``self.rollback`` when one is set.

    NOTE(review): Python 2 only (print statements, ``file()`` builtin).
    """
    extras = self.extras
    # Nothing to restore without a filesystem delta.
    if not exists(extras.fsdelta):
        return

    overlay = self.backup_extract_path
    simulate = self.simulate
    rollback = self.rollback
    limits = self.limits.fs

    print "\n" + self._title("Restoring filesystem")

    # Merge the backup's user/group databases into the local ones and get
    # the resulting UID/GID remapping tables.
    passwd, group, uidmap, gidmap = self._userdb_merge(extras.etc, "/etc")

    if uidmap or gidmap:
        print "MERGING USERS AND GROUPS\n"
        for olduid in uidmap:
            print "UID %d => %d" % (olduid, uidmap[olduid])
        for oldgid in gidmap:
            print "GID %d => %d" % (oldgid, gidmap[oldgid])
        print

    changes = Changes.fromfile(extras.fsdelta, limits)
    deleted = list(changes.deleted())

    # Save undo information before touching anything.
    if rollback:
        rollback.save_files(changes, overlay)

    fsdelta_olist = self._get_fsdelta_olist(extras.fsdelta_olist, limits)
    if fsdelta_olist:
        print "OVERLAY:\n"
        print "\n".join(fsdelta_olist)
        if not simulate:
            self._apply_overlay(overlay, '/', fsdelta_olist)

    statfixes = list(changes.statfixes(uidmap, gidmap))

    if statfixes or deleted:
        print "\nPOST-OVERLAY FIXES:\n"

    for action in statfixes:
        print action
        if not simulate:
            action()

    for action in deleted:
        print action

        # rollback moves deleted to 'originals'
        if not simulate and not rollback:
            action()

    def w(path, s):
        # Overwrite *path* with the string form of *s*.
        file(path, "w").write(str(s))

    if not simulate:
        w("/etc/passwd", passwd)
        w("/etc/group", group)
def __init__(self):
    """Parse command-line options and run one full bot session.

    Either runs a single builtin task (``--task``) or the regular cycle:
    recent-changes handling, page fixing and pool image processing,
    guarded by a botlock file and a persisted timestamp.

    NOTE(review): Python 2 only (print statement); the whole run happens
    inside the constructor.
    """
    # detect args
    parser = OptionParser()
    parser.add_option("-d", "--debug", dest="debug", action="store_true",
                      help="set debug mode", default=False)
    parser.add_option("-v", "--verbose", dest="verbose", action="store_true",
                      help="verbose mode", default=False)
    parser.add_option("-p", "--page", dest="page",
                      help="specify a page to handle")
    parser.add_option("--onepage", dest="onepage", action="store_true",
                      help="only handle a single page (similar to `--pages=1')",
                      default=False)
    parser.add_option("--task", dest="task",
                      help="run builtin task (use `--task=help' for list of tasks)")
    parser.add_option("--pages", dest="pagestorun",
                      help="number of pages to handle")
    parser.add_option("-s", "--search", dest="search",
                      help="search string")
    parser.add_option("-t", "--time", dest="time",
                      help="set time in format YYYY-MM-DDTHH:MM:SSZ")
    parser.add_option("--titlesearch", dest="titlesearch", action="store_true",
                      help="search in titles rather than text", default=False)
    parser.add_option("--writetime", dest="writetime", action="store_true",
                      help="write timestamp at end of run, even if other variables would disable that",
                      default=False)
    parser.add_option("-r", "--ref", "--loadref", dest="loadref", action="store_true",
                      help="force appearance reference load from wiki", default=False)
    parser.add_option("--localref", dest="localref", action="store_true",
                      help="force appearance reference to be loaded locally", default=False)
    parser.add_option("--runtasks", dest="runtasks", action="store_true",
                      help="force run tasks", default=False)
    parser.add_option("--changes", dest="changes",
                      help="maximum recent changes to run")
    parser.add_option("--nochanges", dest="nochanges", action="store_true",
                      help="don't check recent changes", default=False)
    parser.add_option("--donotfixpages", dest="donotfixpages", action="store_true",
                      help="no page cleanup", default=False)
    parser.add_option("--donotrunpool", dest="donotrunpool", action="store_true",
                      help="don't run changes for the pool", default=False)
    (options, args) = parser.parse_args()
    # set debug mode
    self.debugmode = options.debug
    self.verbose = options.verbose
    self.api = Api(debug=self.debugmode, server="theinfosphere.org")
    self.poolapi = Api(debug=self.debugmode, server="pool.theinfosphere.org")
    if self.debugmode:
        self.log("==== Debug mode!")
    if options.task!=None:
        # oh noes, we need to run a builtin task, so we should not run
        # regular tasks
        taskmanager = Taskmanager(debug=self.debugmode, api=self.api,
                                  verbose=self.verbose)
        if options.task == 'help':
            print taskmanager.help_response()
        else:
            self.log("Builtin task run...")
            if self.api.login(userdata.username, userdata.password):
                if not taskmanager.run_task(options.task):
                    self.log("Task `%s' did not run properly or does not exist."
                             % options.task)
            else:
                self.log("Could not log in.")
        self.writelog()
    else:
        # Regular run: changes / pages / pool, controlled by options.
        if not options.nochanges:
            self.changes = Changes(debug=self.debugmode, api=self.api,
                                   verbose=self.verbose)
            if options.changes != None:
                self.changes.maximum = options.changes
        self.runtasks = options.runtasks
        self.pages = Pages(debug=self.debugmode, api=self.api,
                           verbose=self.verbose)
        self.pages.singlepage = self.page = options.page
        pagestorun = 0
        if options.onepage:
            pagestorun = 1
        if options.pagestorun != None:
            pagestorun = options.pagestorun
        self.pages.pagestorun = self.pagestorun = pagestorun
        self.pages.search = self.search = options.search
        self.pages.titlesearch = self.titlesearch = options.titlesearch
        if options.time!=None:
            # Validate the user-supplied timestamp before adopting it.
            try:
                time.strptime(options.time, "%Y-%m-%dT%H:%M:%SZ")
                self.ts = options.time
                self.log("Using input time of `%s'." % self.ts)
            except ValueError:
                self.log("Input time given `%s' does not match format; ignoring."
                         % self.ts)
                self.ts = None
        if self.debugmode or self.search!=None or self.pagestorun==1 or self.page!=None or self.ts!=None:
            # if any of these are the case, then writing a new time stamp could
            # mess up future runs
            self.writetime = False
        if options.writetime:
            # unless of course, it is forced by the user
            self.writetime = True
        self.pages.ar.loadref = options.loadref
        self.pages.ar.localref = options.localref
        # insure the api knows, so it won't make edits if debug mode
        if self.api.login(userdata.username, userdata.password):
            self.log("Logged in successfully.")
            if self.check_for_lock():
                self.initiate()
                if not options.nochanges:
                    self.changes.set_ts(self.ts)
                self.pages.set_ts(self.ts)
                if self.runtasks or (self.pagestorun==0 and self.page == None):
                    tasks = Tasks(self.debugmode, self.api, self.verbose,
                                  self.pages)
                    self.log_obtain(tasks)
                if not options.nochanges:
                    self.changes.handle_changes()
                    self.log_obtain(self.changes)
                else:
                    self.log("Not running recent changes checking...")
                if not options.donotfixpages:
                    self.pages.routine_run()
                else:
                    self.log("Not running page fixing...")
                self.log_obtain(self.pages)
                if not options.donotrunpool:
                    self.images = Images(debug=self.debugmode, api=self.api,
                                         verbose=self.verbose,
                                         poolapi=self.poolapi)
                    self.images.routine_job(self.ts)
                    self.log_obtain(self.images)
                else:
                    self.log("Not running through the pool...")
                self.finish()
        else:
            self.log("Could not log in.")
        self.writelog()
class Svipbot(object):
    """Wiki maintenance bot: runs tasks, fixes pages and processes the pool.

    The entire run is driven from ``__init__``; a ``botlock`` file guards
    against concurrent runs and a ``time`` file persists the last-run
    timestamp between sessions.

    NOTE(review): Python 2 only (print statement, ``unicode``).
    NOTE(review): ``loglines`` is a mutable class attribute shared by all
    instances — harmless for a single-instance bot, but worth confirming.
    """
    # Class-level defaults; most are (re)assigned during __init__.
    api = None          # Api for the main wiki
    poolapi = None      # Api for the pool wiki
    loglines = []       # accumulated log messages for writelog()
    debugmode = False
    writetime = True    # whether to persist the timestamp at end of run
    oldts = None
    ts = None           # timestamp the run works from
    newts = None        # timestamp taken at start of run, written at end
    pages = None
    images = None

    def check_for_lock(self):
        """Return False (and log) when a botlock file blocks this run."""
        if os.path.exists("botlock"):
            self.log("Botlock file exists.")
            return False
        self.log("No botlock.")
        return True

    def log(self, msg=None, view=True):
        """Append *msg* to the log; with no argument, return all log lines."""
        if msg==None:
            return self.loglines
        else:
            self.loglines.append(msg)
            if (self.debugmode or self.verbose) and view:
                print msg

    def log_obtain(self, obj):
        """Drain *obj*'s log into our own (without echoing), then clear it."""
        if obj==None:
            return
        for l in obj.get_log():
            self.log(l, False)
        obj.clear_log()

    def initiate(self):
        """Write the botlock, load the saved timestamp and snapshot 'now'."""
        f = open('botlock', 'w')
        f.write('x')
        f.close()
        self.log("Wrote botlock.")
        try:
            f = open('time', 'r')
            self.ts = f.read().strip()
            self.log("Found time of `%s'." % self.ts)
            f.close()
        except IOError:
            self.ts = None # just to make sure
        # create a timestamp now, because the edits may take time, and someone could
        # make an edit in between, and we want to catch that.
        self.newts = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())

    def finish(self):
        """Persist the run timestamp (if allowed) and remove the botlock."""
        # current time
        if self.newts==None:
            ts = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
        else:
            ts = self.newts
        if self.writetime:
            f = open('time', 'w')
            f.write(ts)
            f.close()
            self.log("Wrote timestamp of `%s'." % ts)
        # remove botlock
        os.remove("botlock")
        self.log("Removed botlock")

    def writelog(self):
        """Dump the accumulated log to a timestamped file (skipped in debug)."""
        if not self.debugmode:
            # log
            fn = "logs/runlog-%s" % time.strftime("%Y%m%dT%H%M", time.gmtime())
            log = open(fn, 'w')
            log.writelines([unicode("%s\n"%s).encode('utf-8') for s in self.log()])
            log.close()

    def __init__(self):
        """Parse command-line options and run one full bot session."""
        # detect args
        parser = OptionParser()
        parser.add_option("-d", "--debug", dest="debug", action="store_true",
                          help="set debug mode", default=False)
        parser.add_option("-v", "--verbose", dest="verbose", action="store_true",
                          help="verbose mode", default=False)
        parser.add_option("-p", "--page", dest="page",
                          help="specify a page to handle")
        parser.add_option("--onepage", dest="onepage", action="store_true",
                          help="only handle a single page (similar to `--pages=1')",
                          default=False)
        parser.add_option("--task", dest="task",
                          help="run builtin task (use `--task=help' for list of tasks)")
        parser.add_option("--pages", dest="pagestorun",
                          help="number of pages to handle")
        parser.add_option("-s", "--search", dest="search",
                          help="search string")
        parser.add_option("-t", "--time", dest="time",
                          help="set time in format YYYY-MM-DDTHH:MM:SSZ")
        parser.add_option("--titlesearch", dest="titlesearch", action="store_true",
                          help="search in titles rather than text", default=False)
        parser.add_option("--writetime", dest="writetime", action="store_true",
                          help="write timestamp at end of run, even if other variables would disable that",
                          default=False)
        parser.add_option("-r", "--ref", "--loadref", dest="loadref", action="store_true",
                          help="force appearance reference load from wiki", default=False)
        parser.add_option("--localref", dest="localref", action="store_true",
                          help="force appearance reference to be loaded locally", default=False)
        parser.add_option("--runtasks", dest="runtasks", action="store_true",
                          help="force run tasks", default=False)
        parser.add_option("--changes", dest="changes",
                          help="maximum recent changes to run")
        parser.add_option("--nochanges", dest="nochanges", action="store_true",
                          help="don't check recent changes", default=False)
        parser.add_option("--donotfixpages", dest="donotfixpages", action="store_true",
                          help="no page cleanup", default=False)
        parser.add_option("--donotrunpool", dest="donotrunpool", action="store_true",
                          help="don't run changes for the pool", default=False)
        (options, args) = parser.parse_args()
        # set debug mode
        self.debugmode = options.debug
        self.verbose = options.verbose
        self.api = Api(debug=self.debugmode, server="theinfosphere.org")
        self.poolapi = Api(debug=self.debugmode, server="pool.theinfosphere.org")
        if self.debugmode:
            self.log("==== Debug mode!")
        if options.task!=None:
            # oh noes, we need to run a builtin task, so we should not run
            # regular tasks
            taskmanager = Taskmanager(debug=self.debugmode, api=self.api,
                                      verbose=self.verbose)
            if options.task == 'help':
                print taskmanager.help_response()
            else:
                self.log("Builtin task run...")
                if self.api.login(userdata.username, userdata.password):
                    if not taskmanager.run_task(options.task):
                        self.log("Task `%s' did not run properly or does not exist."
                                 % options.task)
                else:
                    self.log("Could not log in.")
            self.writelog()
        else:
            # Regular run: changes / pages / pool, controlled by options.
            if not options.nochanges:
                self.changes = Changes(debug=self.debugmode, api=self.api,
                                       verbose=self.verbose)
                if options.changes != None:
                    self.changes.maximum = options.changes
            self.runtasks = options.runtasks
            self.pages = Pages(debug=self.debugmode, api=self.api,
                               verbose=self.verbose)
            self.pages.singlepage = self.page = options.page
            pagestorun = 0
            if options.onepage:
                pagestorun = 1
            if options.pagestorun != None:
                pagestorun = options.pagestorun
            self.pages.pagestorun = self.pagestorun = pagestorun
            self.pages.search = self.search = options.search
            self.pages.titlesearch = self.titlesearch = options.titlesearch
            if options.time!=None:
                # Validate the user-supplied timestamp before adopting it.
                try:
                    time.strptime(options.time, "%Y-%m-%dT%H:%M:%SZ")
                    self.ts = options.time
                    self.log("Using input time of `%s'." % self.ts)
                except ValueError:
                    self.log("Input time given `%s' does not match format; ignoring."
                             % self.ts)
                    self.ts = None
            if self.debugmode or self.search!=None or self.pagestorun==1 or self.page!=None or self.ts!=None:
                # if any of these are the case, then writing a new time stamp could
                # mess up future runs
                self.writetime = False
            if options.writetime:
                # unless of course, it is forced by the user
                self.writetime = True
            self.pages.ar.loadref = options.loadref
            self.pages.ar.localref = options.localref
            # insure the api knows, so it won't make edits if debug mode
            if self.api.login(userdata.username, userdata.password):
                self.log("Logged in successfully.")
                if self.check_for_lock():
                    self.initiate()
                    if not options.nochanges:
                        self.changes.set_ts(self.ts)
                    self.pages.set_ts(self.ts)
                    if self.runtasks or (self.pagestorun==0 and self.page == None):
                        tasks = Tasks(self.debugmode, self.api, self.verbose,
                                      self.pages)
                        self.log_obtain(tasks)
                    if not options.nochanges:
                        self.changes.handle_changes()
                        self.log_obtain(self.changes)
                    else:
                        self.log("Not running recent changes checking...")
                    if not options.donotfixpages:
                        self.pages.routine_run()
                    else:
                        self.log("Not running page fixing...")
                    self.log_obtain(self.pages)
                    if not options.donotrunpool:
                        self.images = Images(debug=self.debugmode, api=self.api,
                                             verbose=self.verbose,
                                             poolapi=self.poolapi)
                        self.images.routine_job(self.ts)
                        self.log_obtain(self.images)
                    else:
                        self.log("Not running through the pool...")
                    self.finish()
            else:
                self.log("Could not log in.")
            self.writelog()
.format(vertex.id, prop.key, prop.value)) vertex_id, props = self.recv_vertex() if vertex_id != vertex.id: print("[-] property_is fail") return False return True def check_graph(self, graph): return self.check_graph_vertices(graph) and \ self.check_graph_edges(graph) and \ self.check_properties(graph) print("SEED={}".format(SEED)) print("LENGTH={}".format(LENGTH)) for i in range(LENGTH): print("[i] i = {}".format(i)) num_changes = random.randint(1, 100) client = Client(HOST, PORT) graph = Graph() for i in range(num_changes): changes = Changes(graph) client.apply_changes(changes) if not client.check_graph(graph): print("check graph failed") sys.exit(-1) client.exit() sys.exit(0)
def files(self):
    """Restore backed-up files, ownership and permissions onto '/'.

    Variant of the restore step with indented, prettier console output.
    Applies the backup's filesystem delta (overlay copy, stat fixes and
    deletions), remapping UIDs/GIDs via the merged passwd/group databases.
    Honors ``self.simulate`` (print-only) and records undo data via
    ``self.rollback`` when one is set.

    NOTE(review): Python 2 only (print statements, ``file()`` builtin).
    """
    extras = self.extras
    # Nothing to restore without a filesystem delta.
    if not exists(extras.fsdelta):
        return

    overlay = self.backup_extract_path
    simulate = self.simulate
    rollback = self.rollback
    limits = self.limits.fs

    print fmt_title("FILES - restoring files, ownership and permissions", '-')

    # Merge the backup's user/group databases into the local ones and get
    # the resulting UID/GID remapping tables.
    passwd, group, uidmap, gidmap = self._userdb_merge(extras.etc, "/etc")

    if uidmap or gidmap:
        print "MERGING USERS AND GROUPS:\n"
        for olduid in uidmap:
            print "  UID %d => %d" % (olduid, uidmap[olduid])
        for oldgid in gidmap:
            print "  GID %d => %d" % (oldgid, gidmap[oldgid])
        print

    changes = Changes.fromfile(extras.fsdelta, limits)
    deleted = list(changes.deleted())

    # Save undo information before touching anything.
    if rollback:
        rollback.save_files(changes, overlay)

    fsdelta_olist = self._get_fsdelta_olist(extras.fsdelta_olist, limits)
    if fsdelta_olist:
        print "OVERLAY:\n"
        for fpath in fsdelta_olist:
            print "  " + fpath
        if not simulate:
            self._apply_overlay(overlay, '/', fsdelta_olist)
        print

    statfixes = list(changes.statfixes(uidmap, gidmap))

    if statfixes or deleted:
        print "POST-OVERLAY FIXES:\n"

    for action in statfixes:
        print "  " + str(action)
        if not simulate:
            action()

    for action in deleted:
        print "  " + str(action)

        # rollback moves deleted to 'originals'
        if not simulate and not rollback:
            action()

    if statfixes or deleted:
        print

    def w(path, s):
        # Overwrite *path* with the string form of *s*.
        file(path, "w").write(str(s))

    if not simulate:
        w("/etc/passwd", passwd)
        w("/etc/group", group)
simulate = False verbose = False for opt, val in opts: if opt in ('-s', '--simulate'): simulate = True elif opt in ('-v', '--verbose'): verbose = True else: usage() if len(args) < 1: usage() delta = args[0] paths = args[1:] changes = Changes.fromfile(delta, paths) if simulate: verbose = True for action in changes.deleted(): if verbose: print action if not simulate: action() if __name__ == "__main__": main()
uidmap = parse_idmap(val) elif opt in ('-g', '--gid-map'): gidmap = parse_idmap(val) elif opt in ('-s', '--simulate'): simulate = True elif opt in ('-v', '--verbose'): verbose = True else: usage() if len(args) < 1: usage() delta = args[0] paths = args[1:] changes = Changes.fromfile(delta, paths) if simulate: verbose = True for action in changes.statfixes(uidmap, gidmap): if verbose: print action if not simulate: action() if __name__=="__main__": main()
from raco import Raco
from cpds import Cpds
from progress import Progress
from source import Source
from changes import Changes
from tmiri import Tmiri
import json

# Load the shared configuration once.
with open('config.json', 'r') as config_file:
    config = json.load(config_file)

progress = Progress()
changes = Changes()

# Every source shares the same config, progress and changes trackers.
sources = [source_cls(config, progress, changes)
           for source_cls in (Tmiri, Cpds, Raco)]

for source in sources:
    source.update()

progress.end()
changes.status()

#for source in sources:
#    source.status()