def read(self):
    """Discover and parse all plugin descriptors (*.json files).

    Walks the configured plugin directory (or the Config default when
    self.plugin_directory is empty), parses every JSON descriptor into a
    Plugin object and stores it in self.plugins keyed by file name.

    Returns True on success; on the first descriptor that fails to parse,
    records the parser error in self.err and returns False.
    """
    if self.plugin_directory == "":
        plugin_root = Config().getGolangPlugins()
    else:
        plugin_root = self.plugin_directory

    for directory, _, files in walk(plugin_root):
        # Only JSON files are plugin descriptors; everything else is ignored.
        descriptors = [f for f in files if f.endswith(".json")]
        for descriptor in descriptors:
            plugin = Plugin("%s/%s" % (directory, descriptor))
            if not plugin.parse():
                # Stop at the first broken descriptor and surface its error.
                self.err += plugin.getError()
                return False
            self.plugins[descriptor] = plugin

    return True
def v3(name="precise-default", branch="grizzly", template_path=None, config=None, destroy=True): logging.basicConfig(level=logging.DEBUG) config = Config(config) deployment = ChefRazorDeployment.fromfile(name, branch, config, template_path) print deployment try: deployment.build() except Exception: print traceback.print_exc() deployment.destroy() sys.exit(1) if destroy: deployment.destroy()
default=False, help="run the command in dry mode") parser.add_option("", "", "--verbose", dest="debug", action="store_true", default=False, help="be more verbose ") options, args = parser.parse_args() fp_obj = FormatedPrint(formated=False) branches = Config().getBranches() if options.branches != "": bs = filter(lambda b: b != "", options.branches.split(",")) for branch in bs: if branch in branches: continue print "%s branch not in a list of all branches" % branch exit(1) branches = bs if options.ebranches != "": ebs = filter(lambda b: b != "", options.ebranches.split(",")) for branch in ebs: if branch in branches:
"--skip-rpmlint-errors", dest="skiprpmlint", action="store_true", default=False, help="skip rpmlint errors if any") options, args = parser.parse_args() if len(args) != 1: print "Synopsis: [--user=USER|-u USER] [--skip-koji] SPEC" exit(1) specfile = args[0] user = options.user if user == "": user = Config().getFASUsername() if not os.path.exists(specfile): print "Spec file %s not found" % specfile exit(1) obj = SpecParser(specfile) if not obj.parse(): print obj.getError() exit(1) provider = obj.getMacro("provider") repo = obj.getMacro("repo") commit = obj.getMacro("commit") summary = obj.getTag("summary") name = obj.getTag("name")
parser.add_option("", "", "--skip-errors", dest="skiperrors", action="store_true", default=False, help="Skip all errors during Go symbol parsing") options, args = parser.parse_args() path = "." if len(args): path = args[0] if not options.scanalldirs: noGodeps = Config().getSkippedDirectories() else: noGodeps = [] if options.skipdirs: for dir in options.skipdirs.split(','): dir = dir.strip() if dir == "": continue noGodeps.append(dir) gse_obj = GoSymbolsExtractor(path, noGodeps=noGodeps, skip_errors=options.skiperrors) if not gse_obj.extract(): sys.stderr.write("%s\n" % gse_obj.getError())
elif options.googlecode: import_path = "code.google.com/p/%s" % repo commit = options.rev elif options.bitbucket: import_path = "bitbucket.org/%s/%s" % (project, repo) commit = options.commit else: fmt_obj.printError("Provider not supported") exit(1) else: import_path = options.detect commit = options.commit if not options.scanalldirs: noGodeps = Config().getSkippedDirectories() else: noGodeps = [] if options.skipdirs: for dir in options.skipdirs.split(','): dir = dir.strip() if dir == "": continue noGodeps.append(dir) # 1. decode some package info (name, archive url, ...) # 2. set path to downloaded tarball # 3. retrieve project info from tarball # 4. generate spec file
parser.add_option("", "", "--endwithupdate", dest="endupdate", action="store_true", default=False, help="stop wizard after update phase") options, args = parser.parse_args() pm = PhaseMethods(dry=options.dry, debug=options.debug) # check branches if options.branches: branches = Config().getBranches() sb = filter(lambda b: b != "", options.branches.split(",")) for b in sb: if b not in branches: print "%s branch not in common branches" % b exit(1) pm.setBranches(sorted(sb)) if options.ebranches: branches = Config().getBranches() sb = filter(lambda b: b != "", options.ebranches.split(",")) branches = list(set(branches) - set(sb)) pm.setBranches(sorted(branches)) if options.master: branches = ["master"]
def __init__(self):
    """Wire up the helpers this object drives.

    Loads configuration, connects a Razor API client to the IP found under
    config['razor']['ip'], and opens the locally auto-configured Chef
    connection, making it the default.
    """
    self.config = Config()
    razor_ip = self.config['razor']['ip']
    self.razor = razor_api(razor_ip)
    self.chef = autoconfigure()
    self.chef.set_default()
def __init__(self, url=None):
    """Remember the gofed web service URL to talk to.

    url -- service URL; when omitted (None) the URL is read from Config().

    The original signature used ``url=Config().getGofedWebUrl()`` as the
    default. Python evaluates default arguments once, when ``def`` executes,
    so that performed config I/O at import time and froze the value for the
    life of the process. Resolve it lazily instead; callers that passed an
    explicit URL are unaffected.
    """
    if url is None:
        url = Config().getGofedWebUrl()
    self.url = url
import signal import gi from modules.MonitorThread import MonitorThread from modules.Utils import Utils from modules.Config import Config from modules.Menu import Menu gi.require_version('Gtk', '3.0') gi.require_version('AppIndicator3', '0.1') gi.require_version('Notify', '0.7') from gi.repository import Gtk, AppIndicator3, Notify APPINDICATOR_ID = 'NordVPN_appIndicator' signal.signal(signal.SIGINT, signal.SIG_DFL) utils = Utils() config = Config(os.path.abspath('./resources/config.ini')) def main(): path = os.path.abspath('./resources/icons/nordvpn_icon_square.png') category = AppIndicator3.IndicatorCategory.SYSTEM_SERVICES indicator = AppIndicator3.Indicator.new(APPINDICATOR_ID, path, category) indicator.set_status(AppIndicator3.IndicatorStatus.ACTIVE) indicator.set_menu(Menu(config)) Notify.init("NordVPN") MonitorThread(indicator, 20) Gtk.main() if __name__ == '__main__':
#!/usr/bin/python3 #import datetime from modules.Config import Config # TODO: Make it prettier from modules.DB import DB # TODO: Make it prettier from modules.Twitter import TIVTwitter # TODO: Make it prettier from modules.File import File from modules.Gmaps import Gmaps # Start of our mining application if __name__ == '__main__': # Create instances config = Config() db = DB(config) gmaps = Gmaps(config) tiv = TIVTwitter(config, db, gmaps) # Get config parameters for the mining script mine_cfg = config.getConfigParameter('twitter-mine') gmaps_cfg = config.getConfigParameter('gmaps') # Get followers if mine_cfg['mine_followers'] == True: # Get value for number of followers followers = tiv.getProjectFollowers( mine_cfg['mine_number_of_followers']) # Go through all the followers for user in followers:
"Specify `from' in a range based filter. For --commit and --depth " "specify starting commit by its hash. If date filtering is requested, " "specify starting date in ISO-style format (YYYY-MM-DD). " "If omitted, the newest commit (current date respectively) is used." )) parser.add_option_group( optparse.OptionGroup( parser, "TO", "Specify `to' in a range based filter. For --commit " "specify ending commit by its hash. If date filtering is requested, " "specify ending date in ISO-style format (YYYY-MM-DD).")) parser.add_option_group( optparse.OptionGroup( parser, "QUERY_DEPTH", "Specify depth for depth filtering. " "If omitted, it defaults to %d." % Config().getGofedWebDepth())) parser.add_option_group( optparse.OptionGroup( parser, "GRAPH", 'Available graphs: "added", "modified", "cpc"; ' 'also available in abbreviate forms "a", "m" and "c". Output is in an ' 'SVG format.', )) parser.add_option_group( optparse.OptionGroup( parser, "FORMAT", 'Specify format for output string. Format should be specified by keys ' 'from response delimited by ":" - e.g. to print only "author" '
def createDB(full=False, verbose=False):
    """Scan Go packages and (re)build the local package database.

    full    -- when True, scan every known package (loadPackages()); when
               False, scan only builds LocalDB() reports as outdated, and
               afterwards mark the successfully scanned ones as up to date.
    verbose -- when True, list the packages about to be scanned.

    Returns True on success, False when updating spec-file provides or
    loading the import-path cache fails. Progress and errors are printed
    to stdout as the scan proceeds.
    """
    scan_time_start = time()
    packages = []
    outdated = {}
    # packages scanned without errors; used to update the cache in
    # incremental (not full) mode only
    valid = {}
    if full:
        packages = loadPackages()
    else:
        print "Creating list of updated builds..."
        err, outdated = LocalDB().getOutdatedBuilds()
        if err != []:
            print "Warning: " + "\nWarning: ".join(err)
        packages = outdated.keys()
    if verbose:
        print "Packages to be scanned:"
        for pkg in packages:
            print "\t%s" % pkg
        print ""
    pkg_cnt = len(packages)
    pkg_idx = 1
    # widest package name, used to right-pad the progress line with dots
    pkg_name_len = 0
    for package in packages:
        l = len(package)
        if l > pkg_name_len:
            pkg_name_len = l
    pkg_cnt_len = len("%s" % pkg_cnt)
    print "Updating spec file provides..."
    err, ret = LocalDB().updatePackages(packages)
    if not ret:
        print "Error:\n" + "\n".join(err)
        return False
    ipdb_cache = ImportPathDBCache()
    if not ipdb_cache.load():
        print "Error: %s" % ipdb_cache.getError()
        return False
    # NOTE(review): golang_pkg is fetched but never used below — presumably
    # left over from an earlier version; confirm before removing.
    golang_pkg = Config().getGolangPkgdb()
    for package in packages:
        starttime = time()
        # len of pkg_idx, to align the "idx/count" column
        pkg_idx_len = len("%s" % pkg_idx)
        sys.stdout.write("Scanning %s %s %s%s/%s " % (package,
            (pkg_name_len - len(package) + 3) * ".",
            (pkg_cnt_len - pkg_idx_len) * " ", pkg_idx, pkg_cnt))
        sys.stdout.flush()
        pkg = Package(package)
        info = pkg.getInfo()
        # save xml into file
        errs = savePackageInfo(info)
        if errs != []:
            print ""
            print "\n".join(errs)
        else:
            if not full:
                # scan succeeded: remember the outdated-build record so the
                # cache can be updated at the end
                valid[package] = outdated[package]
        pkg_idx += 1
        endtime = time()
        elapsedtime = endtime - starttime
        print strftime("[%Hh %Mm %Ss]", gmtime(elapsedtime))
        # update cache of imported and provided packages
        for item in info:
            devel_name = item
            # This is hacky and depends on info data type representation
            pkg2xml_obj = info[item]['xmlobj']
            imports = pkg2xml_obj.getImportedPackages()
            provides = pkg2xml_obj.getProvidedPackages()
            ipdb_cache.updateBuild(devel_name, provides, imports, package)
    if not ipdb_cache.flush():
        print ipdb_cache.getError()
    scan_time_end = time()
    print strftime("Elapsed time %Hh %Mm %Ss", gmtime(scan_time_end - scan_time_start))
    if not full:
        LocalDB().updateBuildsInCache(valid)
    return True