def login(self):
    r = Request("http://uploaded.net/io/login", method="POST")
    r.add_header("Origin", "http://uploaded.net")
    r.add_header("Referer", "http://uploaded.net/")
    r.add_header("X-Prototype-Version", "1.6.1")
    r.add_header("X-Requested-With", "XMLHttpRequest")
    r.set_payload({
        "id": Config.get("id"),
        "pw": Config.get("password"),
        "_": ""  # whatever
    })
    r = r.send()
    if r.status_code != 200 or "err" in r.text:
        log.error("ul.to: Login failed.")
        self.deactivate()
        return
    # Try to find out if ddl is activated
    r = Request("http://uploaded.net/me").send()
    try:
        html = PyQuery(r.text)
        if len(html("#ddl")) > 0:
            if html("#ddl").attr("checked") == "checked":
                self.ddl = True
    except Exception:
        pass
def connect(self):
    """ Try to connect to the FTP server. """
    try:
        self.ftp = Ftp(self.address, self.user, self.port, self.timeout)
        self.ftp.connect(self.address, self.port)
        self.ftp.login(self.user, self.password)
        if Config.is_prot_d() is True:
            self.ftp.prot_p()
        self.test_prot_d()
        return ("connected", self.ftp)
    except ftplib.all_errors as e:
        e = str(e)
        print("\n")
        error(e + "\n")
        if 'TLS' in e or 'plain' in e:
            cprint(
                "[b]Solution[/b]: Change the \"[b][blue]ftps[/blue][/b]\" options to "
                "\"[b][green]true[/green][/b]\" in \"[warning]{}[/warning]\"\n"
                .format(Config.get_config_path_for_print_only()))
        elif 'incorrect' in e or 'password' in e:
            cprint(
                "[b]Solution[/b]: Be sure to have the right \"[b][blue]address[/blue][/b]\","
                " \"[b][blue]password[/blue][/b]\","
                " \"[b][blue]user[/blue][/b]\" and"
                " \"[b][blue]port[/blue][/b]\""
                " in \"[warning]{}[/warning]\"\n".format(
                    Config.get_config_path_for_print_only()))
        sys.exit(1)
def __init__(self):
    if Hoster.plugin_source is not None:
        return
    plugin_base = PluginBase(package='hoster.plugins')
    Hoster.plugin_source = plugin_base.make_plugin_source(
        searchpath=['./hoster'])
    with Hoster.plugin_source:
        for p in Hoster.plugin_source.list_plugins():
            h = Hoster.plugin_source.load_plugin(p)
            if not hasattr(h, p):
                log.debug("Plugin " + p + " is invalid (No class named " + p + " in module)")
                continue
            if not Config.get("hoster/" + p + "/active", True):
                continue
            h = getattr(h, p)
            if not configured(h):
                log.error("Plugin " + h.__name__ + " is activated but not configured. Deactivating.")
                continue
            h = h()
            h.plugin_name = p
            if not isinstance(h, BasePlugin):
                log.error("Plugin " + p + " is invalid (Not extending BasePlugin)")
                continue
            log.debug("Loaded plugin " + p)
            Hoster.hoster.append(h)
            for n in h.config_values:
                if not Config.get("hoster/" + p + "/" + n):
                    print "Hoster", p, \
                        "needs a", n + ". You need to add a", n, "for", p, "to config.json " \
                        "to use the plugin."
def login(self):
    # Login
    Request("https://www.share-online.biz/user/login", method="POST",
            payload={
                "user": Config.get("user"),
                "pass": Config.get("password"),
                "l_rememberme": 1
            }).add_header("referer", "https://www.share-online.biz/").send()
def handle(self, link):
    r = Request.Request("http://www.debriditalia.com/api.php", payload={
        'generate': "on",
        'link': link,
        'p': Config.get("password"),
        'u': Config.get("user")
    }).send()
    if "ERROR:" not in r.text:
        return Request.Request(url=r.text.strip())
    if "account_expired" in r.text:
        log.error("debriditalia.com account expired!")
        self.deactivate()
    raise Errors.PluginError
def login(self):
    r = Request.Request("http://premium.to/login.php", method="POST")
    r.add_header('Content-Type', 'application/json')
    r.add_header('Origin', 'http://premium.to')
    r.add_header('Referer', 'http://premium.to')
    r.set_raw_payload(json.dumps({
        "u": Config.get("user"),
        "p": Config.get("password"),
        "r": True
    }))
    response = r.send()
    return response.status_code == 200
def account_status(self):
    r = Request("https://api.premiumize.me/pm-api/v1.php", payload={
        "method": "accountstatus",
        "params[login]": Config.get("user-id"),
        "params[pass]": Config.get("PIN")
    }).send().json
    if r["status"] != 200:
        log.error("premiumize.me: Unable to get account status. Deactivating.")
        self.deactivate()
        return
    if r["result"]["type"] == "free":
        log.error("premiumize.me: Account is free account. Deactivating.")
        self.deactivate()
        return
def __init__(self):
    super(PremiumizeMe, self).__init__()
    self.account_status()
    # Now this is a decent API!
    r = Request("https://api.premiumize.me/pm-api/v1.php", payload={
        "method": "hosterlist",
        "params[login]": Config.get("user-id"),
        "params[pass]": Config.get("PIN")
    }).send().json
    if r["status"] != 200:
        log.error("premiumize.me: Receiving hoster list failed: " + r["statusmessage"])
        self.deactivate()
        return
    PremiumizeMe.hostname = r["result"]["tldlist"]
def handle(self, link):
    r = Request("https://api.premiumize.me/pm-api/v1.php", payload={
        "method": "directdownloadlink",
        "params[login]": Config.get("user-id"),
        "params[pass]": Config.get("PIN"),
        "params[link]": link
    }).send().json
    if r["status"] == 200:
        return Request(r["result"]["location"])
    if r["status"] in [400, 404]:
        raise Errors.InvalidLinkError
    if r["status"] in [401, 402, 403]:
        log.error("premiumize.me: " + r["statusmessage"] + " deactivating.")
        self.deactivate()
        return
    raise Errors.PluginError
def test_path():
    json = '{"test": {"path": true}}'
    with open(Config.CONFIG_PATH, "w") as f:
        f.write(json)
    Config.config_changed = -1
    Config.config = None
    assert Config.get("test/path") is True
def handle_link(link):
    okay = [h for h in Hoster.hoster
            if h.match(link) and configured(h)
            and Config.get("hoster/" + h.plugin_name + "/active", False)]
    if len(okay) < 1:
        print "Can't handle link", link, "because no hoster wants to do it"
        return None
    priorized = []
    for hoster in okay:
        priorized.append(((hoster.priority * hoster.get_downloaded_bytes()) +
                          (10 * hoster.get_badness()), hoster))
    priorized = sorted(priorized, key=lambda x: x[0])
    # Try all plugins until there is no plugin left
    i = 0
    download = None
    wasted = {}
    while i < len(priorized):
        start = time.time()
        try:
            download = priorized[i][1].handle(link)
            break
        except PluginError:
            wasted[priorized[i][1]] = (time.time() - start)
            i += 1
    if download is None:
        print priorized[0][1].plugin_name, "wasn't able to process", link
        raise PluginError
    for k, v in wasted.iteritems():
        k.add_badness(v)
    return priorized[i][1], download
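# Illustration (not project code): handle_link above orders candidate hosters by
# the score priority * downloaded_bytes + 10 * badness and tries the lowest score
# first. A tiny standalone sketch with made-up hoster names and numbers:
candidates = [
    ("uploaded", 1, 5000000, 0.0),
    ("premiumize", 1, 1000000, 2.5),
    ("share-online", 2, 500000, 0.0),
]
scored = sorted(
    (priority * downloaded + 10 * badness, name)
    for name, priority, downloaded, badness in candidates
)
for score, name in scored:
    print(score, name)  # lowest score first, i.e. tried first by handle_link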
def configured(hoster):
    if type(hoster) is not type:
        hoster = hoster.__class__
    need = hoster.config_values
    for n in need:
        if not Config.get("hoster/" + hoster.__name__ + "/" + n):
            return False
    return True
def get_default_headers():
    return {
        "User-Agent": Config.get(
            "http/user-agent",
            "Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) "
            "Chrome/41.0.2228.0 Safari/537.36"),
        "Accept-Language": "en-US",
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
    }
def attempt(self):
    """ Launch the manual connection prompt. """
    state = "idle"
    Config.display_config(show=['user', 'address', 'port', 'timeout'])
    while state != "connected":
        if state == "failed":
            error("FTP failed to connect: {}".format(res))
        try:
            tmp = getpass("FTP Password: ")
            self.password = tmp if tmp != "" else self.password
        except KeyboardInterrupt:
            cprint("\n[b]Good Bye {}![/b]".format(self.user))
            sys.exit(1)
        state, res = self.connect()
    self.config()
    self.welcome()
def __init__(self):
    data = Config.load()
    self.address = data['address']
    self.user = data['user']
    self.password = ""
    self.port = int(data['port'])
    self.timeout = int(data['timeout'])
    self.ftp = None
    self.debug = False
def login(self):
    pw = Config.get("password")
    salt = pw[::-1]  # seems like they take security serious
    pw_hash = PBKDF2(pw, salt, 10**3).hexread(16)
    r = Request("https://www.oboom.com/1/login", payload={
        'auth': Config.get("email"),
        'pass': pw_hash
    })
    r.add_header("origin", "https://www.oboom.com")
    r.add_header("referer", "https://www.oboom.com/")
    r.add_header("x-requested-with", "XMLHttpRequest")
    r = r.send()
    if r.status_code != 200:
        log.error("oboom.com Login failed: " + r.text)
        self.deactivate()
        return
    self.cookie = r.json[1]["session"]
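# Side note (assumption, not stated by the snippet): if the third-party PBKDF2
# helper above uses its usual HMAC-SHA1 default, the same 16-byte hex digest can
# be reproduced with the standard library alone. Minimal sketch:
import hashlib

def oboom_password_hash(password):
    # PBKDF2-HMAC-SHA1, 1000 rounds, reversed password as salt, 16-byte digest
    # rendered as hex - mirroring PBKDF2(pw, salt, 10**3).hexread(16).
    salt = password[::-1]
    return hashlib.pbkdf2_hmac("sha1", password.encode(), salt.encode(),
                               1000, dklen=16).hex()

print(oboom_password_hash("example-password"))  # illustrative value only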
def test_prot_d(self):
    try:
        sys.stdout = open(os.devnull, 'w')
        self.ftp.retrlines('LIST')
        sys.stdout = sys.__stdout__
    except ftplib.all_errors:
        # Restore stdout before reporting, otherwise the hint goes to devnull.
        sys.stdout = sys.__stdout__
        print("\n")
        error("PROT P required\n")
        cprint(
            "[b]Solution[/b]: Change the \"[b][blue]prot_d[/blue][/b]\" options to "
            "\"[b][green]true[/green][/b]\" in \"[warning]{}[/warning]\"\n"
            .format(Config.get_config_path_for_print_only()))
        sys.exit(1)
def read(self):
    # golang_plugin_path = "/home/jchaloup/Projects/gofed/gofed/plugins"
    if self.plugin_directory == "":
        golang_plugin_path = Config().getGolangPlugins()
    else:
        golang_plugin_path = self.plugin_directory
    for dirName, _, fileList in walk(golang_plugin_path):
        for file in fileList:
            if not file.endswith(".json"):
                continue
            json_file = "%s/%s" % (dirName, file)
            plugin_obj = Plugin(json_file)
            if not plugin_obj.parse():
                self.err += plugin_obj.getError()
                return False
            self.plugins[file] = plugin_obj
    return True
def v3(name="precise-default", branch="grizzly", template_path=None,
       config=None, destroy=True):
    logging.basicConfig(level=logging.DEBUG)
    config = Config(config)
    deployment = ChefRazorDeployment.fromfile(name, branch, config,
                                              template_path)
    print deployment
    try:
        deployment.build()
    except Exception:
        traceback.print_exc()
        deployment.destroy()
        sys.exit(1)
    if destroy:
        deployment.destroy()
if options.sources:
    sources = options.sources

fp_obj = FormatedPrint(formated=True)

if archive == "":
    fp_obj.printError("archive not set")
    exit(1)

if specfile == "":
    fp_obj.printError("specfile not set")
    exit(1)

if not options.scanalldirs:
    noGodeps = Config().getSkippedDirectories()
else:
    noGodeps = []

if options.skipdirs:
    for dir in options.skipdirs.split(','):
        dir = dir.strip()
        if dir == "":
            continue
        noGodeps.append(dir)

obj = GoLint(specfile, sources, archive, options.verbose, noGodeps=noGodeps)
if not obj.test():
    print obj.getError()

err_cnt = obj.getErrorCount()
parser.add_option("", "", "--skip-errors", dest="skiperrors",
                  action="store_true", default=False,
                  help="Skip all errors during Go symbol parsing")

options, args = parser.parse_args()

path = "."
if len(args):
    path = args[0]

if not options.scanalldirs:
    noGodeps = Config().getSkippedDirectories()
else:
    noGodeps = []

if options.skipdirs:
    for dir in options.skipdirs.split(','):
        dir = dir.strip()
        if dir == "":
            continue
        noGodeps.append(dir)

gse_obj = GoSymbolsExtractor(path, noGodeps=noGodeps,
                             skip_errors=options.skiperrors)
if not gse_obj.extract():
    sys.stderr.write("%s\n" % gse_obj.getError())
    elif options.googlecode:
        import_path = "code.google.com/p/%s" % repo
        commit = options.rev
    elif options.bitbucket:
        import_path = "bitbucket.org/%s/%s" % (project, repo)
        commit = options.commit
    else:
        fmt_obj.printError("Provider not supported")
        exit(1)
else:
    import_path = options.detect
    commit = options.commit

if not options.scanalldirs:
    noGodeps = Config().getSkippedDirectories()
else:
    noGodeps = []

if options.skipdirs:
    for dir in options.skipdirs.split(','):
        dir = dir.strip()
        if dir == "":
            continue
        noGodeps.append(dir)

# 1. decode some package info (name, archive url, ...)
# 2. set path to downloaded tarball
# 3. retrieve project info from tarball
# 4. generate spec file
def match(self, link):
    if not Config.get("app/debug", False):
        return False
    return link.startswith("http://get.testfile/")
parser.add_option("", "", "--endwithupdate", dest="endupdate",
                  action="store_true", default=False,
                  help="stop wizard after update phase")

options, args = parser.parse_args()

pm = PhaseMethods(dry=options.dry, debug=options.debug)

# check branches
if options.branches:
    branches = Config().getBranches()
    sb = filter(lambda b: b != "", options.branches.split(","))
    for b in sb:
        if b not in branches:
            print "%s branch not in common branches" % b
            exit(1)
    pm.setBranches(sorted(sb))

if options.ebranches:
    branches = Config().getBranches()
    sb = filter(lambda b: b != "", options.ebranches.split(","))
    branches = list(set(branches) - set(sb))
    pm.setBranches(sorted(branches))

if options.master:
    branches = ["master"]
def __init__(self):
    self.config = Config()
    self.razor = razor_api(self.config['razor']['ip'])
    self.chef = autoconfigure()
    self.chef.set_default()
def test_none():
    with pytest.raises(TypeError):
        Config.get(None)
def __init__(self, url=Config().getGofedWebUrl()):
    self.url = url
import os
import signal

import gi

from modules.MonitorThread import MonitorThread
from modules.Utils import Utils
from modules.Config import Config
from modules.Menu import Menu

gi.require_version('Gtk', '3.0')
gi.require_version('AppIndicator3', '0.1')
gi.require_version('Notify', '0.7')
from gi.repository import Gtk, AppIndicator3, Notify

APPINDICATOR_ID = 'NordVPN_appIndicator'

signal.signal(signal.SIGINT, signal.SIG_DFL)
utils = Utils()
config = Config(os.path.abspath('./resources/config.ini'))


def main():
    path = os.path.abspath('./resources/icons/nordvpn_icon_square.png')
    category = AppIndicator3.IndicatorCategory.SYSTEM_SERVICES
    indicator = AppIndicator3.Indicator.new(APPINDICATOR_ID, path, category)
    indicator.set_status(AppIndicator3.IndicatorStatus.ACTIVE)
    indicator.set_menu(Menu(config))
    Notify.init("NordVPN")
    MonitorThread(indicator, 20)
    Gtk.main()


if __name__ == '__main__':
    main()
def used_alone(self):
    Config.display_config()
                  default=False, help="run the command in dry mode")
parser.add_option("", "", "--verbose", dest="debug", action="store_true",
                  default=False, help="be more verbose")

options, args = parser.parse_args()

fp_obj = FormatedPrint(formated=False)

branches = Config().getBranches()
if options.branches != "":
    bs = filter(lambda b: b != "", options.branches.split(","))
    for branch in bs:
        if branch in branches:
            continue
        print "%s branch not in a list of all branches" % branch
        exit(1)
    branches = bs

if options.ebranches != "":
    ebs = filter(lambda b: b != "", options.ebranches.split(","))
    for branch in ebs:
        if branch in branches:
def test_set():
    Config.set("q/w/e", True)
    assert Config.get("q/w/e") is True
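# The Config tests in this section (test_path, test_set, test_unset, test_save)
# exercise slash-delimited key paths with an optional default. A minimal,
# self-contained sketch of that lookup style - hypothetical helpers, not the
# project's actual Config class:
def path_get(store, path, default=None):
    node = store
    for key in path.split("/"):
        if not isinstance(node, dict) or key not in node:
            return default
        node = node[key]
    return node

def path_set(store, path, value):
    keys = path.split("/")
    node = store
    for key in keys[:-1]:
        node = node.setdefault(key, {})
    node[keys[-1]] = value

store = {}
path_set(store, "q/w/e", True)
assert path_get(store, "q/w/e") is True
assert path_get(store, "q/q") is None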
"--skip-rpmlint-errors", dest="skiprpmlint", action="store_true", default=False, help="skip rpmlint errors if any") options, args = parser.parse_args() if len(args) != 1: print "Synopsis: [--user=USER|-u USER] [--skip-koji] SPEC" exit(1) specfile = args[0] user = options.user if user == "": user = Config().getFASUsername() if not os.path.exists(specfile): print "Spec file %s not found" % specfile exit(1) obj = SpecParser(specfile) if not obj.parse(): print obj.getError() exit(1) provider = obj.getMacro("provider") repo = obj.getMacro("repo") commit = obj.getMacro("commit") summary = obj.getTag("summary") name = obj.getTag("name")
def __init__(self, handler=''):
    config = Config.getConfig()
    config.read()
    logging_file = config.System.Logging
    logging.config.fileConfig(logging_file)
    self.logger = logging.getLogger(handler)
def createDB(full=False, verbose=False):
    scan_time_start = time()
    packages = []
    outdated = {}
    valid = {}

    if full:
        packages = loadPackages()
    else:
        print "Creating list of updated builds..."
        err, outdated = LocalDB().getOutdatedBuilds()
        if err != []:
            print "Warning: " + "\nWarning: ".join(err)
        packages = outdated.keys()

    if verbose:
        print "Packages to be scanned:"
        for pkg in packages:
            print "\t%s" % pkg
        print ""

    pkg_cnt = len(packages)
    pkg_idx = 1
    pkg_name_len = 0
    for package in packages:
        l = len(package)
        if l > pkg_name_len:
            pkg_name_len = l
    pkg_cnt_len = len("%s" % pkg_cnt)

    print "Updating spec file provides..."
    err, ret = LocalDB().updatePackages(packages)
    if not ret:
        print "Error:\n" + "\n".join(err)
        return False

    ipdb_cache = ImportPathDBCache()
    if not ipdb_cache.load():
        print "Error: %s" % ipdb_cache.getError()
        return False

    golang_pkg = Config().getGolangPkgdb()
    for package in packages:
        starttime = time()
        # len of pkg_idx
        pkg_idx_len = len("%s" % pkg_idx)
        sys.stdout.write("Scanning %s %s %s%s/%s " % (
            package, (pkg_name_len - len(package) + 3) * ".",
            (pkg_cnt_len - pkg_idx_len) * " ", pkg_idx, pkg_cnt))
        sys.stdout.flush()
        pkg = Package(package)
        info = pkg.getInfo()
        # save xml into file
        errs = savePackageInfo(info)
        if errs != []:
            print ""
            print "\n".join(errs)
        else:
            if not full:
                valid[package] = outdated[package]
        pkg_idx += 1
        endtime = time()
        elapsedtime = endtime - starttime
        print strftime("[%Hh %Mm %Ss]", gmtime(elapsedtime))
        # update cache of imported and provided packages
        for item in info:
            devel_name = item
            # This is hacky and depends on info data type representation
            pkg2xml_obj = info[item]['xmlobj']
            imports = pkg2xml_obj.getImportedPackages()
            provides = pkg2xml_obj.getProvidedPackages()
            ipdb_cache.updateBuild(devel_name, provides, imports, package)

    if not ipdb_cache.flush():
        print ipdb_cache.getError()

    scan_time_end = time()
    print strftime("Elapsed time %Hh %Mm %Ss", gmtime(scan_time_end - scan_time_start))

    if not full:
        LocalDB().updateBuildsInCache(valid)

    return True
def test_unset():
    assert Config.get("q/q") is None
"Specify `from' in a range based filter. For --commit and --depth " "specify starting commit by its hash. If date filtering is requested, " "specify starting date in ISO-style format (YYYY-MM-DD). " "If omitted, the newest commit (current date respectively) is used." )) parser.add_option_group( optparse.OptionGroup( parser, "TO", "Specify `to' in a range based filter. For --commit " "specify ending commit by its hash. If date filtering is requested, " "specify ending date in ISO-style format (YYYY-MM-DD).")) parser.add_option_group( optparse.OptionGroup( parser, "QUERY_DEPTH", "Specify depth for depth filtering. " "If omitted, it defaults to %d." % Config().getGofedWebDepth())) parser.add_option_group( optparse.OptionGroup( parser, "GRAPH", 'Available graphs: "added", "modified", "cpc"; ' 'also available in abbreviate forms "a", "m" and "c". Output is in an ' 'SVG format.', )) parser.add_option_group( optparse.OptionGroup( parser, "FORMAT", 'Specify format for output string. Format should be specified by keys ' 'from response delimited by ":" - e.g. to print only "author" '
def test_getall():
    Config.get_all()
#!/usr/bin/python3
import json

from modules.Config import Config  # TODO: Make it prettier
from modules.DB import DB  # TODO: Make it prettier
from modules.Twitter import TIVTwitter  # TODO: Make it prettier
from modules.File import File
from modules.Gmaps import Gmaps

# Start of application
if __name__ == '__main__':
    config = Config()
    db = DB(config)
    gmaps = Gmaps(config)
    twitter = TIVTwitter(config, db, gmaps)

    result = db.execute(
        "select a.account_screen_name,a.account_pic_url, a.account_url, a.account_name, t.tweet_lat, t.tweet_long, t.tweet_time, t.tweet_content from account a, tweets t where a.account_id = t.account_id"
    )

    json_list = []
    for row in result:
        dct = {}
        dct['screen_name'] = row[0].strip()
        dct['profile_pic'] = row[1].strip()
        dct['twitter_url'] = row[2].strip()
        dct['account_name'] = row[3].strip()
        dct['lat'] = row[4]
        dct['lng'] = row[5]
        dct['time'] = row[6].strip()
        dct['tweet_content'] = row[7].strip()
        json_list.append(dct)
def test_save():
    x = get_config_hash()
    Config.set("foo/var", "raw")
    y = get_config_hash()
    assert x != y
#!/usr/bin/python3
# import datetime

from modules.Config import Config  # TODO: Make it prettier
from modules.DB import DB  # TODO: Make it prettier
from modules.Twitter import TIVTwitter  # TODO: Make it prettier
from modules.File import File
from modules.Gmaps import Gmaps

# Start of our mining application
if __name__ == '__main__':
    # Create instances
    config = Config()
    db = DB(config)
    gmaps = Gmaps(config)
    tiv = TIVTwitter(config, db, gmaps)

    # Get config parameters for the mining script
    mine_cfg = config.getConfigParameter('twitter-mine')
    gmaps_cfg = config.getConfigParameter('gmaps')

    # Get followers
    if mine_cfg['mine_followers'] == True:
        # Get value for number of followers
        followers = tiv.getProjectFollowers(
            mine_cfg['mine_number_of_followers'])
        # Go through all the followers
        for user in followers:
import os
import sys

# https://qiita.com/reinhardhq/items/838df0bf09611f3c5872
sys.path.insert(0, os.path.abspath(os.path.join(os.path.dirname(__file__), '..')))

from modules.Config import Config

config = Config.getConfig()
config.read()

print("config.System.Version=" + config.System.Version)
print("config.System.Path=" + config.System.Path)
print("config.System.Logging=" + config.System.Logging)
print("config.System.FlowDefine=" + config.System.FlowDefine)
print("config.WatchDog.Interval={0}".format(config.WatchDog.Interval))
print("config.WatchDog.OverShoot={0}".format(config.WatchDog.OverShoot))
print("config.VideoPlay.VideoFile={0}".format(config.VideoPlay.VideoFile))

flow_def = config.FlowDefine.Flow
print(flow_def)
print(flow_def["Element3"])
print(flow_def["Element3"]["object"])
parser.add_argument("--show-traffic", nargs='?',
                    help="Prints the traffic statistics and exits.",
                    choices=["user", "hoster", "all"])
args = parser.parse_args()

if args.show_traffic:
    choice = [args.show_traffic]
    if choice == ["all"]:
        choice = ["user", "hoster"]
    traffic_data = Shove('file://traffic_log/')
    for name in choice:
        Stats.make_stats(traffic_data, name)
    exit()

try:
    if os.geteuid() == 0:
        # Oh no, someone started OCHproxy as root!
        # But maybe we can drop the privileges after getting the port.
        if Config.get("app/user", None) is None or Config.get("app/group", None) is None:
            # There can't be a good reason to do this, can it?
            print "Hey, you shouldn't run this as root!"
            print "If you want to use a privileged port (80 is a good choice), you can set a group and user in"
            print "config.json for OCHproxy to use after binding the port."
            print "You need to wait at least 5 seconds if you insist on this reckless behaviour."
            for _i in xrange(5):
                print ".",
                time.sleep(1)
except AttributeError:
    pass  # This is Windows.

log.info("Starting OCHproxy...")
socket.setdefaulttimeout(30)
Server()