def download(url, dest):
    if url.startswith("data:"):
        return
    if fileset.tas(dest):
        return

    common.mkdirs(os.path.split(dest)[0])

    try:
        common.retrieve(url, dest)
        if args["type"] == "html-single" and dest.endswith(".html"):
            get_deps_html(url, dest)
        if args["type"] == "html-single" and dest.endswith(".css"):
            get_deps_css(url, dest)
        common.mkro(dest)
    except urllib.error.HTTPError as e:
        if e.code in (403, 404):
            warn("WARNING: %s on %s, continuing..." % (e, url))
        else:
            raise
def main():
    global config
    config = common.load_config()

    # Permit write of UTF-8 characters to stderr (required when piping output)
    if sys.stderr.encoding is None:
        sys.stderr = codecs.getwriter("UTF-8")(sys.stderr)

    common.mkdirs(config["pt-base"])
    os.chdir(config["pt-base"])
    _lock = common.Lock(".lock")

    tls.s = requests.Session()
    login()

    for i in range(int(config["pt-threads"])):
        t = threading.Thread(target=worker, name=i, args=[tls.s.cookies])
        t.daemon = True
        t.start()

    read_project_list()
    while q.unfinished_tasks:
        time.sleep(1)
    q.join()

    cleanup()
    common.write_sync_done()
def sync_dir(url, path, username, password, odponly, alfresco):
    try:
        if alfresco:
            (dirs, files) = ls_alfresco(path, tls.conn)
        else:
            (dirs, files) = ls(path, tls.conn)
    except ListFailure:
        fileset.ignore_dir(urllib.parse.unquote(path)[1:])
        return

    fileset.add_dir(urllib.parse.unquote(path)[1:])
    common.mkdirs(urllib.parse.unquote(path)[1:])

    for d in dirs:
        q.put((sync_dir, url, d["path"], username, password, odponly,
               alfresco))

    for f in files:
        if odponly == 1 and not f["path"].endswith(".odp"):
            continue

        dest = urllib.parse.unquote(f["path"])[1:]
        fileset.add_file(dest)
        if needs_download(dest, f):
            downloadq.append((download, urllib.parse.urljoin(url, f["path"]),
                              dest, username, password))
def download(url, path):
    with lock:
        if path in files:
            return
        files.add(path)

    if os.path.exists(path):
        r = tls.s.head(url)
        mtime = common.parse_last_modified(r.headers["Last-Modified"])
        if os.path.getmtime(path) == mtime and \
           os.path.getsize(path) == int(r.headers["Content-Length"]):
            return

    common.mkdirs(os.path.dirname(path))
    log(url + " -> " + path)

    r = tls.s.get(url, stream=True)
    temppath = common.mktemppath(path)
    with open(temppath, "wb") as f:
        for data in r.iter_content(4096):
            f.write(data)
        f.flush()
        os.fsync(f.fileno())

    mtime = common.parse_last_modified(r.headers["Last-Modified"])
    os.utime(temppath, (mtime, mtime))
    common.mkro(temppath)
    common.rename(temppath, path)
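# A minimal standalone sketch of the pattern above: skip the download when the
# remote Last-Modified/Content-Length match the local copy, then write through
# a hidden temp file and rename so readers never observe a partial file. Uses
# only requests and the standard library; parse_http_date is a hypothetical
# stand-in for the common.parse_last_modified helper, which this listing does
# not show.
import email.utils
import os

import requests


def parse_http_date(value):
    # RFC 2822 date -> Unix timestamp, e.g. "Wed, 21 Oct 2015 07:28:00 GMT"
    return email.utils.mktime_tz(email.utils.parsedate_tz(value))


def fetch_if_changed(url, path):
    r = requests.head(url)
    mtime = parse_http_date(r.headers["Last-Modified"])
    if os.path.exists(path) and os.path.getmtime(path) == mtime \
       and os.path.getsize(path) == int(r.headers["Content-Length"]):
        return  # local copy is already current

    r = requests.get(url, stream=True)
    temppath = os.path.join(os.path.dirname(path) or ".",
                            "." + os.path.basename(path))
    with open(temppath, "wb") as f:
        for chunk in r.iter_content(4096):
            f.write(chunk)
        f.flush()
        os.fsync(f.fileno())  # data is on disk before the rename commits it
    os.utime(temppath, (mtime, mtime))
    os.rename(temppath, path)  # atomic on POSIX within one filesystem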
def download(item, db, tries):
    if item["href"] in db:
        path = db.get(item["href"])
    else:
        f = common.retrieve_m(config["clearspace-root"] + item["href"],
                              tries=tries)
        doc = WikiDoc(f.read())
        f.close()
        path = doc.path + "/" + doc.filename

        if want(path):
            skip = False
            if os.path.exists(path):
                st = os.stat(path)
                if st.st_mtime == doc.mtime:
                    skip = True

            if not skip:
                common.mkdirs(doc.path)
                common.retrieve(config["clearspace-root"] + doc.filehref,
                                path, force=True, tries=tries)
                common.mkro(path)
                os.utime(path, (doc.mtime, doc.mtime))

    updatedbs(db, keep, item["href"], path)
def pngquant(filepath, filebase, comtype):
    locate = './package/%s/res/%s' % (comtype, filebase)
    helper.mkdirs(locate)
    os.system('pngquant --quality=85-90 --floyd=0.5 -o %s %s --force'
              % (locate, filepath))
    osize = helper.format_file_size(os.path.getsize(filepath))
    csize = helper.format_file_size(os.path.getsize(locate))
def jpegoptim(filepath, filebase, comtype):
    locate = './package/%s/res/%s' % (comtype, filebase)
    locdir = os.path.dirname(locate)
    helper.mkdirs(locate)
    os.system('jpegoptim -m50 -d %s %s -o' % (locdir, filepath))
    osize = helper.format_file_size(os.path.getsize(filepath))
    csize = helper.format_file_size(os.path.getsize(locate))
def write_log(relative_path, log_message):
    log_path = os.path.join(env.RESULT_PATH, "result", relative_path)
    common.mkdirs(os.path.dirname(log_path))
    with open(log_path, 'a') as f:
        f.write(log_message)
def download(url, path, mtime):
    with lock:
        if path in files:
            return
        files.add(path)

    if os.path.exists(path) and os.path.getmtime(path) == mtime:
        return

    common.mkdirs(os.path.dirname(path))
    log(url + " -> " + path)

    r = get(url, stream=True)
    p = os.path.split(path)
    temppath = os.path.join(p[0], "." + p[1])
    with open(temppath, "wb") as f:
        for data in r.iter_content(4096):
            f.write(data)
        f.flush()
        os.fsync(f.fileno())

    os.utime(temppath, (mtime, mtime))
    common.mkro(temppath)
    common.rename(temppath, path)
def main(username, password):
    global config
    config = common.load_config()
    common.mkdirs(config["jive-base"])
    os.chdir(config["jive-base"])
    l = common.Lock(".lock")

    global max_index
    try:
        with open(".max-index") as f:
            max_index = int(f.read())
    except IOError:
        pass

    tls.s = requests.Session()
    login(username, password)

    threads = int(config["jive-threads"])
    for i in range(threads):
        t = threading.Thread(target=worker, name=i, args=(tls.s.cookies,))
        t.daemon = True
        t.start()

    for c in contents():
        q.put((iter_content, c))
    q.join()

    cleanup()
    common.write_sync_done()

    global index
    with open(".max-index", "w") as f:
        print(index, file=f)
def main():
    global config
    config = common.load_config()
    common.mkdirs(config["product-docs-base"])
    os.chdir(config["product-docs-base"])
    l = common.Lock(".lock")

    get_dump()

    valid_files = set([".lock", ".sync-done"])
    pool = multiprocessing.Pool(processes=int(config["product-docs-threads"]))
    for x in iter_dump():
        x["product_"] = x["product"].replace("_", " ")
        url = "https://access.redhat.com/documentation/%(language)s/" \
              "%(product)s/%(version)s/pdf/%(name)s/" \
              "%(product)s-%(version)s-%(name)s-%(language)s.pdf" % x
        f = "%(product_)s/%(version)s/" \
            "%(product)s-%(version)s-%(name)s-%(language)s.pdf" % x
        pool.apply_async(download, (url, f))
        valid_files.add(f)

    pool.close()
    pool.join()

    remove_invalid_files(valid_files)
    common.write_sync_done()
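# A sketch of the fan-out pattern above in isolation: queue each download onto
# a process pool with apply_async, then close() to stop accepting work and
# join() to wait for the workers to drain the queue. download_one and the URLs
# are hypothetical stand-ins for the module's download helper.
import multiprocessing


def download_one(url, path):
    print("would fetch %s -> %s" % (url, path))


if __name__ == "__main__":
    pool = multiprocessing.Pool(processes=4)
    for i in range(10):
        pool.apply_async(download_one,
                         ("http://example.com/%d" % i, "file-%d.pdf" % i))
    pool.close()  # no more tasks will be submitted
    pool.join()   # block until every queued task has finished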
def download(url, dest, username, password):
    pm = urllib2.HTTPPasswordMgrWithDefaultRealm()
    pm.add_password(None, url, username, password)
    opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(pm))

    common.mkdirs(os.path.split(dest)[0])
    common.retrieve(url, dest, opener=opener, tries=10, force=True)
    common.mkro(dest)
def download(url, dest, username, password):
    pm = urllib.request.HTTPPasswordMgrWithDefaultRealm()
    pm.add_password(None, url, username, password)
    opener = urllib.request.build_opener(
        urllib.request.HTTPBasicAuthHandler(pm))

    common.mkdirs(os.path.split(dest)[0])
    common.retrieve(url, dest, opener=opener, tries=10, force=True)
    common.mkro(dest)
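# For reference, an opener built this way can also be used directly with its
# open() method; a minimal sketch against a hypothetical URL and credentials:
import urllib.request

pm = urllib.request.HTTPPasswordMgrWithDefaultRealm()
pm.add_password(None, "https://example.com/", "user", "secret")
opener = urllib.request.build_opener(urllib.request.HTTPBasicAuthHandler(pm))
with opener.open("https://example.com/protected") as r:
    body = r.read()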
def start_test(case_name):
    env.CASE_NAME = case_name
    env.CASE_START_TIME = datetime.datetime.now().replace(microsecond=0)
    env.CASE_PASS = True

    common.mkdirs("%s\\Result\\screenshots\\" % env.PROJECT_PATH)
    common.mkdirs("%s\\Result\\testcase\\" % env.PROJECT_PATH)

    with open(u"%s\\Result\\testcase\\%s__%s.log" % (env.PROJECT_PATH,
              env.CASE_NAME, common.stamp_date()), "a") as f:
        f.write("\n************** Test Case [%s] [%s] ***************\n"
                % (env.CASE_NAME, env.platformName))
def download_item(item, extension, tries=1):
    dstfile = os.path.join(item.type_,
                           item.pageurl.split("/")[-1]) + extension

    common.mkdirs(item.type_)
    try:
        print("\r[%u]" % item.number, end="", file=sys.stderr)
        common.retrieve(item.dlurl, dstfile, tries=tries)
        common.mkro(dstfile)
    except urllib.error.HTTPError as e:
        warn("can't download item at %s (#%u, %s, %s) (%s), continuing..."
             % (item.dlurl, item.number, item.title, item.type_, e))
def step_fail(message):
    screenshot_name = "%s__%s__Fail__%s.png" % (
        env.CASE_NAME, env.platformName, common.stamp_datetime_coherent())

    with open(u"%s\\Result\\testcase\\%s__%s.log" % (env.PROJECT_PATH,
              env.CASE_NAME, common.stamp_date()), "a") as f:
        f.write("------------ Fail [%s] -------------------\n"
                % common.stamp_datetime())
        f.write("%s Fail: %s, Check ScreenShot [%s]\n"
                % (common.stamp_datetime(), message, screenshot_name))
        f.write("------------ Fail [%s] --------------------------------------------\n"
                % common.stamp_datetime())

    common.mkdirs("%s\\Result\\screenshots\\" % env.PROJECT_PATH)
    env.driver.save_screenshot(u"%s\\Result\\screenshots\\%s"
                               % (env.PROJECT_PATH, screenshot_name))
    env.CASE_PASS = False
    raise AssertionError(message)
def sync(query, keep):
    xml = common.retrieve_m(
        config["gsa-url"] + "?client=internal&output=xml&num=1000&filter=0&q="
        + query, tries=10)
    xml = lxml.etree.parse(xml)
    if int(xml.xpath("//M/text()")[0]) == 1000:
        raise Exception("search returned too many results")

    for result in xml.xpath("//U/text()"):
        dest = result.split("//")[1]
        dest = dest.replace("~", "")

        common.mkdirs(os.path.split(dest)[0])
        common.retrieve(result, dest, tries=10)
        common.mkro(dest)
        keep.add(dest)
def extract(path):
    if config["attachments-enabled"] != "1":
        return

    print("Extracting attachments from %s..." % path, file=sys.stderr)

    mbox = mailbox.mbox(config["lists-base"] + "/" + path)
    for msg in mbox.keys():
        index = 0
        for part in mbox[msg].walk():
            fn = part.get_filename()
            typ = part.get_content_type()
            if fn is not None \
               and not mailindex.decode(part.get("Content-Disposition",
                                                 "inline")).startswith("inline") \
               and typ not in ('application/pgp-signature',
                               'application/pkcs7-signature',
                               'application/x-pkcs7-signature',
                               'image/x-icon',
                               'message/external-body',
                               'message/rfc822',
                               'text/calendar',
                               'text/x-vcard'):
                p = config["attachments-base"] + "/" + path
                try:
                    fn = cleanfilename(fn)
                    if config["attachments-odponly"] != "1" or \
                       fn.lower().endswith(".odp") or \
                       typ.lower().startswith("application/vnd.oasis.opendocument.presentation"):
                        common.mkdirs(p)
                        p += "/%03u-%03u-%s" % (msg, index, fn)
                        if not os.path.exists(p):
                            temppath = common.mktemppath(p)
                            with open(temppath, "wb") as f:
                                f.write(part.get_payload(decode=True))
                                f.flush()
                                os.fsync(f.fileno())
                            common.rename(temppath, p)
                            common.mkro(p)
                except UnicodeEncodeError:
                    pass
            index += 1
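# A minimal sketch of the traversal above: walk() visits every MIME part of
# each message in an mbox, and get_filename()/get_content_type() identify the
# parts that look like real attachments. The mbox path here is hypothetical.
import mailbox

box = mailbox.mbox("archive.mbox")
for key in box.keys():
    for part in box[key].walk():
        name = part.get_filename()
        if name is not None:
            print(key, part.get_content_type(), name)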
def handle_error():
    if not env.CASE_PASS:
        return
    if sys.exc_info()[0] is not None:
        step_normal(common.exception_error())

    screenshot_name = "%s__%s__Error_%s.png" % (
        env.CASE_NAME, env.platformName, common.stamp_datetime_coherent())
    common.mkdirs("%s\\Result\\screenshots\\" % env.PROJECT_PATH)
    env.driver.save_screenshot(u"%s\\Result\\screenshots\\%s"
                               % (env.PROJECT_PATH, screenshot_name))
    step_normal("Please check screenshot [%s]" % screenshot_name)
    env.CASE_PASS = False
def generate_html_report(test_status, test_cases=[], countdown=True):
    common.mkdirs(os.path.join(env.RESULT_PATH, "result"))
    with open(os.path.join(env.RESULT_PATH, "result", "index.html"),
              "w") as f:
        f.write(html_source_header())
        f.write(html_source_body(countdown=countdown))
        f.write(html_source_table1(test_status))
        f.write(html_source_table2())
        f.write(html_source_test_cases(test_cases))
        f.write(html_source_end_table())
        f.write(html_source_version_info())
        f.write(html_source_foot())
def download(url, p, force=False):
    if os.path.exists(p) and not force:
        return

    r = requests.get(url, stream=True)
    # integer division: any 2xx status passes (the original "/" was a float
    # division bug that rejected 201-299)
    if r.status_code // 100 != 2:
        return

    parent = os.path.dirname(p) or "."
    common.mkdirs(parent)
    with tempfile.NamedTemporaryFile(dir=parent) as f:
        for chunk in r.iter_content(chunk_size=2**20):
            f.write(chunk)
        os.chmod(f.name, 0o666 & ~umask)
        os.rename(f.name, p)
        # the file has been renamed into place; disarm the cleanup unlink
        f._closer.delete = False
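# _closer is a private detail of NamedTemporaryFile, so the disarm-the-unlink
# trick above can break across Python versions. A sketch of the same atomic
# replace using only public API:
import os
import tempfile


def atomic_write(path, data):
    parent = os.path.dirname(path) or "."
    fd, temppath = tempfile.mkstemp(dir=parent)
    try:
        with os.fdopen(fd, "wb") as f:
            f.write(data)
        os.rename(temppath, path)  # atomic within one filesystem
    except BaseException:
        os.unlink(temppath)  # only reached if the rename did not happen
        raise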
def _add(self, file_path, version):
    #if not cache.update(CCFile(file_path, version)):
    #    return
    #if [e for e in cfg.getExclude() if fnmatch(file_path, e)]:
    #    return
    toFile = join(self.ct.cc_dir, file_path)
    common.mkdirs(toFile)
    common.removeFile(toFile)
    try:
        self.ct.cc_exec(['get', '-to', toFile, cc_file(file_path, version)])
    except Exception as e:
        # core.longpaths is already set to true to support long paths in git,
        # so if an exception is raised here we simply propagate it.
        raise AppError(e.message)
        #if len(file_path) < 200:
        #    raise
        #print("Ignoring %s as it may be related to https://github.com/charleso/git-cc/issues/9" % file_path)
    os.chmod(toFile, os.stat(toFile).st_mode | stat.S_IWRITE)
    self.repo.git.add('-f', file_path)
def sync_webdav(url, dest, username, password, odponly, alfresco): common.mkdirs(dest) os.chdir(dest) lock = common.Lock(".lock") urlp = urllib.parse.urlparse(url) global fileset fileset = FileSet(urlp.path[1:]) global downloadq downloadq = LockedList() global q q = queue.Queue() threads = threads_create(int(config["webdav-threads"]), (urlp.netloc, username, password)) q.put((sync_dir, url, urlp.path.rstrip("/"), username, password, int(odponly), int(alfresco))) q.join() msg("INFO: will download %u files" % len(downloadq.list)) for d in downloadq.list: q.put(d) threads_destroy(threads) if fileset.dirty: cleanup() common.write_sync_done() lock.unlock()
def sync_webdav(url, dest, username, password, odponly, alfresco): common.mkdirs(dest) os.chdir(dest) lock = common.Lock(".lock") urlp = urlparse.urlparse(url) global fileset fileset = FileSet(urlp.path[1:]) global downloadq downloadq = LockedList() global q q = Queue.Queue() threads = threads_create(int(config["webdav-threads"]), (urlp.netloc, username, password)) q.put((sync_dir, url, urlp.path.rstrip("/"), username, password, int(odponly), int(alfresco))) q.join() msg("INFO: will download %u files" % len(downloadq.list)) for d in downloadq.list: q.put(d) threads_destroy(threads) if fileset.dirty: cleanup() common.write_sync_done() lock.unlock()
def main(username, password): global config config = common.load_config() common.mkdirs(config["jive-base"]) os.chdir(config["jive-base"]) l = common.Lock(".lock") tls.s = requests.Session() login(username, password) threads = int(config["jive-threads"]) for i in range(threads): t = threading.Thread(target = worker, name = i, args = (tls.s.cookies, )) t.daemon = True t.start() for c in contents(): q.put((iter_content, c)) q.join() cleanup() common.write_sync_done()
    xmlargs = xml.xpath("//argument")
    for i in range(len(xmlargs) - 1):
        if xmlargs[i].text == "-play":
            common.retrieve(xmlargs[i + 1].text, vcrfile)
            xmlargs[i + 1].text = "file://" + config["elluminate-base"] + \
                "/" + vcrfile
            break

    fetchjars(xml)
    xml.set("codebase", "file://" + config["elluminate-base"] + "/" + JARS)

    f = open(jnlpfile, "w")
    f.write(lxml.etree.tostring(xml, xml_declaration=True))
    f.close()

    print jnlpfile

if __name__ == "__main__":
    global config
    config = common.load_config()
    args = parse_args()

    common.mkdirs(config["elluminate-base"] + "/" + JARS)
    os.chdir(config["elluminate-base"])

    cj = cookielib.CookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
    urllib2.install_opener(opener)

    args.func(args)
if __name__ == "__main__": global config config = common.load_config() args = parse_args() execpath = config["lgrep-exec"] if execpath is None: print >>sys.stderr, "Please configure your MUA in $HOME/.satools before running %s." % sys.argv[0] sys.exit(1) query = " ".join(map(quote, args["querystring"])) maildb = mailindex.MailDB(args["base"] + "/.index") common.mkdirs(os.path.split(config["lgrep-mailbox"])[0]) common.unlink(config["lgrep-mailbox"]) mbox = open(config["lgrep-mailbox"], "w") for row in maildb.search(query): f = open(os.sep.join((args["base"], row["path"]))) f.seek(row["offset"]) mbox.write(f.read(row["length"])) f.close() mbox.close() common.mkro(config["lgrep-mailbox"]) maildb.close() execpath = execpath.replace("%filename", os.path.split(config["lgrep-mailbox"])[1])
def isgzip(f):
    bytes = f.read(2)
    f.seek(0)
    return bytes == "\x1F\x8B"

if __name__ == "__main__":
    warnings = 0

    global config
    config = common.load_config()

    if not config["lists-sync"]:
        print >>sys.stderr, "Please configure lists in $HOME/.satools before running %s." % sys.argv[0]
        sys.exit(1)

    common.mkdirs(config["lists-base"])
    os.chdir(config["lists-base"])
    lock = common.Lock(".lock")
    db = common.DB(".sync-db")

    now = time.gmtime()
    for line in config["lists-sync"]:
        line = line.split(" ")
        url = line[0].rstrip("/")
        _list = url.split("/")[-1]

        credentials = None
        if len(line) == 3:
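# The two-byte check above relies on the gzip magic number 0x1F 0x8B at the
# start of the stream. Under Python 3 the comparison must use bytes, not str;
# a version that works on a file opened in binary mode:
def isgzip3(f):
    magic = f.read(2)
    f.seek(0)
    return magic == b"\x1f\x8b"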
def download(opener, href, dest):
    common.mkdirs(os.path.split(dest)[0])
    common.retrieve(href, dest, opener=opener, tries=10)
    common.mkro(dest)
def write_assets_lua(assets_content):
    assets_filepath = os.path.abspath(get_assets_path())
    if helper.mkdirs(assets_filepath):
        with open(assets_filepath, 'wt') as f:
            f.write(assets_content)
''' db log (to mongo) '''
groupDir = os.getcwd()
caseSuite = os.path.basename(groupDir)

DIVIDING_LINE = "===================================================================================================="

''' initialize the log directory and files '''
common.mkdirs(LOG_DIR)
common.mkdirs(HTML_DIR)
common.mkdirs(PIC_DIR)

# truncate the log files at start-up
with open(LOG_ABS, "w") as f:
    pass
print "[LOG]\t\t\t" + LOG_ABS

with open(USERLOG_ABS, "w") as f:
    pass
print "[USERLOG]\t\t" + USERLOG_ABS

'''
Log methods:
    wd(message)            Web driver
    wd_info(message)       Web driver Info
    step_normal(message)   Browser Operation Log
def save_screen_shot(image_name):
    image_path = os.path.join(env.RESULT_PATH, "result", "screenshots")
    common.mkdirs(image_path)
    # take the screenshot
    env.threadlocal.BROWSER.save_screenshot(
        os.path.join(image_path, image_name))
    warnings = 0

    global config
    config = common.load_config()
    args = parse_args()

    if args["quiet"]:
        common.progress = lambda x, y: None
        common.progress_finish = lambda: None

    if not config["lists-sync"]:
        print("Please configure lists in $HOME/.satools before running %s."
              % sys.argv[0], file=sys.stderr)
        sys.exit(1)

    common.mkdirs(config["lists-base"])
    os.chdir(config["lists-base"])
    lock = common.Lock(".lock")
    db = common.DB(".sync-db")

    if not args["list"]:
        thunderbird.init()

    now = time.gmtime()
    for line in config["lists-sync"]:
        line = line.split(" ")
        url = line[0].rstrip("/")
        _list = url.split("/")[-1]
def updatedbs(db, keep, href, path):
    threadlock.acquire()
    db.add(href, path)
    if want(path):
        keep.add(path)
    threadlock.release()

if __name__ == "__main__":
    global config
    config = common.load_config()
    args = parse_args()

    common.mkdirs(config["clearspace-base"])
    os.chdir(config["clearspace-base"])
    lock = common.Lock(".lock")

    if args["all"]:
        common.unlink(".sync-db")
    db = common.DB(".sync-db")
    db.cmpfn = lambda x, y: cmp(int(x.split("-")[1]), int(y.split("-")[1]))

    pm = urllib2.HTTPPasswordMgrWithDefaultRealm()
    pm.add_password(None, config["clearspace-root"],
                    config["clearspace-username"],
                    config["clearspace-password"])
    opener = urllib2.build_opener(urllib2.HTTPBasicAuthHandler(pm))
    urllib2.install_opener(opener)
dest = dest.replace("~", "") common.mkdirs(os.path.split(dest)[0]) common.retrieve(result, dest, tries = 10) common.mkro(dest) keep.add(dest) if __name__ == "__main__": global config config = common.load_config() args = parse_args() if args["quiet"]: common.progress = lambda x, y: None common.progress_finish = lambda: None common.mkdirs(config["gsa-base"]) os.chdir(config["gsa-base"]) lock = common.Lock(".lock") keep = set() for query in config["gsa-sync"]: sync(query, keep) for dirpath, dirnames, filenames in os.walk(".", topdown = False): for f in filenames: path = os.path.relpath(dirpath, ".") + "/" + f if not path.startswith("./.") and path not in keep: os.unlink(path) if not os.listdir(dirpath):
    threadlock.release()

def worker():
    while True:
        items = q.get()
        try:
            items[0](*items[1:])
        finally:
            q.task_done()

if __name__ == "__main__":
    config = common.load_config()

    common.mkdirs(config["resourcelibrary-base"])
    os.chdir(config["resourcelibrary-base"])

    # Prevent multiple application instances running at once
    lock = common.Lock(".lock")

    # http://www.redhat.com/resourcelibrary relies on tracking cookies
    cj = http.cookiejar.CookieJar()
    opener = urllib.request.build_opener(
        urllib.request.HTTPCookieProcessor(cj))
    urllib.request.install_opener(opener)

    # Permit write of UTF-8 characters to stderr (required when piping output)
    if sys.stderr.encoding is None:
        sys.stderr = codecs.getwriter("UTF-8")(sys.stderr)
        for f in filenames:
            path = os.path.join(dp, f)
            if not path.startswith("./.") and path not in fileset.s:
                os.unlink(path)

    for dirpath, dirnames, filenames in os.walk(".", topdown=False):
        if dirpath != "." and not os.listdir(dirpath):
            os.rmdir(dirpath)

if __name__ == "__main__":
    config = common.load_config()
    global args
    args = parse_args(config)

    common.mkdirs(config["product-docs-base"])
    os.chdir(config["product-docs-base"])

    filters = config["product-docs-filter"]
    filters.extend(args["filters"])
    # if the last filter is an include, append an exclude-everything filter so
    # the chain behaves as expected
    if filters and filters[-1][0] == "i":
        filters.append("x/.*/")

    lock = common.Lock(".lock")

    if int(config["product-docs-threads"]) > 1:
        common.progress = lambda x, y: None
        common.progress_finish = lambda: None
        # links are time-sensitive; if get_isos() is out of date we will need
        # to re-fetch
        if time.time() > start + 300:
            finished = False
            break

if __name__ == "__main__":
    config = common.load_config()
    args = parse_args()

    if args["list"]:
        for iso in get_isos():
            print("[%c] %s" % ([" ", "*"][iso.match()], iso.name))
        sys.exit(0)

    common.mkdirs(config["rhn-dumps-base"])
    os.chdir(config["rhn-dumps-base"])
    lock = common.Lock(".lock")

    threads = []
    for i in range(int(config["rhn-dumps-threads"])):
        t = threading.Thread(target=worker, name=i)
        t.daemon = True
        t.start()
        threads.append(t)

    for t in threads:
        # join/sleep so CTRL+C works correctly
        while t.isAlive():
            t.join(1)
xmlargs[i + 1].text = "file://" + config["elluminate-base"] + "/" + vcrfile break fetchjars(xml) xml.set("codebase", "file://" + config["elluminate-base"] + "/" + JARS) f = open(jnlpfile, "wb") f.write(lxml.etree.tostring(xml, xml_declaration = True)) f.close() print(jnlpfile) if __name__ == "__main__": global config config = common.load_config() args = parse_args() if not "func" in args: ap.print_help() sys.exit(1) common.mkdirs(config["elluminate-base"] + "/" + JARS) os.chdir(config["elluminate-base"]) cj = http.cookiejar.CookieJar() opener = urllib.request.build_opener(urllib.request.HTTPCookieProcessor(cj)) urllib.request.install_opener(opener) args.func(args)
    config = common.load_config()
    args = parse_args()

    execpath = config["lgrep-exec"]
    if execpath is None:
        print("Please configure your MUA in $HOME/.satools before running %s."
              % sys.argv[0], file=sys.stderr)
        sys.exit(1)

    query = " ".join(args["querystring"])

    maildb = mailindex.MailDB(args["base"] + "/.index")

    common.mkdirs(os.path.split(config["lgrep-mailbox"])[0])
    common.unlink(config["lgrep-mailbox"])
    mbox = open(config["lgrep-mailbox"], "wb")

    for row in maildb.search(query):
        f = open(os.sep.join((args["base"], row["path"])), "rb")
        f.seek(row["offset"])
        mbox.write(f.read(row["length"]))
        f.close()

    mbox.close()
    common.mkro(config["lgrep-mailbox"])
    maildb.close()

    execpath = execpath.replace("%filename",
    html = lxml.html.fromstring(html)

    params = {}
    for i in html.xpath("//form[@name = 'login']//input"):
        params[i.get("name")] = i.get("value")
    params["email"] = config["pt-username"]
    params["password"] = config["pt-password"]

    common.retrieve_m(url, urllib.urlencode(params), opener=opener, tries=10)

if __name__ == "__main__":
    global config
    config = common.load_config()

    common.mkdirs(config["pt-base"])
    os.chdir(config["pt-base"])
    lock = common.Lock(".lock")

    cj = cookielib.CookieJar()
    opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))

    threads = int(config["pt-threads"])
    if threads > 1:
        common.progress = lambda x, y: None
        common.progress_finish = lambda: None

    for i in range(threads):
        t = threading.Thread(target=worker, name=i)
        t.daemon = True