def retrieveFile(self, libName, breakpadId, symbolFilename):
    """Download a symbol file, trying every known symbol server.

    Returns a tuple |success, data|: data is the file contents on
    success, "" otherwise.
    """
    skipURLs = []
    for attempt in range(config['retries']):
        for symbolURL in self.symbolURLs:
            if symbolURL in skipURLs:
                continue
            url = self.getSymbolURL(symbolURL, libName, breakpadId, symbolFilename)
            success, exists, data = self.fetchURL(url)
            if not success:
                continue
            if not exists:
                # Don't retry this server if we know the file is not on it
                skipURLs.append(symbolURL)
                continue
            return True, data
        # BUG FIX: don't sleep/log "Retrying" after the final attempt —
        # there is no retry left at that point.
        if attempt < config['retries'] - 1 and config['retryDelayMs']:
            # BUG FIX: float division; under Python 2 the original
            # "/ 1000" truncated sub-second delays to 0 seconds.
            time.sleep(config['retryDelayMs'] / 1000.0)
            logger.log(logLevel.DEBUG, "Retrying download of {}/{}/{}".format(libName, breakpadId, symbolFilename))
    logger.log(logLevel.DEBUG, "Unable to download {}/{}/{}".format(libName, breakpadId, symbolFilename))
    return False, ""
def parseReviewer(file, user):
    """Scrape every reviewed-restaurant list page for *user* from tabelog
    and dump the accumulated tree to data\\<user>.json.

    Returns the result count from the last page fetched (< 20 results
    means the final page was reached).
    """
    restaurant_tree = {}
    page = 1
    # Placeholder label rows handed to process() for each page.
    # (Renamed from "list", which shadowed the builtin.)
    seed_rows = [
        "0 0 0 0.5 0.5 0 1 0 0 0 ",
        "0 1 0 0.5 0.5 0 1 0 0 0 ",
        "0 2 0 0.5 0.5 0 1 0 0 0 ",
        "0 3 0 0.5 0.5 0 1 0 0 0 ",
        "0 4 0 0.5 0.5 0 1 0 0 0 ",
        "0 5 0 0.5 0.5 0 1 0 0 0 "
    ]
    while True:
        r = requests.get(
            "https://tabelog.com/rvwr/" + user +
            "/reviewed_restaurants/list/?bookmark_type=1&sk=&sw=&Srt=D&SrtT=mfav&review_content_exist=0&PG=" +
            str(page))
        contents = r.text
        result = process(contents, restaurant_tree, seed_rows, page)
        page += 1
        Logger.log(
            "ParseReviewer",
            "Page [" + str(page) + "]. Results Found: [" + str(result) + "]")
        if result < 20:
            # A short page means we've reached the last page of reviews.
            break
    # "with" guarantees the file is closed even if dumps() raises.
    with open("data\\" + user + ".json", 'w', encoding="utf-8") as writeFile:
        writeFile.write(
            json.dumps(restaurant_tree, indent=4, separators=(',', ': '),
                       ensure_ascii=False))
    return result
def mod_dir_owner(self, relative_path, user=None, group=None, operator="unknown"):
    """Change the owner (user and/or group) of a shared directory that
    lives under this filesystem's mount point."""
    if not self.is_available():
        raise StorLeverError("File system is unavailable", 500)
    if "." in relative_path or ".." in relative_path:
        raise StorLeverError("name cannot include . or ..", 400)
    if relative_path.startswith("/"):
        raise StorLeverError("name must be a relative path name", 400)

    path = os.path.join(self.fs_conf["mount_point"], relative_path)
    if not os.path.exists(path):
        raise StorLeverError("Share directory not found", 404)

    umgr = user_mgr()
    # -1 tells chown() to leave the corresponding id untouched.
    uid = -1 if user is None else umgr.get_user_info_by_name(user)["uid"]
    gid = -1 if group is None else umgr.get_group_by_name(group)["gid"]
    os.chown(path, uid, gid)

    logger.log(logging.INFO, logger.LOG_TYPE_CONFIG,
               "Share directory (%s) owner is changed to (%s:%s)"
               " by user(%s)" % (path, user, group, operator))
def doDebugWork(self):
    """Handle the debug action for the request at the head of the work
    queue and return the (mutated) response dict."""
    # Renamed the unpacked "id" local to req_id; it shadowed the builtin.
    req_id, request, response, future = self.workQueue[0]
    action = request['action']
    logger.log(logLevel.INFO, "{} Handling debug action: {}".format(req_id, action))
    if 'libName' in request and 'breakpadId' in request:
        # Lots of requests require the cache path for a library
        libName = str(request['libName'])
        breakpadId = str(request['breakpadId'])
        symbolFilename = self.getSymbolFileName(libName)
        relPath = self.getSymbolFileRelPath(libName, breakpadId, symbolFilename)
        cachePath = os.path.join(config['cachePath'], relPath)
    if action == "cacheAddRaw":
        # Force a fresh raw download into the cache.
        self.cache.evict(cachePath)
        if self.downloadToCache(libName, breakpadId, symbolFilename, cachePath, saveRaw=True):
            response['path'] = cachePath
        else:
            response['path'] = None
    elif action == "cacheGet":
        response['path'] = self.getFile(libName, breakpadId)
    elif action == "cacheEvict":
        self.cache.evict(cachePath)
        response['success'] = True
    elif action == "cacheExists":
        response['exists'] = (self.cache.retrieve(cachePath) is not None)
    else:
        logger.log(logLevel.ERROR, "{} Invalid action: {}".format(req_id, action))
        response['message'] = "Invalid action"
    return response
def findCoords(content):
    """Extract status, map coordinates and lunch/dinner prices from a
    tabelog restaurant page.

    Returns a dict with an optional 'status' key plus 'coords'
    (longitude/latitude floats) and 'cost' (lunch/dinner strings).
    """
    result = {}
    if content.find("rst-st-closed") > 0:
        Logger.log("CLOSED", ".")
        result['status'] = "closed"
    coordsStart = content.find("center=") + 7
    coordsEnd = content.find("&", coordsStart)
    coords = content[coordsStart:coordsEnd].split(",")
    try:
        result['coords'] = {
            "longitude": float(coords[0]),
            "latitude": float(coords[1])
        }
    except (ValueError, IndexError):
        # Marker missing or malformed; fall back to a null position.
        # BUG FIX: was a bare "except:", which also swallowed
        # KeyboardInterrupt/SystemExit.
        result['coords'] = {"longitude": 0.0, "latitude": 0.0}
    # 38 = len('class="rdheader-budget__price-target"') + 1, skipping '>'.
    costStart = content.find("class=\"rdheader-budget__price-target\"") + 38
    costEnd = content.find("<", costStart)
    result['cost'] = {}
    result['cost']['lunch'] = content[costStart:costEnd]
    costStart = content.find("class=\"rdheader-budget__price-target\"", costEnd) + 38
    costEnd = content.find("<", costStart)
    result['cost']['dinner'] = content[costStart:costEnd]
    return result
def getNews(chat_id):
    """Build the latest-news digest (up to 5 items) for the user's saved
    RSS location.

    Returns the location-picker keyboard dict when no location is set,
    otherwise the formatted news string.
    """
    userLocation = locations.getParseLink('news', chat_id)
    if 'keyboard' in userLocation:
        # No location configured yet: hand back the selection keyboard.
        return userLocation
    if 'link' in userLocation:
        response = requests.get(userLocation['link'])
        try:
            root = ET.fromstring(response.content)
        except Exception as e:
            logger.log(e)
            # BUG FIX: previously execution fell through with "root"
            # undefined, turning a parse failure into a NameError below.
            raise
        now = datetime.now()
        nowStr = now.strftime("%d.%m.%Y %H:%M")
        newsList = lang["newsListHeader"].format(nowStr, userLocation['name'])
        count = 0
        for child in root.find('channel'):
            if child.tag == 'item':
                newsList += child.find('title').text
                newsList += '\n'
                newsList += child.find('link').text
                newsList += '\n\n'
                count += 1
                if count >= 5:
                    break
        return newsList
async def on_message(message):
    """Filter incoming messages: forward "bot!" commands, and delete
    messages that ping members/roles above the author's allowed tier."""
    try:
        str(message.author.guild.name)
    except AttributeError:
        # No guild attribute => direct message; those are unsupported.
        if not (message.author.id == bot.user.id):
            await message.channel.send("les commande via mp ne sont pas supporter par le bot :/")
        return
    if message.content.startswith("bot!"):
        ctx = await bot.get_context(message)
        logger.log("cmd", "Lancement du process de :\"" + message.content + "\" par : {} \n".format(message.author.name), ctx)
        await bot.process_commands(message)
        logger.log("cmd", "=====Process Termier=====", ctx)
    conf = config(message.guild.name, "server.json")
    # BUG FIX: the mention checks used ">= 0", which is always true for a
    # list length; "> 0" is what was meant (only act when something is
    # actually mentioned).
    if not message.author.guild_permissions.administrator and int(conf.config["maxOffset"]) != 0 and (len(message.mentions) > 0 or len(message.role_mentions) > 0):
        author_tier = get_max_member_tier(message.author)
        test = True
        if len(message.mentions) > 0:
            for member in message.mentions:
                if author_tier + int(conf.config["maxOffset"]) < get_min_member_tier(member):
                    test = False
        if len(message.role_mentions) > 0:
            for role in message.role_mentions:
                if role.name in conf.config["roles"].keys():
                    if author_tier + int(conf.config["maxOffset"]) < int(conf.config["roles"][role.name]):
                        test = False
        if not test:
            await message.delete()
def extract_essential_text(self, url):
    """
    Use Goose and newspaper library to extract the content of the article
    specified by the given url.
    :return: a list of strings that represent the content of the article
    """
    # NOTE(review): the original wrapped this body in
    # "except Exception as exception: raise exception", which handled
    # nothing and (on Python 2) destroyed the traceback; exceptions now
    # propagate naturally.
    html = self.retrieve_raw_html(url)
    article_goose = self.goose.extract(raw_html=html)
    text = article_goose.cleaned_text
    # If Goose is unable to extract the article content, try newspaper
    if text == "":
        article = Article(url, language="zh")
        article.download(input_html=html)
        article.parse()
        # If newspaper is unable to extract the content of the article,
        # return the title of the page
        text = article.text if article.text != "" else article.title
    lines = [
        line.strip() for line in text.splitlines() if line.strip()
    ]
    logger.log("Extracted text from url {}".format(url))
    return lines
def scan():
    """scan for subtitles in a given path
    json:
    {
    "languages": "eng, nld",
    "path":"/path",
    "age": 14
    }
    age = in days
    """
    if request.json:
        mydata = request.json
    else:
        # BUG FIX: previously only logged and fell through, crashing with
        # NameError on the undefined "mydata" below.
        log(NO_DATA_RECEIVED)
        return error(NO_DATA_RECEIVED, 406)
    if 'path' not in mydata or not mydata['path']:
        return error(NO_PATH_PROVIDED, 406)
    if 'languages' not in mydata or not mydata['languages']:
        mydata['languages'] = parse_languages("eng")
    if 'age' not in mydata or not mydata['age']:
        # Default: only consider files newer than two weeks.
        mydata['age'] = 14
    executor.submit(scan_folder_for_subs, mydata['path'], mydata['age'],
                    mydata['languages'])
    return 'Scan has started in the background'
async def send_about(ctx):
    """Reply with the bot's "about" embed (description plus repo link)."""
    log(ctx, 'about command')
    about_embed = Embed(
        title='디시콘 핫산',
        description='디시콘을 디스코드에서 쓸 수 있게 해주는 디스코드 봇입니다.',
        color=hassan_env.EMBED_COLOR,
    )
    about_embed.add_field(
        name='Repository',
        value='https://github.com/dldhk97/dccon_hassan',
        inline=False,
    )
    await ctx.send(embed=about_embed)
async def shopList(ctx, *keyword: str):
    """List every known shop for this guild, optionally filtered by the
    space-joined keyword arguments."""
    try:
        shops = mc.Shops(ctx.guild.name)
    except json.JSONDecodeError:
        await ctx.send("aucun shop n'as été trouver on dirais bien !")
        return
    if keyword:
        shops = shops.with_tags(" ".join(keyword))
    else:
        logger.log("cmd", "aucun argument trouver", ctx)
    parts = ["liste des shops trouvé :\n"]
    for c, shop in enumerate(shops):
        parts.append("{}: Vend :{} {} contre :{} {} |{}\n".format(
            c, shop.sell.qte, shop.sell.name, shop.buy.qte, shop.buy.name, shop.name))
    await ctx.send("".join(parts))
async def manual_restore_favorites(ctx, *args):
    """Restore the caller's favorites from the download URL given as the
    second argument."""
    if len(args) != 2:
        log(ctx, 'restore_favorites wrong arg count')
        return await error.send_error_restore_favorite(ctx)
    # NOTE(review): "resotre_favorites" looks misspelled, but it must match
    # the controller's actual attribute name — confirm before renaming.
    await favorite_controller.resotre_favorites(ctx, args[1])
async def manual_send_favorite(ctx, *args):
    """Send the favorite dccon matching the keyword built from all args."""
    if not args:
        log(ctx, 'send_favorite wrong arg count')
        return await error.send_error_send_favorite(ctx)
    await favorite_controller.send_favorite(ctx, combine_words(args))
def scrape_locations(self):
    """Scrape Walgreens for vaccine availability near the configured zip
    code, populating self.locations (empty on access-denied)."""
    chrome_options = Options()
    chrome_options.headless = True  # detected in headless mode
    driver = webdriver.Chrome(executable_path=config('DRIVER_PATH'),
                              options=chrome_options)
    access_denied = self.navigate_to_availability_page(driver)
    if access_denied:
        logger.log("walgreens access denied")
        self.should_update_availability = False
        self.locations = []
        driver.close()
        return
    try:
        search_button = self.wait_for_search_button(driver)
        # Randomized pauses/clicks to look less like a bot.
        self.random_sleep(2000, 5000, 500)
        self.enter_zip_code(driver)
        self.click_random_interval(search_button, driver)
        appointments_available = self.validate_availability(driver)
        if appointments_available:
            self.locations = [Location(self.name, self.get_global_booking_link(),
                                       datetime.now(),
                                       [AvailabilityWindow(1, datetime.now())])]
    except Exception as e:
        # BUG FIX: the caught exception was captured but never logged;
        # include it so failures aren't reduced to a page-source dump.
        logger.log("Walgreens scrape error: {}\n {}".format(e, driver.page_source[:1000]))
    finally:
        driver.close()
def main():
    """CLI entry point: search or parse doujinshi ids, then download or
    display them depending on the options."""
    banner()
    options = cmd_parser()
    doujinshi_ids = []
    doujinshi_list = []
    if options.keyword:
        doujinshis = search_parser(options.keyword, options.page)
        print_doujinshi(doujinshis)
        if options.is_download:
            doujinshi_ids = map(lambda d: d['id'], doujinshis)
    else:
        doujinshi_ids = options.ids
    if doujinshi_ids:
        # Renamed loop variable from "id" (shadowed the builtin).
        for id_ in doujinshi_ids:
            doujinshi_info = doujinshi_parser(id_)
            doujinshi_list.append(Doujinshi(**doujinshi_info))
    else:
        raise SystemExit
    if options.is_download:
        downloader = Downloader(path=options.saved_path,
                                thread=options.threads, timeout=options.timeout)
        for doujinshi in doujinshi_list:
            doujinshi.downloader = downloader
            doujinshi.download()
    else:
        # BUG FIX: the original used map() for its side effect; under
        # Python 3 the lazy map object was never consumed and nothing
        # was shown.
        for doujinshi in doujinshi_list:
            doujinshi.show()
    logger.log(15, u'🍺 All done.')
def quota_group_set(self, group, block_softlimit=0, block_hardlimit=0,
                    inode_softlimit=0, inode_hardlimit=0, operator="unknown"):
    """Apply block/inode quota limits for *group* on this filesystem via
    setquota(8)."""
    if not self.is_available():
        raise StorLeverError("File system is unavailable", 500)
    limits = (block_softlimit, block_hardlimit,
              inode_softlimit, inode_hardlimit)
    check_output([SETQUOTA_BIN, "-g", group]
                 + [str(limit) for limit in limits]
                 + [self.fs_conf["mount_point"]])
    logger.log(logging.INFO, logger.LOG_TYPE_CONFIG,
               "File System(%s) quota for group(%s) is changed to "
               "(%d,%d,%d,%d)"
               " by user(%s)" % (self.name, group, block_softlimit,
                                 block_hardlimit, inode_softlimit,
                                 inode_hardlimit, operator))
def inner(self, *args, **kwargs):
    """Run the wrapped command only when a Google API key is configured;
    otherwise warn and return None."""
    if self.google_service.client.key is not None:
        return func(self, *args, **kwargs)
    logger.log(
        "'predict' command cannot be executed until an API_KEY is present",
        "warning", False)
def do_log(self, arg):
    """Performs actions based on the arguments given:
    > log clear : clears all the log entries
    > log print all : prints all the content in the log (use with caution as the size of the log might be large)
    > log print <int>: prints the last <int> of lines of the log file"""
    args = arg.split()

    def print_content():
        # Show the last N lines ("all" => everything), numbered so the
        # most recent line is 1.
        lines = logger.get_all_content()
        try:
            count = len(lines) if args[1] == "all" else int(args[1])
            for offset, line in enumerate(lines[-count:]):
                print("{:>5} {}".format(count - offset, line))
        except IndexError:
            logger.log(
                "Please enter a second argument for the 'log' command",
                "error", False)
        except ValueError:
            logger.log(
                "Please enter a valid integer to represent the number of lines",
                "error", False)

    dispatch = dict(clear=logger.clear_log, print=print_content)
    try:
        dispatch[args[0]]()
    except KeyError:
        logger.log("Command 'log {}' not supported".format(arg), "error", False)
def inner(self, *args, **kwargs):
    """Run the wrapped command only after a fund has been selected;
    otherwise warn and return None."""
    if self.fund_obj is not None:
        return func(self, *args, **kwargs)
    logger.log(
        "Fund has not been set yet. Use 'set <fund_code>' to specify the fund.",
        "warning", False)
def do_article(self, arg):
    """In order for a news articles to be cached, 'predict' command needs to be run first.
    Performs actions based on the arguments given:
    > article view <int> : print out the content of the article which has the index specified by <int>
    > article list all : lists the title and url of all the cached articles during analysis
    > article list <stock_name>: lists the title and url of cached articles on the stock specified by <stock_name>
    > article clear : clears all the cached news articles"""
    args = arg.split()

    def view_article():
        # Print one cached article: stock name, url and full content.
        try:
            index = int(args[1])
            if index == 0:
                # The listing below is 1-based; index 0 would silently
                # alias the last article via [-1], so reject it.
                raise ValueError
            _, stock_name, title, url = logger.get_all_articles()[index - 1]
            logger.log("{}\n{}\n{}".format(
                stock_name, url, logger.search_article_content(url)),
                quiet=False)
        except IndexError:
            logger.log(
                "Please enter the index of the article which you wish to view",
                "error", False)
        except ValueError:
            logger.log(
                "The index of the article must be an integer greater than 0.",
                "error", False)

    def list_articles():
        # List cached articles, for every stock ("all") or a single name.
        try:
            arg2 = args[1]
            stock_names = logger.get_cached_stock_names()
            if arg2 != "all" and arg2 not in stock_names:
                logger.log(
                    "There is no cached news articles on {}".format(arg2),
                    "error", False)
                return
            stock_names = stock_names if arg2 == "all" else [arg2]
            all_articles = logger.get_all_articles()
            for name in stock_names:
                logger.log("Stock: {}".format(name), quiet=False)
                # Article tuples are (index, stock_name, title, url);
                # keep only those belonging to this stock.
                for i, _, title, url in filter(lambda t: t[1] == name,
                                               all_articles):
                    logger.log("{}. {}: {}".format(i, title, url), quiet=False)
        except IndexError:
            logger.log(
                "Please enter a second argument for 'article list' command",
                "error", False)

    actions = dict(view=view_article, list=list_articles,
                   clear=logger.clear_article_log)
    try:
        command = args[0]
        actions[command]()
    except KeyError:
        logger.log("Command 'article {}' not supported".format(arg),
                   "error", False)
def print_dojinshi(dojinshi_list):
    """Pretty-print each dojinshi's id and title between divider lines.

    Does nothing when the list is empty (or falsy).
    """
    if not dojinshi_list:
        return
    logger.log(15, 'Print dojinshi list')
    divider = '-' * 60
    print(divider)
    for entry in dojinshi_list:
        print(entry['id'], '-', entry['title'])
    print(divider)
def print_dojinshi(dojinshi_list):
    """Print a 60-dash-framed table of dojinshi ids and titles; a falsy
    list is a no-op."""
    if not dojinshi_list:
        return
    logger.log(15, "Print dojinshi list")
    print("-" * 60)
    for item in dojinshi_list:
        print(item["id"], "-", item["title"])
    print("-" * 60)
def doujinshi_parser(id_):
    """Fetch and parse the detail page for a doujinshi id.

    Returns a dict with id, name, subtitle, img_id, ext, pages and any of
    the characters/artists/language/tags fields found on the page.
    Raises for an invalid id; exits the process on fetch/parse failure.
    """
    if not isinstance(id_, (int, )) and (isinstance(id_, (str, )) and not id_.isdigit()):
        raise Exception('Doujinshi id({}) is not valid'.format(id_))
    id_ = int(id_)
    logger.log(15, 'Fetching doujinshi information of id {}'.format(id_))
    doujinshi = dict()
    doujinshi['id'] = id_
    url = '{}/{}/'.format(constant.DETAIL_URL, id_)
    try:
        response = request('get', url).content
    except Exception as e:
        logger.critical(str(e))
        sys.exit()
    html = BeautifulSoup(response)
    doujinshi_info = html.find('div', attrs={'id': 'info'})
    title = doujinshi_info.find('h1').text
    subtitle = doujinshi_info.find('h2')
    doujinshi['name'] = title
    doujinshi['subtitle'] = subtitle.text if subtitle else ''
    doujinshi_cover = html.find('div', attrs={'id': 'cover'})
    # Raw strings so the regex escapes aren't interpreted by Python first.
    img_id = re.search(r'/galleries/([\d]+)/cover\.(jpg|png)$',
                       doujinshi_cover.a.img['src'])
    if not img_id:
        # BUG FIX: message previously read "Tried yo get image id failed".
        logger.critical('Failed to get image id')
        sys.exit()
    doujinshi['img_id'] = img_id.group(1)
    doujinshi['ext'] = img_id.group(2)
    pages = 0
    for _ in doujinshi_info.find_all('div', class_=''):
        pages = re.search(r'([\d]+) pages', _.text)
        if pages:
            pages = pages.group(1)
            break
    doujinshi['pages'] = int(pages)
    # gain information of the doujinshi
    information_fields = doujinshi_info.find_all('div', attrs={'class': 'field-name'})
    needed_fields = ['Characters', 'Artists', 'Language', 'Tags']
    for field in information_fields:
        field_name = field.contents[0].strip().strip(':')
        if field_name in needed_fields:
            data = [
                sub_field.contents[0].strip()
                for sub_field in field.find_all('a', attrs={'class': 'tag'})
            ]
            doujinshi[field_name.lower()] = ', '.join(data)
    return doujinshi
def group_del_by_name(self, name, user="******"):
    """Delete a system group via groupdel(8); the root group is protected."""
    if name == "root":
        raise StorLeverError("cannot del group root", 400)
    check_output(["/usr/sbin/groupdel", name], input_ret=[2, 6, 8])
    logger.log(logging.INFO, logger.LOG_TYPE_CONFIG,
               "System group %s is deleted by user(%s)" % (name, user))
async def send_favorite(ctx, keyword):
    """Look up the favorite matching *keyword* and send its dccon;
    FavoriteError messages are relayed back to the user."""
    log(ctx, 'send_favorite command')
    try:
        pkg, index = favorite_read_service.find_favorite_one(ctx, keyword)
        await dccon_controller.send_dccon(ctx, pkg, index)
    except FavoriteError as err:
        await sender.send(ctx, str(err))
def runServer():
    """Configure logging from the loaded config and serve the request
    handler forever on the configured port."""
    log_cfg = config["log"]
    logger.configure(path=log_cfg["path"],
                     level=log_cfg["level"],
                     maxFiles=log_cfg["maxFiles"],
                     maxFileBytes=log_cfg["maxFileSizeMB"] * 1024 * 1024)
    logger.log(logLevel.INFO, "Configuration loaded: {}".format(config))
    application = tornado.web.Application([(r"/", RequestHandler)])
    application.listen(config['port'])
    tornado.ioloop.IOLoop.current().start()
def reset_favorites(ctx):
    """Delete the caller's favorites file (if any) and return a
    confirmation message mentioning them."""
    user_id = str(ctx.author.id)
    favorites_file = os.path.join(hassan_env.FAVORITE_PATH, user_id) + '.txt'
    if os.path.exists(favorites_file):
        os.remove(favorites_file)
    log(ctx, 'reset_favorites reset ' + user_id + '\'s favorites')
    return '<@' + user_id + '>님의 즐겨찾기 목록을 리셋했습니다.'
async def manual_search_favorite(ctx, *args):
    """Search the caller's favorites for the keyword built from args[1:]."""
    log(ctx, 'search_favorite command')
    if len(args) < 2:
        log(ctx, 'search_favorite wrong arg count')
        return await error.send_error_search_favorite(ctx)
    # NOTE(review): "serach_favorites" looks misspelled but must match the
    # controller's real attribute name — confirm before renaming.
    await favorite_controller.serach_favorites(ctx, combine_words(args[1:]))
def __init__(self):
    """Set up the HTML-to-text converter (plain text only) and a
    Chinese-aware Goose extractor."""
    converter = html2text.HTML2Text()
    # Strip every kind of markup so only plain article text remains.
    converter.ignore_links = True
    converter.ignore_emphasis = True
    converter.ignore_tables = True
    converter.ignore_images = True
    converter.unicode_snob = True
    self.converter = converter
    self.goose = Goose({"stopwords_class": StopWordsChinese})
    logger.log("HTML text extractor initialized successfully")
def _copy_file_list(src, dst, file_list): for f in file_list: src_abs_path = os.path.join(src, f) dst_abs_path = os.path.join(dst, f) os.makedirs(ntpath.dirname(src_abs_path), exist_ok=True) os.makedirs(ntpath.dirname(dst_abs_path), exist_ok=True) logger.log(TRACE, 'Copying %s to %s' % (src_abs_path, dst_abs_path)) shutil.copy2(src_abs_path, dst_abs_path)
def group_add(self, name, gid=None, user="******"):
    """Create a system group via groupadd(8), optionally with a fixed gid."""
    cmd = ["/usr/sbin/groupadd"]
    if gid is not None:
        cmd += ["-g", "%d" % int(gid)]
    cmd.append(name)
    check_output(cmd, input_ret=[2, 3, 4, 9])
    logger.log(logging.INFO, logger.LOG_TYPE_CONFIG,
               "New system group %s is created by user(%s)" % (name, user))
def get(self):
    """Serialize both trading records (algorithmic and q-learning) as JSON.

    TODO: parameterize the record names instead of hard-coding them.
    """
    logger.log('/stats/GET')
    serialized = {
        name: self.trading_record_registry[name].serialize()
        for name in ('algorithmic', 'q-learning')
    }
    return json.dumps(serialized)
async def manual_add_favorite(ctx, *args):
    """Register a favorite; args are (cmd, keyword, package_name, dccon_name)."""
    if len(args) != 4:
        log(ctx, 'add_favorite wrong arg count')
        return await error.send_error_add_favorite(ctx)
    _, keyword, package_name, dccon_name = args
    await favorite_controller.add_favorite(ctx, keyword, package_name, dccon_name)
def new_map(self):
    """Ask the client manager for a new map, retrying up to 10 times
    (freeing the old map between attempts) before exiting the process."""
    attempts_left = 10
    while not self.client_manager.new_map():
        logger.log("Failed to create new map", break_after=True, color="error")
        time.sleep(1)
        self.client_manager.free_map()
        time.sleep(1)
        attempts_left -= 1
        if attempts_left == 0:
            sys.exit(1)
    # Give the freshly created map a moment to settle.
    time.sleep(1)
def system_restore(self, user="******"):
    """Restore the whole system to defaults: run every registered
    system-restore callback, then wipe the configuration directory."""
    # call the register callback function for system_restore
    for restore_callback in self.system_restore_cb:
        restore_callback()
    self._clear_conf_dir()
    # invoke the other module's interface to restore
    logger.log(logging.INFO, logger.LOG_TYPE_CONFIG,
               "Storlever system is totally restored by user(%s)" % user)
async def shopRemove(ctx, nb: str):
    """Remove one of the caller's shops by index, all of them with "*",
    or list them with "l"."""
    logger.log("cmd", "\trécupération des shops existant", ctx)
    shops = mc.Shops(ctx.guild.name)
    if ctx.message.author.name in shops.dictionary.keys():
        logger.log("cmd", "\trécupération des shops du joueur", ctx)
        playerShops = mc.Shops(_dict={ctx.message.author.name: shops.dictionary[ctx.message.author.name]})
        if nb != "l":
            try:
                nb = int(nb)
            except ValueError:
                if "*" != nb:
                    # BUG FIX: the exception was instantiated but never
                    # raised, so invalid input crashed later on indexing.
                    raise commands.BadArgument()
                else:
                    logger.log("cmd", "\tsuppression de tout les shops du joueur", ctx)
                    for shop in playerShops.shops:
                        shops.suppr(shop)
                    shops.dump(ctx.guild.name)
                    await ctx.send("tout vos shop on été supprimé !")
                    return
            logger.log("cmd", "\tsuppression du shop demander par le joueur", ctx)
            shops.suppr(playerShops[nb])
            shops.dump(ctx.guild.name)
            await ctx.send("le shop n° {} a été suprimer".format(nb))
        else:
            logger.log("cmd", "\taffichage des shops du joueur", ctx)
            msg = "Vos shops :\n"
            for c, shop in enumerate(playerShops):
                msg += "{}: Vend :{} {} contre :{} {} \n".format(c, shop.sell.qte, shop.sell.name, shop.buy.qte, shop.buy.name)
            await ctx.send(msg)
    else:
        await ctx.send("vous n'avez encore aucun shop O.o")
def doujinshi_parser(id_):
    """Fetch and parse the detail page for a doujinshi id.

    Returns a dict with id, name, subtitle, img_id, ext, pages and any of
    the characters/artists/language/tags fields found on the page.
    Raises for an invalid id; exits the process on fetch/parse failure.
    """
    if not isinstance(id_, (int,)) and (isinstance(id_, (str,)) and not id_.isdigit()):
        raise Exception('Doujinshi id({}) is not valid'.format(id_))
    id_ = int(id_)
    logger.log(15, 'Fetching doujinshi information of id {}'.format(id_))
    doujinshi = dict()
    doujinshi['id'] = id_
    url = '{}/{}/'.format(constant.DETAIL_URL, id_)
    try:
        response = request('get', url).content
    except Exception as e:
        logger.critical(str(e))
        sys.exit()
    html = BeautifulSoup(response)
    doujinshi_info = html.find('div', attrs={'id': 'info'})
    title = doujinshi_info.find('h1').text
    subtitle = doujinshi_info.find('h2')
    doujinshi['name'] = title
    doujinshi['subtitle'] = subtitle.text if subtitle else ''
    doujinshi_cover = html.find('div', attrs={'id': 'cover'})
    # Raw strings so the regex escapes aren't interpreted by Python first.
    img_id = re.search(r'/galleries/([\d]+)/cover\.(jpg|png)$',
                       doujinshi_cover.a.img['src'])
    if not img_id:
        # BUG FIX: message previously read "Tried yo get image id failed".
        logger.critical('Failed to get image id')
        sys.exit()
    doujinshi['img_id'] = img_id.group(1)
    doujinshi['ext'] = img_id.group(2)
    pages = 0
    for _ in doujinshi_info.find_all('div', class_=''):
        pages = re.search(r'([\d]+) pages', _.text)
        if pages:
            pages = pages.group(1)
            break
    doujinshi['pages'] = int(pages)
    # gain information of the doujinshi
    information_fields = doujinshi_info.find_all('div', attrs={'class': 'field-name'})
    needed_fields = ['Characters', 'Artists', 'Language', 'Tags']
    for field in information_fields:
        field_name = field.contents[0].strip().strip(':')
        if field_name in needed_fields:
            data = [sub_field.contents[0].strip()
                    for sub_field in field.find_all('a', attrs={'class': 'tag'})]
            doujinshi[field_name.lower()] = ', '.join(data)
    return doujinshi
def user_mod(self, name, password=None, uid=None, primary_group=None,
             groups=None, home_dir=None, login=None, comment=None,
             user="******"):
    """Modify an existing system account via usermod(8).

    Only the attributes passed as non-None are changed; when no option at
    all is supplied the external command is skipped entirely.
    Raises StorLeverError when asked to modify root.
    """
    if name == "root":
        raise StorLeverError("cannot modify user root", 400)
    cmds = ["/usr/sbin/usermod"]
    if uid is not None:
        cmds.append("-u")
        cmds.append("%d" % int(uid))
    if primary_group is not None:
        cmds.append("-g")
        cmds.append(primary_group)
    if groups is not None:
        cmds.append("-G")
        cmds.append(groups)
    if comment is not None:
        cmds.append("-c")
        cmds.append(comment)
    if password is not None:
        cmds.append("-p")
        # NOTE(review): crypt() with the fixed salt "ab" yields weak
        # classic-DES hashes and identical hashes for identical
        # passwords — consider a random, stronger salt; confirm what the
        # target shadow format expects.
        enc_passwd = crypt(password, "ab")
        cmds.append(enc_passwd)
    if login is not None:
        cmds.append("-s")
        # A falsy login string means "disable shell access".
        if not login:
            cmds.append(NO_LOGIN_SHELL)
        else:
            cmds.append(LOGIN_SHELL)
    if home_dir is not None:
        cmds.append("-d")
        cmds.append(home_dir)
        if not os.path.exists(home_dir):
            cmds.append("-m")  # ask usermod to create the missing home dir
    cmds.append(name)
    # len > 2 means at least one option beyond the binary and user name,
    # i.e. there is actually something to change.
    if len(cmds) > 2:
        check_output(cmds, input_ret=[4, 6, 12])
    logger.log(logging.INFO, logger.LOG_TYPE_CONFIG,
               "System user %s is modified by user(%s)" % (name, user))
def mod_dir_mode(self, relative_path, mode, operator="unknown"):
    """Chmod a shared directory under this filesystem's mount point."""
    if not self.is_available():
        raise StorLeverError("File system is unavailable", 500)
    if "." in relative_path or ".." in relative_path:
        raise StorLeverError("name cannot include . or ..", 400)
    if relative_path.startswith("/"):
        raise StorLeverError("name must be a relative path name", 400)
    target = os.path.join(self.fs_conf["mount_point"], relative_path)
    if not os.path.exists(target):
        raise StorLeverError("Share directory not found", 404)
    # Clear the umask so the requested mode is applied verbatim.
    os.umask(0)
    os.chmod(target, mode)
    logger.log(logging.INFO, logger.LOG_TYPE_CONFIG,
               "Share directory (%s) mode is changed to 0%o"
               " by user(%s)" % (target, mode, operator))
def test(self):
    """Log in to WebQQ.

    Fetches a verify code, derives the password hash from it, posts the
    login request, and reacts to the (Chinese) status strings in the
    response; retries recursively on a wrong verify code.
    """
    # QQ's login protocol hashes the password together with the code.
    self._verifycode = self._getverifycode()
    self.pswd = self._preprocess(self._pw, self._verifycode)
    self._headers.update({"Referer":"http://ui.ptlogin2.qq.com/cgi-bin/login?target=self&style=5&mibao_css=m_webqq&appid=%s"%(self.appid)+"&enable_qlogin=0&no_verifyimg=1&s_url=http%3A%2F%2Fweb.qq.com%2Floginproxy.html&f_url=loginerroralert&strong_login=1&login_state=10&t=20121029001"})
    url = "http://ptlogin2.qq.com/login?u=%s&p=%s&verifycode=%s&aid=%s"%(self.qq,self.pswd,self._verifycode[1],self.appid)\
        + "&u1=http%3A%2F%2Fweb.qq.com%2Floginproxy.html%3Flogin2qq%3D1%26webqq_type%3D10&h=1&ptredirect=0&ptlang=2052&from_ui=1&pttype=1&dumy=&fp=loginerroralert&action=3-25-30079&mibao_css=m_webqq&t=1&g=1"
    res = self._request(url=url)
    # Status strings: "登陆成功" = login succeeded,
    # "验证码不正确" = verify code was wrong.
    if res.find("登陆成功") != -1:
        logger.log("登陆成功")
    elif res.find("验证码不正确") != -1:
        logger.error("验证码错误")
        # Wrong code: fetch a fresh one and retry the whole login.
        # NOTE(review): unbounded recursion if the code keeps failing.
        self._getverifycode()
        self.test()
    else:
        logger.error(res)
def restore_from_file(self, filename, user="******"):
    """Restore StorLever configuration from a tar archive.

    Validates the archive, wipes the current config files, extracts the
    archive over "/" and re-runs all registered restore callbacks.
    Raises StorLeverError (400) for a missing or non-tar file.
    """
    # check input file
    if not os.path.exists(filename):
        raise StorLeverError("File (%s) does not exist" % filename, 400)
    if not tarfile.is_tarfile(filename):
        raise StorLeverError("File (%s) is not a config archive" % filename, 400)
    self._del_all_config_files()
    # BUG FIX: use a context manager so the archive is closed even when
    # extraction raises (the original leaked the handle on error).
    # SECURITY NOTE(review): extractall over "/" trusts the archive's
    # member paths completely; a crafted archive can overwrite arbitrary
    # files (path traversal). Only restore archives from trusted sources.
    with tarfile.open(filename, 'r') as tar_file:
        tar_file.extractall("/")
    # call the register callback function for restore config
    for callback in self.restore_from_file_cb:
        callback()
    logger.log(logging.INFO, logger.LOG_TYPE_CONFIG,
               "Storlever conf is restored from file by user(%s)" % user)
def process_line(self, ircmsg):
    """Dispatch one raw IRC line: answer PINGs, route chat messages to
    process_message(), and react to server-info lines (MOTD end,
    NickServ identification)."""
    if is_ping(ircmsg):
        logger.incoming_ping(ircmsg)
        pingmsg = ircmsg.split("PING :")[1]
        # Echo the server's token back to keep the connection alive.
        self.send("PONG :" + pingmsg + "\n")
    elif is_message(ircmsg):
        logger.log(ircmsg)
        self.process_message(ircmsg)
    else:
        logger.incoming_info(ircmsg)
        # weird hack thing for joining channels?
        # The MOTD-end line signals registration finished, so it is safe
        # to join channels now.
        if "End of /MOTD" in ircmsg:
            for channel in self.channels:
                self.joinchan(channel)
        # weird hack thing for nickserv identify
        # NOTE(review): matching this literal presumably identifies
        # NickServ's hostmask on the target network — confirm against the
        # actual server output.
        if "[email protected]" in ircmsg:
            if "You are already identified." not in ircmsg:
                self.sendmsg("NickServ", "IDENTIFY " + self.password)
def delete_dir(self, relative_path, user="******"):
    """Recursively delete a shared directory (never the mount point itself)."""
    # make sure fs is available
    if not self.is_available():
        raise StorLeverError("File system is unavailable", 500)
    if "." in relative_path or ".." in relative_path:
        raise StorLeverError("name cannot include . or ..", 400)
    if relative_path.startswith("/"):
        raise StorLeverError("name must be a relative path name", 400)
    mount_point = self.fs_conf["mount_point"]
    target = os.path.join(mount_point, relative_path)
    if target == mount_point:
        raise StorLeverError("Cannot delete the root dir of filesystem", 400)
    if not os.path.exists(target):
        raise StorLeverError("Share directory not found", 404)
    check_output(["/bin/rm", "-rf", target], input_ret=[1])
    logger.log(logging.INFO, logger.LOG_TYPE_CONFIG,
               "Share directory (%s) is deleted"
               " by user(%s)" % (target, user))
def user_add(self, name, password=None, uid=None, primary_group=None,
             groups=None, home_dir=None, login=None, comment=None,
             user="******"):
    """Create a new system account via useradd(8).

    Only the attributes passed as non-None are forwarded as options.
    """
    cmds = ["/usr/sbin/useradd"]
    if uid is not None:
        cmds.append("-u")
        cmds.append("%d" % int(uid))
    if primary_group is not None:
        cmds.append("-g")
        cmds.append(primary_group)
    if groups is not None:
        cmds.append("-G")
        cmds.append(groups)
    if comment is not None:
        cmds.append("-c")
        cmds.append(comment)
    if password is not None:
        cmds.append("-p")
        # NOTE(review): crypt() with the fixed salt "ab" yields weak
        # classic-DES hashes and identical hashes for identical
        # passwords — consider a random, stronger salt.
        enc_passwd = crypt(password, "ab")
        cmds.append(enc_passwd)
    if login is not None:
        cmds.append("-s")
        # A falsy login string means "disable shell access".
        if not login:
            cmds.append(NO_LOGIN_SHELL)
        else:
            cmds.append(LOGIN_SHELL)
    if home_dir is not None:
        cmds.append("-d")
        cmds.append(home_dir)
    cmds.append(name)
    check_output(cmds, input_ret=[2, 3, 4, 6, 9])
    logger.log(logging.INFO, logger.LOG_TYPE_CONFIG,
               "New system user %s is created by user(%s)" % (name, user))
# Bot bootstrap: load commands and credentials, then run the IRC bot with
# automatic reconnects.
all_commands = load_commands()
with open("data/password", "r") as password_file:
    password = password_file.readline()
connection = IrcConnection(nick, server)
bingo_bot = BingoBot(nick, password, connection, channels, commands=all_commands)
# Sentinel far in the past so the first connection attempt is immediate.
last_connection = datetime(year=1999, month=1, day=1)
# infinite loop tries to reconnect if disconnected by timeout
while True:
    time_since_last_connection = datetime.now() - last_connection
    # Back off when the previous attempt was too recent, to rate-limit
    # reconnects.
    if time_since_last_connection < RETRY_INTERVAL:
        time.sleep(60)
    last_connection = datetime.now()
    try:
        logger.log("Connecting to server...")
        bingo_bot.connect()
        bingo_bot.listen()  # blocks until the connection drops
    except Exception as e:
        logger.error("Encountered exception while running:")
        logger.error(traceback.format_exc())
        logger.error("Will retry within 60 seconds...")
uid = -1 else: uid = umgr.get_user_info_by_name(user)["uid"] if group is None: gid = -1 else: gid = umgr.get_group_by_name(group)["gid"] mount_point = self.fs_conf["mount_point"] path = os.path.join(mount_point, relative_path) os.umask(0) os.makedirs(path, mode) os.chown(path, uid, gid) logger.log(logging.INFO, logger.LOG_TYPE_CONFIG, "Share directory (%s) is created" " by user(%s)" % (path, operator)) def delete_dir(self, relative_path, user="******"): # make sure fs is available if not self.is_available(): raise StorLeverError("File system is unavailable", 500) if "." in relative_path or ".." in relative_path: raise StorLeverError("name cannot include . or ..", 400) if relative_path.startswith("/"): raise StorLeverError("name must be a relative path name", 400) path = os.path.join(self.fs_conf["mount_point"], relative_path) if path == self.fs_conf["mount_point"]: raise StorLeverError("Cannot delete the root dir of filesystem", 400) if not os.path.exists(path):
def _download_callback(self, request, result):
    """Abort the process after repeated download failures; otherwise log
    the successful download."""
    if result:
        logger.log(15, '{} download successfully'.format(result))
    else:
        logger.critical('Too many errors occurred, quit.')
        raise SystemExit
def on_binlog_replay_end():
    """Client hook: mark binlog replay as finished and run the
    terminal-color self-test."""
    global binlog_done
    logger.log(logger.debug, "binlog!!!")
    binlog_done = True
    _TC_OUTPUT_TEST()
def on_get_difference_end():
    """Client hook: mark the initial sync as finished and run the
    terminal-color self-test."""
    global syncFinished
    logger.log(logger.debug, "--------- END SYNC ---------")
    syncFinished = True
    # BUG FIX: "_TC_OUTPUT_TEST" was a bare name with no call parentheses
    # (a no-op statement); the sibling on_binlog_replay_end() shows the
    # intent was to invoke it.
    _TC_OUTPUT_TEST()
    return
def _TC_OUTPUT_TEST():
    """Self-test: print every TC terminal color/style escape so the logger
    output can be verified visually, plus the Python version."""
    logger.log(logger.debug, "Testeando cosas:")
    logger.log(logger.debug, "    Colores:")
    logger.log(logger.debug, "        Font:")
    # Foreground colors, normal ("OSCURO") and bright ("CLARO") variants.
    logger.log(logger.debug, "            OSCURO: "+TC.Black+"negro "+TC.Red+"rojo "+TC.Green+"verde "+TC.Yellow+"amarillo "+TC.Blue+"azul "+TC.Purple+"morado "+TC.Cyan+"cyan "+TC.White+"blanco")
    logger.log(logger.debug, "            CLARO : "+TC.IBlack+"negro "+TC.IRed+"rojo "+TC.IGreen+"verde "+TC.IYellow+"amarillo "+TC.IBlue+"azul "+TC.IPurple+"morado "+TC.ICyan+"cyan "+TC.IWhite+"blanco")
    logger.log(logger.debug, "        Background:")
    # Background colors, normal and bright variants.
    logger.log(logger.debug, "            OSCURO: "+TC.OnBlack+"negro "+TC.OnRed+"rojo "+TC.OnGreen+"verde "+TC.OnYellow+"amarillo "+TC.OnBlue+"azul "+TC.OnPurple+"morado "+TC.OnCyan+"cyan "+TC.OnWhite+"blanco")
    logger.log(logger.debug, "            CLARO : "+TC.OnIBlack+"negro "+TC.OnIRed+"rojo "+TC.OnIGreen+"verde "+TC.OnIYellow+"amarillo "+TC.OnIBlue+"azul "+TC.OnIPurple+"morado "+TC.OnICyan+"cyan "+TC.OnIWhite+"blanco")
    logger.log(logger.debug, "    Estilos:")
    # Text styles: bold, underline, inverse, and the reset variants.
    logger.log(logger.debug, "        "+TC.Rst+TC.Bold+"Bold "+TC.Rst+TC.Undr+"Undr "+TC.Rst+TC.Inv+"Inv "+TC.Rst+TC.Reg+"Reg "+TC.Rst+TC.RegF+"RegF "+TC.Rst+TC.RegB+"RegB")
    logger.log(logger.debug, str(sys.version_info))
    return