def play_game(GameClass, isChallenger, *args):
    """Instantiate GameClass, run it to completion, then clear the game-thread slot."""
    game = GameClass(isChallenger, *args)
    logging.info('Game started')
    game.play()
    game.game_over()
    # Free the slot so another game thread can be started.
    settings.gameThread = None
    logging.info('Game finished')
def remove_all_images(cls):
    """Attempt to remove every stored image; return True only if all removals succeed.

    Every image is attempted even after a failure (no short-circuiting).
    """
    logging.info('REMOVING IMAGES')
    results = [cls.remove_image(image) for image in cls.get_images()]
    return all(results)
def main():
    """Join port, portshow and swport_alias data into 'port_common' records.

    For each port record, copies the base fields, then enriches it with the
    connected-device WWNs/port name (matched on Switch+uPort) and zone aliases
    (matched on "Switch Index").
    """
    records = []
    filepath = os.path.join(JSONDIR, 'port')
    records0 = load_data(filepath, [])
    filepath = os.path.join(JSONDIR, 'portshow')
    records1 = load_data(filepath, [])
    filepath = os.path.join(JSONDIR, 'swport_alias')
    records2 = load_data(filepath, [])
    for record0 in records0:
        switch = record0['Switch']
        uPort = record0['uPort']
        Index = record0['Index']
        record = {}
        for key in ['Switch', 'uPort', 'Index', 'Speed', 'State', 'Type']:
            record[key] = record0[key]
        for record1 in records1:
            if switch == record1['Switch'] and uPort == record1['uPort']:
                record['portWwn_of_devices_connected'] = record1['portWwn_of_devices_connected']
                record['portName'] = record1['portName']
        for record2 in records2:
            if record2['Swport'] == '%s %s' %(switch, Index):
                record['Aliases'] = record2['Aliases']
        records.append(record)
    dump_data(os.path.join(JSONDIR, 'port_common'), records)
    # Fixed: the log label said 'path' although this block dumps 'port_common';
    # sibling scripts log the dumped dataset name.
    logging.info('%s | %s records' %('port_common', len(records)))
    return
def main():
    """Connectivity-test every EVA array via its CLI, then clean work dirs.

    Builds a temporary CLI script (connect / select system / exit), runs it
    through EVA_CLI, and logs success unless the output contains 'Error'.
    Finally removes the CLI's leftover 'logs' and 'cache' directories.
    """
    for connection in CONNECTIONS:
        if connection['model'] == 'EVA':
            system = connection['name']
            args = [connection[key] for key in ['address', 'username', 'password']]
            connect_line = 'SELECT MANAGER %s USERNAME=%s PASSWORD=%s' %tuple(args)
            cli_commands = [
                connect_line,
                'select system %s' %system,
                'exit']
            # Context manager (was a bare NamedTemporaryFile left for GC):
            # guarantees the temp script is closed and deleted per connection.
            with tempfile.NamedTemporaryFile() as script:
                script.write(bytes('\n'.join(cli_commands), 'utf-8'))
                script.seek(0)
                p = subprocess.Popen([EVA_CLI, 'file %s' %script.name],
                                     stdout=subprocess.PIPE)
                out = p.stdout.read().decode("utf-8")
                p.communicate()
            err = ''.join([line for line in out.split('\n') if 'Error' in line])
            if err:
                logging.warning('%s test failed - %s' %(system, err))
            else:
                logging.info('%s test success' %system)
    # The EVA CLI litters the cwd with these directories on every run.
    for dirname in ['logs', 'cache']:
        dirpath = os.path.join(os.getcwd(), dirname)
        if os.path.isdir(dirpath):
            shutil.rmtree(dirpath)
    return
def delay_up(self):
    """Increase the delay counter by one, but only while the state is initialised."""
    if not self.state_init:
        logging.info('STATE WAS NOT INIT, leaving delay at ' + str(self._delay))
        return
    self._delay += 1
    logging.info('DELAY UP: ' + str(self._delay))
def callback_handler(call):
    """Route an inline-keyboard callback: update the per-chat session and swap the message view."""
    logging.debug(f'call.data {type(call.data)} = {call.data}')
    view_class, model_class = views.decode_data(call.data)
    # First callback from this chat: start a fresh session dict.
    if call.message.chat.id not in user_sessions:
        logging.info(f'Clearing session {call.message.chat.id}')
        user_sessions[call.message.chat.id] = {}
    if model_class is not None:
        # Stored under the model's class name (e.g. 'Locations' / 'Destinations'),
        # which the branch below checks for.
        user_sessions[call.message.chat.id][f'{type(model_class).__name__}'] = model_class
    view = view_class()
    if isinstance(view, views.GetBusSchedule):
        # A schedule needs both a destination and a location; prompt for
        # whichever is still missing.
        if 'Destinations' not in user_sessions[call.message.chat.id]:
            view = views.SelectDestination()
        elif 'Locations' not in user_sessions[call.message.chat.id]:
            view = views.SelectLocation()
        elif 'Destinations' in user_sessions[
                call.message.chat.id] and 'Locations' in user_sessions[
                call.message.chat.id]:
            view = views.ShowSheduleResult(user_sessions[call.message.chat.id])
            # Consume the selections so the next request starts over.
            del user_sessions[call.message.chat.id]['Destinations']
            del user_sessions[call.message.chat.id]['Locations']
        else:
            # NOTE(review): given the two elifs above, this branch looks
            # unreachable — appears to be a defensive fallback; confirm the
            # original indentation attached it to this chain.
            view = views.StartMenu()
    bot.edit_message_text(chat_id=call.message.chat.id,
                          message_id=call.message.message_id,
                          text=view.get_message_text(),
                          reply_markup=view.get_keyboard())
def state(self, st):
    """Setter: switch to state *st* (None is ignored); a genuine change resets the delay."""
    if st is not None:
        if self._state != st:
            logging.info('STATE SWITCHED ' + str(st))
            # A real transition restarts the delay counter.
            self._delay = 0
        # NOTE(review): reconstructed nesting — _state/_state_init are assumed
        # to update on every non-None call, not only on change; confirm against
        # the original formatting.
        self._state = st
        self._state_init = False
def crawl_transaction_by_search(self, args):
    """Crawl one page of historical transaction listings for a business circle or community.

    args: (search_key, page) tuple. Each listing is upserted (update if the
    id already exists, insert otherwise) and committed individually.
    """
    search_key, page = args
    url_page = self.base_url + f"chengjiao/pg{page}rs{search_key}/"
    content = self.request_fn(url_page)
    soup = BeautifulSoup(content, self.bs4_parser)
    logging.debug('@crawl_transaction_by_search: {0} - page - {1}: {2}'.format(search_key, page, url_page))
    session = DBSession()
    for ul_tag in soup.find_all("ul", class_="listContent"):
        for item_tag in ul_tag.find_all("li"):
            try:
                info_dict = self.parse_transaction_content(item_tag)
                # Upsert on primary key.
                query = session.query(TransactionInfo).filter(TransactionInfo.id == info_dict['id'])
                if query.first():
                    query.update(info_dict)
                else:
                    session.add(TransactionInfo(**info_dict))
                session.commit()
                logging.debug('@crawl_transaction_by_search: {0} - page - {1}: {2}'.format(
                    search_key, page, info_dict))
            except Exception as e:
                # Added: roll back the failed transaction so later rows can
                # still commit (consistent with crawl_community_by_district).
                session.rollback()
                logging.exception('@crawl_transaction_by_search: {0} - page - {1}: {2}'.format(
                    search_key, page, e))
    time.sleep(3)
    # Added: release the connection back to the pool (was missing, unlike the
    # sibling crawl_* methods).
    session.close()
    logging.info('@crawl_transaction_by_search: {0} - page - {1} complete.'.format(search_key, page))
def main():
    """Connectivity-test every HDS array through the Storage Navigator CLI.

    Registers each unit with `auunitadd`, then runs `auunitinfo` with the
    credentials piped in via `printf`; failure is assumed when there is no
    stdout but stderr is non-empty.
    """
    # The CLI needs its own libs and home directory exported.
    os.environ['LD_LIBRARY_PATH'] = os.path.join(HDS_CLI, 'lib')
    os.environ['STONAVM_HOME'] = HDS_CLI
    for connection in CONNECTIONS:
        if connection['model'] == 'HDS':
            systemname = connection['name']
            address = connection['address']
            username = connection['username']
            password = connection['password']
            # Register the unit (idempotent: errors here are ignored).
            clicommand = '%s/auunitadd -unit %s -ctl0 %s' %(HDS_CLI, systemname, address)
            p = subprocess.Popen(clicommand.split(), stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out = p.stdout.read().decode("utf-8")
            p.communicate()
            # printf expands the literal \n, emitting "user<newline>pass" for
            # the CLI's interactive auth prompt.
            # NOTE(review): this assumes username/password contain no whitespace
            # (the string is split on spaces) — confirm.
            authcommand = "printf %s\\n%s" %(username, password)
            clicommand = "%s/%s -unit %s" %(HDS_CLI, 'auunitinfo', systemname)
            auth = subprocess.Popen(authcommand.split(), stdout=subprocess.PIPE)
            p = subprocess.Popen(
                clicommand.split(), stdin=auth.stdout,
                stdout=subprocess.PIPE, stderr=subprocess.PIPE)
            out = p.stdout.read().decode("utf-8")
            err = p.stderr.read().decode("utf-8").strip()
            p.communicate()
            if not out and err:
                logging.warning('%s test failed - %s' %(systemname, err.split('\n')[-1]))
            else:
                logging.info('%s test success' %systemname)
    return
def crawl_sale_by_search(self, args):
    """Crawl one page of for-sale listings for a business circle or community.

    args: (search_key, page) tuple. Rows are committed once per page; on a
    row error the pending changes are rolled back and the loop continues.
    """
    search_key, page = args
    url_page = self.base_url + f"ershoufang/pg{page}rs{search_key}/"
    content = self.request_fn(url_page)
    soup = BeautifulSoup(content, self.bs4_parser)
    logging.debug('@crawl_sale_by_search: {0} - page - {1}: {2}'.format(search_key, page, url_page))
    session = DBSession()
    for ul_tag in soup.find_all("ul", class_="sellListContent"):
        for item_tag in ul_tag.find_all("li"):
            try:
                info_dict = self.parse_sale_content(item_tag)
                logging.debug('@crawl_sale_by_search: {0} - page - {1}: {2}'.format(search_key, page, info_dict))
                sale_info = SaleInfo(**info_dict)
                # Skip rows missing any identifying field.
                if not sale_info.house_id or not sale_info.community_id or not sale_info.district:
                    continue
                session.add(sale_info)
            except Exception as e:
                session.rollback()
                logging.exception('@crawl_sale_by_search: {0} - page - {1}: {2}'.format(search_key, page, e))
    # Page-level rate limit before committing the batch.
    time.sleep(3)
    session.commit()
    session.close()
    logging.info('@crawl_sale_by_search: {0} - page - {1} complete.'.format(search_key, page))
def run_spider(): """ 运行爬虫 :return: """ # drop_db() init_db() logging.info("Spider start ... city: {0}, districts: {1}".format(CITY, ','.join(DISTRICTS))) spider = LianJiaSpider( city=CITY, districts=DISTRICTS, ) # 爬取所有小区信息(首次必须) spider.crawl_district_pool(module='community_info') # 爬取在售房源: 3种方式 # 1. 按照地区爬取 # spider.crawl_district_pool(module='sale_info') # 2. 按照商圈爬取(推荐) biz_circles = spider.query_biz_circle(districts=DISTRICTS_CN) spider.set_request_params(max_workers=3, delay=0.5) # 限速 spider.crawl_search_pool(module='sale_info', collection=biz_circles, coll_start=1) # 3. 按照社区爬取 # communities = spider.query_community(biz_circle=biz_circles) # spider.crawl_search_pool(module='sale_info', collection=communities) # 爬取历史成交 spider.set_request_params(max_workers=1, delay=3) # 限速 spider.crawl_search_pool(module='transaction_info', collection=biz_circles, coll_start=1) # spider.crawl_search_pool(module='transaction_info', collection=communities) logging.info("Spider finished ...")
def _run(self):
    """Camera loop: average the lane deviation over samples, smooth it, and steer."""
    debug = 0      # unused
    counter = 0
    dev_cache = [] # recent accepted deviation samples
    momentum = 0   # unused
    dev_m = 0      # smoothed ("momentum") deviation fed to the pilot
    cache_len = 0  # unused
    while self.camera.cap.isOpened():
        counter += 1
        try:
            start = time.time()
            deviation = self.camera.analyze()
            # logging.debug(f'deviation: {deviation}')
            # self._pilot(deviation)
            if len(dev_cache) == 0:
                avg_deviation = deviation
            # Reject outliers more than 20 away from the running average.
            if abs(deviation - avg_deviation) < 20:
                dev_cache.append(deviation)
            if len(dev_cache) >= 3:
                counter = 0
                end = time.time()
                delta = end - start
                logging.debug(f'analyze 3 times: {delta:.3f}s')
                avg_deviation = sum(dev_cache) / len(dev_cache)
                # Exponential-style smoothing: new = (0.3*old + avg) / 1.3
                dev_m = (0.3 * dev_m + avg_deviation) / 1.3
                logging.info(f'deviation momentum: {dev_m}')
                self._pilot(dev_m)
                # NOTE(review): dev_cache is never cleared here, so once it
                # reaches 3 samples this branch runs on every iteration over a
                # growing cache — confirm whether `dev_cache = []` was intended
                # after piloting.
        except KeyboardInterrupt as e:
            counter = 0
            dev_cache = []
            time.sleep(20)
            break
def request_data(url, retry=0, auto_proxy=False, delay=0, **kwargs):
    """
    Fetch the page source for *url* with a GET request.
    :param url: target URL
    :param retry: retry count (0 = plain requests.get, no retry session)
    :param auto_proxy: route the request through a proxy from the pool
    :param delay: seconds to sleep before requesting
    :param kwargs: extra requests.get arguments
    :return: response text on HTTP 200, otherwise None
    """
    if delay:
        time.sleep(delay)
    if retry:
        # A session with a mounted HTTPAdapter handles the retries.
        sess = requests.Session()
        adapter = HTTPAdapter(max_retries=retry)
        sess.mount('http://', adapter)
        sess.mount('https://', adapter)
        method = functools.partial(sess.request, method='get')
    else:
        method = requests.get
    if auto_proxy:
        kwargs.update({'proxies': {'http': 'http://{}'.format(get_proxy())}})
    try:
        res = method(url=url, headers=get_header(), **kwargs)
        status = res.status_code
        if status == 200:
            logging.debug("Request Data - {0} - {1}".format(
                status, url))
            return res.text
        logging.info("Request Data - {0} - {1}".format(status, url))
    except requests.exceptions.RequestException as e:
        logging.error("Request ERROR: {0}, url: {1}".format(e, url))
    return None
def set_image(self, key, mid):
    """Store image id *mid* under *key*; a falsy mid is ignored.

    If the slot already holds an image, that upload is deleted from the
    Dialogs API before being replaced.
    """
    logging.info('SETTING IMAGE ' + str(key) + ' TO ' + str(mid))
    if mid:
        if key in self.images:
            # Free the previously uploaded image before overwriting the slot.
            DialogsApi.remove_image(self.images[key])
        self.images[key] = mid
        # NOTE(review): reconstructed indentation places this log inside the
        # `if mid` branch — confirm against the original formatting.
        logging.info('NEW IMAGES ' + log_object(self.images))
def crawl_community_by_district(self, args):
    """Crawl one page of community (xiaoqu) info for a district.

    args: (district, page) tuple. Each community is upserted (update if the
    id exists, insert otherwise) and committed individually; a failed row is
    rolled back and the loop continues.
    """
    district, page = args
    url_page = self.base_url + f"xiaoqu/{district}/pg{page}/"
    content = self.request_fn(url_page)
    soup = BeautifulSoup(content, self.bs4_parser)
    logging.debug('@crawl_community_by_district: {0} - page - {1}: {2}'.format(district, page, url_page))
    session = DBSession()
    for ul_tag in soup.find_all("ul", class_="listContent"):
        for item_tag in ul_tag.find_all("li"):
            try:
                info_dict = self.parse_community_content(item_tag)
                # Upsert on primary key.
                query = session.query(CommunityInfo).filter(CommunityInfo.id == info_dict['id'])
                if query.first():
                    query.update(info_dict)
                else:
                    session.add(CommunityInfo(**info_dict))
                session.commit()
                logging.debug('@crawl_community_by_district: {0} - page - {1}: {2}'.format(district, page, info_dict))
            except Exception as e:
                session.rollback()
                logging.exception('@crawl_community_by_district: {0} - page - {1}: {2}'.format(district, page, e))
    # Page-level rate limit.
    time.sleep(3)
    session.close()
    logging.info('@crawl_community_by_district: {0} - page - {1} complete.'.format(district, page))
def remove_image(mid):
    """Delete the uploaded image *mid* via the Dialogs skill API; True on success."""
    logging.info('REMOVING IMAGE ' + str(mid))
    url = DIALOGS_API_SKILL_URL + str(mid).strip('/')
    resp = requests.delete(
        url,
        headers={'Authorization': 'OAuth {}'.format(OAuth)})
    log_request(resp)
    payload = resp.json()
    return 'result' in payload and payload['result'] == 'ok'
def detect_front_distance(self):
    """Point the servo straight ahead (90 degrees) and return the measured distance."""
    self.servo_steer(90)
    time.sleep(0.8)  # give the servo time to finish turning
    distance = self.detect_distance()
    logging.info("Ultrasonic detecting ... \nFront distance: %d " % distance)
    return distance
def detect_left_distance(self):
    """Point the servo to the left (180 degrees) and return the measured distance."""
    self.servo_steer(180)
    time.sleep(0.8)  # give the servo time to finish turning
    distance = self.detect_distance()
    logging.info("Ultrasonic detecting ... \nLeft distance: %d " % distance)
    return distance
def detect_right_distance(self):
    """Point the servo to the right (0 degrees) and return the measured distance."""
    self.servo_steer(0)
    time.sleep(0.8)  # give the servo time to finish turning
    distance = self.detect_distance()
    logging.info("Ultrasonic detecting ... \nRight distance: %d " % distance)
    return distance
def export_date_to_html_file(import_date: datetime, export_html: str):
    """Write a one-line 'last data update' paragraph to the given .html path."""
    export_folder = Path(export_html)
    text = f"🗓️ Ostatnia aktualizacja danych: {import_date}"
    with open(export_folder, "w", encoding="utf-8") as handle:
        handle.write(text)
    logging.info(f"Finish: Statistics saved to .html file: {export_folder}.")
def __init__(self, name: str = 'BasicElement', comments: str = '', is_enabled: bool = True):
    """Store the element's display name, free-form comments and enabled flag."""
    logging.info(f'{type(self).__name__} | Init started')
    self.is_enabled = is_enabled
    self.comments = comments
    self.name = name
def exit():
    """Handle the exit command; refuse while a server or client connection is active."""
    if settings.mode == Mode.SERVER:
        print(r'You should close your server first by typing "\close".')
        return
    if settings.mode == Mode.CLIENT:
        print(r'You should shut down the connection first by typing "\quit".')
        return
    logging.info('Got an exit command')
    settings.mode = Mode.CLOSE
def create_db(conn):
    """Create the initial schema: one key/value table with an insert timestamp."""
    logging.info("Creating database")
    schema = """
    create table data (
        key text primary key,
        value text,
        date datetime default current_timestamp
    );
    """
    conn.executescript(schema)
def stop_connection():
    """Close the active client connection and return to NORMAL mode.

    Prints a hint and does nothing when not in CLIENT mode.
    """
    if settings.mode != Mode.CLIENT:
        print('You are not in CLIENT mode.')
        return
    # Moved below the mode check: outside CLIENT mode settings.tClient may be
    # None, and the original logged it first, risking an AttributeError.
    logging.info('Current connected server: %s:%s' % settings.tClient.get_connected_addr())
    settings.mode = Mode.NORMAL
    settings.tClient.quit()
    settings.tClient = None
    logging.info('Connection closed')
def osm_tag_statistics(tag: str, source_db: str, col: str) -> list:
    """Aggregate the 100 most frequent values of OSM tag *tag* in a Mongo collection.

    Returns the aggregation rows plus two synthetic rows: 'brak' (documents
    where the tag produced no grouped value) and a bolded total row.
    """
    client = MongoClient(uri)
    db = client[source_db]
    features = db[col]
    all_documents = features.count_documents({})
    query_aggregate = list(
        features.aggregate([
            # Only documents with a non-empty keywords array.
            {
                "$match": {
                    "keywords": {
                        "$not": {
                            "$size": 0
                        }
                    }
                }
            },
            # One output document per tag value.
            {
                "$unwind": f"$properties.tags.{tag}"
            },
            # Case-insensitive grouping of tag values.
            {
                "$group": {
                    "_id": {
                        "$toLower": f"$properties.tags.{tag}"
                    },
                    "count": {
                        "$sum": 1
                    },
                }
            },
            {
                "$match": {
                    "count": {
                        "$gte": 1
                    }
                }
            },
            {
                "$sort": {
                    "count": -1
                }
            },
            {
                "$limit": 100
            },
        ]))
    # Everything not covered by the (top-100) grouped values is counted as empty.
    # NOTE(review): with more than 100 distinct values the $limit makes this an
    # overestimate — confirm that is acceptable.
    not_empty_tag = sum(elem["count"] for elem in query_aggregate)
    empty_tag = all_documents - not_empty_tag
    query_aggregate.append({"_id": "brak", "count": empty_tag})
    query_aggregate.append({
        "_id": "<strong>suma</strong>",
        "count": all_documents
    })
    logging.info(f"Finish: Statistics for tag: {tag} generated.")
    return query_aggregate
def compare_acs(self):
    """Log the creation status of this graph.

    NOTE(review): this looks like a stub — the try body only logs success and
    draws nothing, so the except branch is effectively unreachable; confirm
    whether plotting code was intended here.
    """
    try:
        logging.info(
            f'{__class__.__name__ } [Successfully created {self.image_name} graph'
        )
    except Exception as e:
        logging.exception(
            f'{__class__.__name__ } [Exception during creation of {self.image_name} graph'
        )
        logging.exception(f'{__class__.__name__ } [Exception: {e}', exc_info=1)
def main():
    """Probe every enclosure connection and log its name, serial and rack on success."""
    for connection in CONNECTIONS:
        try:
            data = get_data(connection['address'])
            # Fixed: the original had trailing commas after the first two
            # assignments, which wrapped Name and SN in one-element tuples and
            # logged them as tuples.
            Name = data['RIMP']['INFRA2']['ENCL']
            SN = data['RIMP']['INFRA2']['ENCL_SN']
            Location = data['RIMP']['INFRA2']['RACK']
            logging.info('{} test success ({} {} {})'.format(connection['name'], Name, SN, Location))
        except Exception:
            # Narrowed from a bare except so KeyboardInterrupt/SystemExit escape.
            logging.warning('%s test failed' %connection['name'])
    return
def callback(user=user, request=user.request):
    """Resume the dialogue flow after position recognition.

    NOTE(review): the default arguments bind `user` and `user.request` from an
    enclosing scope at definition time, so this function must be defined inside
    a scope where `user` exists — confirm against the full file.
    """
    user.state = 1
    user.init_state(True)
    # Restore the request captured before recognition started.
    user.request = request
    logging.info(
        'continue after position recognition ' + log_object({
            'text': user.text,
            'pos': user['position'],
            'state': (user.state, user.delay)
        }))
def main():
    """Run an empty-command SSH probe against every 3PAR connection."""
    for connection in CONNECTIONS:
        if connection['model'] != '3PAR':
            continue
        args = [connection[key] for key in ['name', 'address', 'username', 'password']]
        args.append([])  # no commands: connectivity test only
        systemname, outs, errs, exception = ssh_run(args)
        if exception:
            logging.warning('%s test failed - %s' %(systemname, exception))
        else:
            logging.info('%s test success' %systemname)
    return
def main():
    """Merge switch, serial, version and fabric datasets into 'switch_common' records."""
    records = []
    filepath = os.path.join(JSONDIR, 'sorted_switchnames')
    sorted_switchnames = load_data(filepath, [])
    # NOTE(review): sorted_switchnames is loaded but never used in this block.
    filepath = os.path.join(JSONDIR, 'switch')
    swi_records = load_data(filepath, [])
    filepath = os.path.join(JSONDIR, 'serial')
    ser_records = load_data(filepath, [])
    filepath = os.path.join(JSONDIR, 'version')
    ver_records = load_data(filepath, [])
    filepath = os.path.join(JSONDIR, 'fswitch')
    fsw_records = load_data(filepath, [])
    filepath = os.path.join(JSONDIR, 'agswitch')
    ags_records = load_data(filepath, [])
    for swi_record in swi_records:
        switch = swi_record['Switch']
        record = {
            'Switch': swi_record['Switch'],
            'switchType': swi_record['switchType'],
            'switchMode': swi_record['switchMode'],
            'switchRole': swi_record['switchRole'] if 'switchRole' in swi_record else '',
        }
        # Shorten the verbose mode label.
        if record['switchMode'] == 'Access Gateway Mode':
            record['switchMode'] = 'AG'
        # Fabric can come from either the fabric-switch or the AG-switch data.
        for records2 in [fsw_records, ags_records]:
            for record2 in records2:
                if switch == record2['Switch']:
                    record['Fabric'] = record2['Fabric']
        for ser_record in ser_records:
            if switch == ser_record['Switch']:
                record.update({
                    'Part_Num': ser_record['Part_Num'],
                    'Serial_Num': ser_record['Serial_Num'],
                })
        for ver_record in ver_records:
            if switch == ver_record['Switch']:
                record.update({
                    'Fabric_OS': ver_record['Fabric_OS'],
                })
        records.append(record)
    dump_data(os.path.join(JSONDIR, 'switch_common'), records)
    logging.info('%s | %s records' %(__name__, len(records)))
    return
def __first_init(self, req):
    """One-time initialisation of a per-user storage instance.

    req: incoming webhook request dict; only req['session']['user_id'] is read.
    """
    logging.info('NEW STORAGE INSTANCE ' + str(req['session']['user_id']))
    self.request = {}           # current request payload
    self.response = None        # response being built
    self._state = 0             # dialogue state id (see the state setter)
    self._state_init = False    # whether the current state has been initialised
    self._delay = 0             # repeat counter within a state (see delay_up)
    self._id = req['session']['user_id']
    self.buttons = []           # pending reply buttons
    self.cards = []             # pending reply cards
    self.images = {}            # key -> uploaded image id (see set_image)
    self.data = {'position': None, 'next': [], 'back': [], 'context': None}
def upload_image_source(source):
    """Upload a binary image to the Dialogs skill API; return its id, or False."""
    logging.info('UPLOADING IMAGE')
    headers = {
        'Authorization': 'OAuth {}'.format(OAuth),
    }
    resp = requests.post(DIALOGS_API_SKILL_URL, files={'file': source}, headers=headers)
    log_request(resp)
    payload = resp.json()
    if 'image' in payload:
        return payload['image']['id']
    return False
def main():
    """Fetch raw XML from every connection and save it as <name>.xmldata in TEXTDIR."""
    for connection in CONNECTIONS:
        try:
            content = get_content(connection['address'])
            filename = '%s.xmldata' %(connection['name'])
            filepath = os.path.join(TEXTDIR, filename)
            with open(filepath, 'w') as f:
                f.write(content)
            # NOTE(review): len(content) counts characters, not lines — the log
            # label is kept for compatibility with existing log consumers.
            logging.info('%s | %s lines' %(filename, len(content)))
        except Exception:
            # Narrowed from a bare except so Ctrl-C still interrupts the run.
            logging.warning('%s data collection failed' %connection['name'])
    return
def upload_image_url(url):
    """Ask the Dialogs skill API to fetch and store the image at *url*; return its id, or False."""
    logging.info('UPLOADING IMAGE FROM ' + url)
    headers = {
        'Authorization': 'OAuth {}'.format(OAuth),
    }
    resp = requests.post(DIALOGS_API_SKILL_URL, json={'url': url}, headers=headers)
    logging.info('request finished, got ' + resp.content.decode('utf-8'))
    log_request(resp)
    payload = resp.json()
    if 'image' in payload:
        return payload['image']['id']
    return False
def main(): filepath = os.path.join(JSONDIR, "link") links = load_data(filepath, []) filepath = os.path.join(JSONDIR, "port") ports = load_data(filepath, []) data = f_graph(links, ports) filepath = os.path.join(JSONDIR, "graph") dump_data(filepath, data) logging.info("%s | %s records" % ("graph", len(data))) return
def run(self):
    """Entry point: initialise PWM and run the auto-pilot camera loop.

    KeyboardInterrupt releases the GPIO and is re-raised; any other error
    re-initialises the pins and is only logged.
    """
    try:
        logging.debug('Start smartcar in mode auto_pilot with camera ...')
        GpioMgmt().init_pwm()
        # self.ultrasonic.init_pwm()
        self._run()
    except KeyboardInterrupt as e:
        GpioMgmt().release()
        logging.info("[+] Exiting")
        raise e
    except Exception as e:
        GpioMgmt().init_pin()
        logging.error(str(e))
def multi_kpi_vs_time(self, label1, label2):
    """Void -> save plot to .graphs/

    Plot the first two configured KPI columns against the timestamp column
    (third configured column) and save the figure to graphs/<image_name>.
    """
    try:
        new = Reader(self.csvfile, self.columns)
        logging.info(
            f'{__class__.__name__ } [{new.__class__.__name__} Processing {self.columns}'
        )
        param = new.read_single_col(self.columns[0])
        param2 = new.read_single_col(self.columns[1])
        logging.info(
            f'{__class__.__name__ } [Target KPIs [{self.columns[0]}] [{self.columns[1]}] '
        )
        logging.info(
            f'{__class__.__name__ } [Number of KPIs to be drawn - {(len(param)+len(param2))}'
        )
        time = new.read_single_col(self.columns[2])
        timestampt = [dateutil.parser.parse(s) for s in time]
        figure = plt.figure()
        ax = plt.gca().xaxis.set_major_locator(md.HourLocator(interval=5))
        # NOTE(review): the figure/locator above are immediately replaced by
        # the subplots() call below — the first two lines look redundant.
        figure, ax = plt.subplots(figsize=(15, 4))
        ax.xaxis.set_major_formatter(md.DateFormatter('%d-%m-%Y-%H:%M'))
        # Two KPI series on the shared time axis, red and blue.
        plt.plot_date(
            x=(timestampt),
            y=(param),
            xdate=True,
            fmt='r',
            label=label1,
        )
        plt.plot_date(
            x=(timestampt),
            y=(param2),
            xdate=True,
            fmt='b',
            label=label2,
        )
        plt.xticks(rotation=40)
        plt.xticks(timestampt)
        plt.tight_layout()
        plt.legend(loc="upper left")
        plt.subplots_adjust(wspace=1, bottom=0.2)
        plt.title(self.title, loc='center')
        ax.tick_params(direction='out', length=1, width=0.5, color='b')
        figure = plt.gca().xaxis.set_major_locator(
            md.HourLocator(interval=5))
        plt.savefig(f'graphs/{self.image_name}', bbox_inches='tight')
        logging.info(
            f'{__class__.__name__ } [Successfully created {self.image_name} graph'
        )
    except Exception as e:
        logging.exception(
            f'{__class__.__name__ } [Exception during creation of {self.image_name} graph'
        )
        logging.exception(f'{__class__.__name__ } [Exception: {e}', exc_info=1)
    finally:
        # Always release matplotlib state.
        # NOTE(review): if Reader() raises, `figure` is unbound here and
        # plt.close(figure) would raise NameError — confirm.
        plt.clf()
        plt.close(figure)
def get_nearest_bus(location: Locations, destination: Destinations) -> str:
    """Build a human-readable (Russian) message about the nearest shuttle bus.

    Queries the transport API for the chosen metro location, then formats the
    nearest/next departure, weekend notice, or a failure message.
    """
    logging.info('Getting nearest bus started...')
    # Map the location enum to the transport-API path segment.
    location_data = None
    if location == Locations.MARINA_ROSHHA:
        location_data = 'm'
    elif location == Locations.PLOSHHAD_ILICHA:
        location_data = 'p'
    elif location == Locations.RIZHSKAJA:
        location_data = 'r'
    # Map the destination enum to the API's direction key.
    destination_data = None
    if destination == Destinations.TO_METRO:
        destination_data = 'to_metro'
    elif destination == Destinations.TO_OFFICE:
        destination_data = 'to_office'
    response = requests.get(
        f'https://transport.lanit.ru/api/times/{location_data}').json()
    # Common header: current day/time, metro stop, direction.
    message_format = f'Сейчас {settings.days[datetime.today().weekday()]} {response["info"]["now"]}\n' \
                     f'Метро: {location.value}\n' \
                     f'Куда: {destination.value}\n'
    if datetime.today().weekday() > 4:
        # Weekend (Sat=5, Sun=6): no buses run.
        logging.debug(
            f'message_format {type(message_format)} = {message_format}')
        logging.info('Getting nearest bus completed')
        message_format += 'Сегодня маршруток не будет'
        return message_format
    elif response['time'][destination_data]['nearest'] is not False:
        # A nearest departure exists; optionally add the following one.
        message_format += f'Ближайшая маршрутка будет через {response["time"][destination_data]["left"]} ' \
                          f'в {response["time"][destination_data]["nearest"]}\n'
        if response["time"][destination_data]["next"] is not False:
            message_format += f'Следующая будет в {response["time"][destination_data]["next"]}\n'
        else:
            message_format += f'Маршруток больше сегодня не будет\n'
        if response['info']['warning'] is not False:
            message_format += f"Важно: {response['info'][destination_data]['warning']}"
        logging.debug(
            f'message_format {type(message_format)} = {message_format}')
        logging.info('Getting nearest bus completed')
        return message_format
    elif response['time'][destination_data]['nearest'] is False:
        # API explicitly reports no more departures today.
        message_format += f'Сегодня маршруток не будет.\n'
        if response['info']['warning'] is not False:
            message_format += f"Предупреждение: {response['info'][destination_data]['warning']}"
        logging.debug(
            f'message_format {type(message_format)} = {message_format}')
        logging.info('Getting nearest bus completed')
        return message_format
    else:
        # Unexpected payload shape: fall back to a generic failure message.
        message_format = 'К сожалению не удалось получить расписание\n'
        return message_format
def main():
    """Run the configured commands on every 3PAR system and save each output to TEXTDIR."""
    for connection in CONNECTIONS:
        if connection['model'] != '3PAR':
            continue
        args = [connection[key] for key in ['name', 'address', 'username', 'password']]
        args = args + [COMMANDS]
        systemname, outs, errs, exception = ssh_run(args)
        if exception:
            logging.warning('%s - %s' %(systemname, exception))
        # One <system>.<command> file per command output.
        for commandname, out in outs.items():
            filename = '%s.%s' %(systemname, commandname)
            with open(os.path.join(TEXTDIR, filename), 'w') as f:
                f.write(out)
            line_count = len(out.strip().split('\n'))
            logging.info('%s | %s lines' %(filename, line_count))
    return
def main():
    """Parse the collected enclosure XML dumps into enclosure/server/mezzanine
    record sets, dump each to JSON, and record every enclosure's management URL.
    """
    all_enclosures = []
    all_servers = []
    all_mezzanines = []
    dirpath = os.path.dirname(os.path.realpath(__file__))
    filepath = os.path.join(dirpath, 'Connection.json')
    CONNECTIONS = load_data(filepath, [])
    # name -> management address, for resolving enclosure URLs.
    name_addr = {con['name']: con['address'] for con in CONNECTIONS}
    encurls = {}
    for filename in os.listdir(TEXTDIR):
        filepath = os.path.join(TEXTDIR, filename)
        system, command = filename.split('.')
        with open(filepath) as f:
            content = f.read()
        data = xmltodict.parse(content)
        enclosure, servers, mezzanines = parse(data)
        all_enclosures += [enclosure]
        all_servers += servers
        all_mezzanines += mezzanines
        url = name_addr.get(system)
        encurls[enclosure['Enclosure_Name']] = url
    # Removed: a stray debug `print(encurls)` and a per-dataset `ks`
    # max-field-length computation that was never used or dumped.
    for name, records in (
            ('enclosures', all_enclosures),
            ('servers', all_servers),
            ('mezzanines', all_mezzanines),
            ):
        dump_data(os.path.join(JSONDIR, name), records)
        logging.info('%s | %s records' %(name, len(records)))
    dump_data(os.path.join(JSONDIR, 'encurls'), encurls)
def main():
    """Accept client connections in a loop and dispatch parsed commands to COMMAND_HANDLERS.

    Protocol: one request per connection; replies are ';'-separated
    '<ok_flag>;<payload>' strings encoded as bytes.
    """
    SOCKET.bind((HOST, PORT))
    SOCKET.listen(1)
    while True:
        try:
            connection, address = SOCKET.accept()
        except KeyboardInterrupt:
            exit_gracefully()
        logging.info('New connection from [{}]'.format(address))
        # Reset per connection: the original checked `'response' in locals()`,
        # which stayed true across iterations and could resend a stale response
        # after a later parse failure.
        response = None
        try:
            data = connection.recv(4096).decode()
            command, key, value = parse_message(data)
            if command == 'STATS':
                response = COMMAND_HANDLERS['STATS']()
            elif command in (
                'GET', 'GETLIST', 'INCREMENT', 'DELETE'
            ):
                response = COMMAND_HANDLERS[command](key)
            elif command in (
                'PUT', 'PUTLIST', 'APPEND',
            ):
                response = COMMAND_HANDLERS[command](key, value)
            else:
                response = (False, 'Unknown command type [{}]'.format(command))
            COMMAND_HANDLERS['UPDATE_STATS'](command, response[0])
        except MissingArgumentException as e:
            # socket.sendall requires bytes; the original passed str, which
            # raises TypeError on Python 3.
            connection.sendall('False;{}'.format(e).encode())
        except UnicodeDecodeError:
            logging.info("Client disconneted")
        finally:
            if response is not None:
                connection.sendall('{};{}'.format(response[0], response[1]).encode())
            connection.close()
def main():
    """Run every registered parser over the collected text files and dump the results to JSON."""
    commandout = {}
    for filename in os.listdir(TEXTDIR):
        filepath = os.path.join(TEXTDIR, filename)
        system, command = filename.split('.')
        with open(filepath) as f:
            lines = f.readlines()
        # Each command may feed several parsers; parser p_<name> produces
        # records for dataset <name>.
        for name in PARSERS.get(command, []):
            function = getattr(defs_parsers, 'p_'+name)
            commandout.setdefault(name, []).extend(function(system, lines))
    for command, records in commandout.items():
        records = sort_records(records)
        dump_data(os.path.join(JSONDIR, command), records)
        logging.info('%s | %s records' %(command, len(records)))
    return
def main():
    """Compute inter-switch paths for every related swport pair and dump them as 'path'."""
    graph = load_data(os.path.join(JSONDIR, 'graph'), {})
    links = load_data(os.path.join(JSONDIR, 'link'), {})
    swports_rels = load_data(os.path.join(JSONDIR, 'rels'), {})
    # Index links by "sw1 p1 sw2 p2" for O(1) lookup while forming path links.
    linksD = {
        '%s %s %s %s' %(r['Switch1'], r['Port1'], r['Switch2'], r['Port2']): r
        for r in links
    }
    records = []
    for swport1, swports in swports_rels.items():
        for swport2 in swports:
            sw1 = swport1.split()[0]
            sw2 = swport2.split()[0]
            sws = '%s %s' %(sw1, sw2)
            if sws in sw_treads:
                # Reuse precomputed switch-level threads, bracketed by endpoints.
                treads = [[swport1] + tread + [swport2] for tread in sw_treads[sws]]
            else:
                treads = walk_graph(graph, swport1, swport2)
            nodes = form_nodes(treads)
            path_links = form_links(treads, linksD)
            records.append({
                'Node1': swport1,
                'Node2': swport2,
                'Treads': treads,
                'Nodes': list(nodes.items()),
                'Links': path_links,
            })
    dump_data(os.path.join(JSONDIR, 'path'), records)
    logging.info('%s | %s records' %('path', len(records)))
    return
def main():
    """Derive alias/swport relation tables from zone, alias and port data; dump four JSON files."""
    zones = load_data(os.path.join(JSONDIR, 'zone'), [])
    aliases = load_data(os.path.join(JSONDIR, 'alias'), [])
    ports = load_data(os.path.join(JSONDIR, 'port'), [])
    portshow = load_data(os.path.join(JSONDIR, 'portshow'), [])
    alirelations, swport_alias, alias_swport = form_rels(zones, aliases, ports, portshow)
    swportrelations = form_swport_rels(alirelations, alias_swport)
    # swport -> bare alias names (drop the leading switch token).
    records = [
        {'Swport': swport, 'Aliases': [a.split()[1] for a in alias_list]}
        for swport, alias_list in swport_alias.items()
    ]
    dump_data(os.path.join(JSONDIR, 'swport_alias'), records)
    logging.info('%s | %s records' %('swport_alias', len(records)))
    records = [
        {'Alias': alias, 'Swports': list(swports)}
        for alias, swports in alias_swport.items()
    ]
    dump_data(os.path.join(JSONDIR, 'alias_swport'), records)
    logging.info('%s | %s records' %('alias_swport', len(records)))
    records = [
        {'Port': port, 'Relation': relation}
        for port, relation in swportrelations.items()
    ]
    dump_data(os.path.join(JSONDIR, 'port_relation'), records)
    logging.info('%s | %s records' %('port_relation', len(records)))
    dump_data(os.path.join(JSONDIR, 'rels'), swportrelations)
def exit_gracefully(*args):
    """Shutdown handler: log and terminate the process with exit code 0.

    Accepts (and ignores) arbitrary args so it can serve as a signal handler.
    """
    logging.info("Shutting down...")
    sys.exit(0)
def main():
    """Concatenate E-trunk, F-trunk and fabric links and dump them as 'link'."""
    records = f_etrunk() + f_ftrunk() + form_f_links()
    dump_data(os.path.join(JSONDIR, 'link'), records)
    logging.info('%s | %s records' %('link', len(records)))
def main():
    """Compute per-storage 3PAR capacity figures and dump them as 'capacity_3par'.

    Aggregates VV sizes by provisioning type, derives TOTAL/USED/FREE/RESERVE
    (raw capacity halved for mirroring, 5% held as reserve), converts MB to TB,
    and writes the records sorted by the configured system order.
    """
    records = []
    filepath = os.path.join(JSONDIR, '3par/sys')
    sys_data = load_data(filepath)
    raw_total_sizes = {r["Storage"]: int(r["TotalCap"]) for r in sys_data}
    raw_alloc_sizes = {r["Storage"]: int(r["AllocCap"]) for r in sys_data}
    filepath = os.path.join(JSONDIR, '3par/vv')
    data = load_data(filepath)
    sizes = {}
    for record in data:
        storage = record['Storage']
        if not storage in sizes:
            sizes[storage] = {
                'full': 0, 'cpvv': 0, 'tpvv': 0, 'snp': 0,
                'tpvv_used': 0, 'tpvv_free': 0, 'copy': 0
            }
        prov = record['Prov']
        size = int(record['VSize_MB'])
        # '--' marks a non-applicable field in the CLI output.
        used_size = int(record['Usr_Used_MB']) if record['Usr_Used_MB'] != '--' else 0
        copy_size = int(record['VSize_MB']) if record['SnpCPG'] != '--' and record['UsrCPG'] != '--' else 0
        sizes[storage][prov] += size
        sizes[storage]['tpvv_used'] += used_size if prov == 'tpvv' else 0
        sizes[storage]['copy'] += copy_size
    for storage, stordict in sizes.items():
        raw_total = raw_total_sizes.get(storage)
        raw_alloc = raw_alloc_sizes.get(storage)
        # Raw capacity is mirrored (/2); 5% of usable space is the reserve.
        TOTAL = raw_total/2*0.95
        RESERVE = raw_total/2*0.05
        USED = stordict['full'] + stordict['cpvv'] + stordict['tpvv'] + stordict['copy']
        FREE = TOTAL - USED
        REAL = stordict['full'] + stordict['cpvv'] + stordict['tpvv_used']
        reserve_used = raw_alloc/2 - REAL
        reserve_overused = 0
        # Removed: stray debug `print(RESERVE, reserve_used)`.
        if reserve_used < 0:
            reserve_overused = reserve_used
            reserve_used = RESERVE
            reserve_free = 0
        elif RESERVE > reserve_used:
            reserve_free = RESERVE - reserve_used
        else:
            reserve_overused = reserve_used - RESERVE
            reserve_free = 0
        FREE = FREE - reserve_overused
        OVERPROVISIONED = 0
        if FREE < 0:
            # Negative free space means the array is overprovisioned.
            OVERPROVISIONED = -FREE
            USED += FREE
            FREE = 0
        sizes[storage]['TOTAL'] = TOTAL
        sizes[storage]['USED'] = USED
        sizes[storage]['FREE'] = FREE
        sizes[storage]['OVERPROVISIONED'] = OVERPROVISIONED
        sizes[storage]['RESERVE'] = RESERVE
        sizes[storage]['RESERVE_OVERUSED'] = reserve_overused
        sizes[storage]['tpvv_free'] = stordict['tpvv'] - stordict['tpvv_used']
        sizes[storage]['reserve_used'] = reserve_used
        sizes[storage]['reserve_free'] = reserve_free
        # MB -> TB, rounded to 2 decimals (stordict aliases sizes[storage],
        # so the derived keys above are included).
        record = {k: round(v/1024.0/1024, 2) for k, v in stordict.items()}
        record['Storage'] = storage
        records.append(record)
        logging.info(storage)
    sorted_systems = load_data(os.path.join(JSONDIR, 'sorted_systems'), [])
    records = sort_storage_records(records, sorted_systems)
    filepath = os.path.join(JSONDIR, 'capacity_3par')
    dump_data(filepath, records)