def main():
    """Entry point: initialise logging, load debug data, then build and open the report."""
    log.log_init()
    load_dbg()
    # Dump collected info before generating the report.
    info_man.dump()
    #info_man.load()
    info_man.gen_report()
    info_man.open_report()
    print "-------END---------"
def build(self, step=None):
    """Run a local build step.

    @param step: name of the local build step to execute (required);
        see the usage documentation for the available step names.
    @return: 0 on success, -1 on failure.
    """
    logger = log_init()
    # Fail fast: without a step name there is nothing to build, so bail out
    # before clean()/prepare_env() mutate the workspace.
    if not step:
        logger.error("step be required")
        return -1
    self.clean()
    if not self.prepare_env(logger):
        return -1
    # Record the selected step for the build shell script.
    # NOTE(review): `step` is interpolated into a shell command unquoted —
    # callers must not pass untrusted input here.
    cmd = 'echo "export STEP=%s" >> script/setup_env.sh' % step
    if os.system(cmd) != 0:
        logger.error("build fail")
        return -1
    code = trace_execute("bash -x {0} {1}".format(
        self.local_build_shell_path, step), env=self.env, logger=logger)
    # trace_execute may report the exit status as an int or a string.
    if code != 0 and code != "0":
        logger.error("build fail")
        return -1
    logger.info("build success")
    return 0
def main():
    """Read script.txt line by line and synthesise a .wav file per line."""
    if not os.path.exists('./downloads'):
        os.mkdir('./downloads/')
    logger = log.log_init(os.path.basename(__file__))
    logger.info('Starting...')
    try:
        simplified_save = lambda text: fetch_and_save(
            text, get_safe_filename(f'{text}.wav', logger), logger)
        with open('script.txt', 'r') as f1:
            lines = f1.readlines()
        logger.info(
            f'Retrieved script from script.txt. {len(lines)} lines total.')
        for n, line in enumerate(lines, start=1):
            # BUG FIX: the original stripped '/n' and '/r' (forward slashes),
            # which never removes line endings; strip real newline characters.
            line = line.replace('\n', '').replace('\r', '')
            logger.info(
                f'Processing line {n} of {len(lines)} ({(n / len(lines)) * 100:.0f}%), '
                f'{len(lines) - n} left.')
            if len(line) > 257:
                # NOTE(review): the message says 256 but the split point is 257 —
                # kept as-is to preserve the existing split boundary.
                logger.warning(
                    'Line is longer than 256 characters. Processing as two parts.'
                )
                simplified_save(line[:257])
                simplified_save(line[257:])
            else:
                simplified_save(line)
        logger.info('Work complete.')
    except Exception as e:
        logger.fatal(e)
def main():
    '''
    Mini_Spider main program
    '''
    log.log_init(LOG_PATH + '/spider')
    spider_conf = opt_read()
    if spider_conf is None:
        logging.debug("No config file input!")
        sys.exit(-1)
    c=WebCrawl()
    # Load the crawl configuration
    ret = c.load_conf(spider_conf)
    if utils.SUCCESS_STATUS != ret:
        logging.error("Load Conf Failled : [%s]" % ret)
        sys.exit(-1)
    # Start crawling
    c.crawl()
def main():
    """Mini_Spider entry point: read options, load the crawl config, run the crawler."""
    log.log_init(LOG_PATH + '/spider')
    conf = opt_read()
    if conf is None:
        logging.debug("No config file input!")
        sys.exit(-1)
    crawler = WebCrawl()
    # Feed the crawl configuration into the crawler before starting it.
    status = crawler.load_conf(conf)
    if status != utils.SUCCESS_STATUS:
        logging.error("Load Conf Failled : [%s]" % status)
        sys.exit(-1)
    crawler.crawl()
def run(self):
    """Daemon main loop: initialise logging/signals/modules, then dispatch
    timer, tcp and user-signal events until told to stop or restart."""
    print "Daemon Running"
    try:
        # Init Log
        log.log_init("running.log")
        # Set Signal Handle
        sig_handle.signal_regist()
        # Init Time & Tcp module
        regist.time_module_conf_init()
        regist.tcp_module_conf_init()
        time_list = regist.time_module_init()
        tcp_list = regist.tcp_module_init()
        # Init & Insert Time module To Running List
        time_global = framerun_base.time_run_base()
        for i in time_list:
            time_global.time_add(i)
        # Init & Insert Tcp module To Running List
        tcp_global = framerun_base.epoll_base()
        for i in tcp_list:
            tcp_global.epoll_add(i)
        while True:
            # Graceful shutdown requested by the signal handler.
            if sig_handle.Stop_Flag == 1:
                logging.info("Master process start to clean environment")
                time_global.process_exit()
                os.remove(pid_file)
                logging.info("Master process environment clean finish and exit")
                sys.exit(0)
            # Event Runnning Loop
            if tcp_global.event_count() > 0:
                tcp_global.epoll_loop()
            if time_global.event_count() > 0:
                time_global.time_loop()
            if sig_handle.Restart_Flag == 1:
                # Restart Python Program
                logging.info("Start to Restart Program")
                sig_handle.restart_program()
            if len(sig_handle.User_signal) > 0:
                # Check Specify File
                if os.path.exists("signal.string"):
                    fd = file("signal.string", 'r')
                    signal_string = fd.read().strip()
                    fd.close()
                    # Send Signal to Specify Process
                    # NOTE(review): User_signal is mutated while being
                    # iterated — verify this cannot skip queued signals.
                    for i in sig_handle.User_signal:
                        time_global.signal_specify_send(i, signal_string)
                        sig_handle.User_signal.remove(i)
                    os.unlink("signal.string")
                else:
                    for i in sig_handle.User_signal:
                        time_global.signal_loop(i)
                        sig_handle.User_signal.remove(i)
            time.sleep(0.01)
    except Exception, e:
        # mailname presumably feeds an alerting path elsewhere — TODO confirm.
        mailname = "EventModuleMaster Down"
        error_buff = str(e) + ":" + traceback.format_exc()
        logging.error(error_buff)
        os.remove(pid_file)
import openseespy.opensees as ops
import log
import nodes
import element
import recorder
import load
import analysis

# Initialise project logging to "log.log" (second arg semantics defined by log.log_init).
log.log_init("log.log", 1)
log.logger.info("begin to opensees")

# Clear any previous OpenSees model state before building.
ops.reset()
ops.wipe()
# 3-D model: 3 spatial dimensions, 6 degrees of freedom per node.
log.logger.info("System, ndm = 3, dnf = 6")
ops.model("basic", "-ndm", 3, "-ndf", 6)

# Pre-processing: nodes and boundary conditions.
log.logger.info("Pre Processing of node")
nodes.node_create()
nodes.fixed_create()

# Pre-processing: materials, sections, geometric transforms, elements.
log.logger.info("Pre Processing of element")
element.material_create()
element.section_create()
element.geometric_Transf_create()
element.element_create()

# Attach output recorders.
log.logger.info("recorder")
recorder.recorder_create()
            return False
        logging.info('parse page num success, page_num:%d' % (self.page_num))
        # Collect the detail-page links of every estate listed on this page
        soup_house_list = soup.select('div#newhouse_loupai_list div.nlcd_name > a')
        for i, soup_house in enumerate(soup_house_list):
            # Resume support: skip items already handled before the checkpoint.
            if i < self.curr_item:
                logging.info('skip page:%d, item:%d' % (self.curr_page, i))
                continue
            house_url = soup_house['href']
            logging.info('house url:%s' % (house_url))
            # Random delay to avoid hammering the site.
            time.sleep(random.randint(8, 12))
            house = create_house_by_url(house_url)
            if house:
                house.parse()
                self.writer.write(house.to_string())
            # Update the checkpoint
            # NOTE(review): indentation reconstructed from a collapsed source —
            # confirm the checkpoint update is meant to run once per item.
            self.update_checkpoint(len(soup_house_list))
            self.save_checkpoint((self.curr_page, self.curr_item))
        return True


if __name__ == '__main__':
    log_init('fetcher', '../log')
    fetcher = Fetcher()
    fetcher.fetch()
                break
        except Exception, e:
            logging.error(('parse history price failed: %s, url:%s') % (str(e), self.url))
            return False
        logging.info('parse history price success')
        return True

    def to_string(self):
        """Serialise the parsed house attributes to a JSON string (non-ASCII kept)."""
        houseinfo = {}
        houseinfo['url'] = self.url
        houseinfo['name'] = self.name
        houseinfo['score'] = self.score
        houseinfo['price'] = self.price
        houseinfo['tags'] = self.tags
        houseinfo['region'] = self.region
        houseinfo['address'] = self.address
        houseinfo['last_kaipan'] = self.last_kaipan
        houseinfo['story'] = self.story
        houseinfo['huxing'] = self.huxing
        houseinfo['loudong'] = self.loudong
        return json.dumps(houseinfo, ensure_ascii=False)


if __name__ == '__main__':
    log_init('house', '../log')
    # Ad-hoc local test against a saved page snapshot.
    house = create_house_by_file(
        '/Users/liyang83/Desktop/work/code/maifang/bin/1.html')
    house.parse()
dict[key] = value; return dict def rg_cpu_table(self): table = [] show = self.rg_show("show cpu", wait_time=0.5) tmp = re.findall(r"(\d+)\s+(\d+\.\d+%)\s+(\d+\.\d+%)\s+(\d+\.\d+%)\s+(.+?)\s*\r?\n", show) for no,five_sec,one_min,five_min,process in tmp: table.append({"no": no, "5sec": five_sec, "1min": one_min, "5min": five_min, "process": process}) return table def telnet_test(): host="192.168.197.113" port=23 username="******" password="" timeout=3 debug_on=False; prompt="(#|>|# )" prompt_is_regexp=True vty=RgTelnet(host, port, username, password, timeout, debug_on, prompt, prompt_is_regexp); vty.login("root", ""); vty.rg_send_cmd("show sslvpn gateway | inc Service"); out=vty.rg_read_all(); print out if __name__ == '__main__': log.log_init(); telnet_test
try:
    # read command from cmd line if it's there (in that case
    # options are passed via environment variable)
    if len(sys.argv) > 1:
        command = sys.argv[1]
        options = json.loads(os.getenv("QUARTO_JUPYTER_OPTIONS"))
        # remove the variable so it is not inherited by child processes
        del os.environ["QUARTO_JUPYTER_OPTIONS"]
    # otherwise read from stdin
    else:
        input = json.load(sys.stdin)
        command = input["command"]
        options = input["options"]

    # initialize log
    log_init(options["log"], options["debug"])

    # start the server (creates a new detached process, we implement this here
    # only b/c Deno doesn't currently support detaching spawned processes)
    if command == "start":
        trace('starting notebook server subprocess')
        run_server_subprocess(options, status)

    # serve a notebook (invoked by run_server_subprocess)
    elif command == "serve":
        trace('running notebook server subprocess')
        run_server(options)

    # execute a notebook and then quit
    elif command == "execute":
        trace('running notebook without keepalive')
f.write('\n') # delete f.write('## 以下楼盘已下线\n') for url, house in self.delete_houses.items(): name = house['name'] f.write('### %s\n' % (name.encode('utf-8'))) f.write('\n') f.write('字段 | 值\n') f.write('--- | ---\n') for k, v in house.items(): f.write(k) f.write(' | ') f.write(json.dumps(v, ensure_ascii=False).encode('utf-8')) f.write('\n') f.write('\n') logging.info('save done') return True if __name__ == '__main__': log_init('diff', '../log') today = strftime("%Y%m%d", localtime()) yesterday = strftime("%Y%m%d", localtime(time() - 86400)) old_path = '../data/newhouse.%s' % (today) new_path = '../data/newhouse.%s' % (yesterday) output_path = '../data/diff.%s.%s.md' % (yesterday, today) gen = DiffReportGenerator(old_path, new_path, output_path) gen.generate()