def simulation():
    """ Function to install handlers on the /simulation path. This allows for
    requesting simulation data or starting a new simulation.

    Parameters
    ----------
    POST:
        servers: list
            List containing configurations for a server pool as dicts.
            { capacity: int, size: int, kind: string }
            For example, { size: 10, capacity: 10, kind: 'regular' }.
        process: list
            List specifying how a process should go (from server to server).
            This should contain a sequence of server kinds.
            For example, ["regular", "balance", "pay"].
        runtime: int
            Runtime of the simulation (defined by simpy package).

    Returns
    -------
    GET: dict
    POST: int

    NOTE(review): the POST branch actually reads the form fields 'kinds',
    'size', 'capacity', 'max_volume', 'process' and 'runtime' — the parameter
    documentation above does not fully match; confirm against the client.
    """
    if request.method == "POST":
        # nonlocal use of the simulation count (this handler must therefore be
        # defined inside an enclosing function that owns `simc`)
        nonlocal simc
        # increment the simulation count
        simc += 1
        # we need a new environment which we can run.
        environment = Environment()
        # we need a server pool
        servers = MultiServers()
        # iterate over all of the servers that need to be configured that
        # we received from the client; every pool shares the same
        # size/capacity taken from the form
        for kind in request.form['kinds'].split(','):
            # append a new server pool to the multiserver system
            servers.append(
                Servers(environment,
                        size=int(request.form['size']),
                        capacity=int(request.form['capacity']),
                        kind=kind.strip()))

        # Get the current date and time to append to the logger file name
        log_timestamp = datetime.now().strftime("%Y-%m-%d_%H-%M")

        # now that we have an output dir, we can construct our logger which
        # we can use for the simulation
        name = "{0}_{1:04d}_{2}".format(file_prefix, simc, log_timestamp)
        logger = Logger(name, directory=LOG_PATH)

        # we also need a logger for all error events that happen in the simulation
        error_logger = Logger(f"error-{name}", directory=LOG_PATH)

        # Enter first line for correct .csv headers
        logger.info(
            'Time;Server;Message_type;CPU Usage;Memory Usage;Latency;Transaction_ID;To_Server;Message'
        )
        error_logger.info('Time;Server;Error type;Start-Stop')

        # we can use the logger for the simulation, so we know where all logs will be written
        environment.logger(logger)
        environment.logger(error_logger, type="error")

        # we need a new form of seasonality
        seasonality = Seasonality(
            join(Seasonality_folder, Seasonality_file),
            max_volume=int(request.form['max_volume']))

        # now, we can put the process in the simulation
        Processor(environment,
                  servers,
                  seasonality=seasonality,
                  kinds=[
                      kind.strip() for kind in request.form['process'].split(',')
                  ])

        # run the simulation with a certain runtime (runtime). this runtime is not equivalent
        # to the current time (measurements). this should be the seasonality of the system.
        # for example, day or week.
        environment.run(until=int(request.form['runtime']))

        # expose the id of the simulation
        return jsonify(simc)

    if request.method == "GET":
        # NOTE(review): when 'id' is missing from the query string this branch
        # falls through and returns None, which Flask rejects at runtime —
        # confirm whether a default error response should be added.
        if 'id' in request.args:
            # zero-pad the requested id to match the 4-digit field used when
            # the log file name was built above
            logfile_id = "{:04d}".format(int(request.args.get('id')))

            # Scan the logfile directory
            list_of_files = glob.glob(os.path.join(LOG_PATH, 'log_*.csv'))

            # Return only the filename to get no errors with old functions
            log_filenames = [
                os.path.basename(filename) for filename in list_of_files
            ]

            if log_filenames:
                # presumably file names look like log_<id>_<timestamp>.csv
                # (i.e. file_prefix == 'log'), so the id is the second
                # underscore-separated field — verify file_prefix
                logfile_ids = [f.split('_')[1] for f in log_filenames]
                name_id_dict = dict(zip(logfile_ids, log_filenames))

                if logfile_id in logfile_ids:
                    # Logfile associated to given ID was successfully found
                    return jsonify({
                        "data": name_id_dict[logfile_id],
                        "message": "success"
                    })
                else:
                    # No logfile associated to given ID was found
                    return jsonify(
                        {"message": "No logfile (.csv) with given ID exists."})
            else:
                # No logfiles found (/logs is empty)
                return jsonify({"message": "No logfiles were found in /logs."})
class LianjiaDealData:
    """Scrape closed-deal housing data from Lianjia result pages.

    Downloads pages with urllib2 (Python 2) and extracts one 9-column row per
    listing from the parsed HTML soup.
    """

    def __init__(self):
        # file-backed logger shared by all scraping methods
        self.logger = Logger(logname='/var/log/houseData.log', loglevel=1, logger="houseDataLogger").getLogger()

    '''
    set the cell style
    '''
    def set_style(self, name,height,bold=False):
        """Build an xlwt cell style with the given font name and height.

        `bold` is currently accepted but ignored (the assignment is commented
        out below).
        """
        # init style
        style = xlwt.XFStyle()
        # create font for style
        font = xlwt.Font()
        font.name = name # 'Times New Roman'
        font.size = 10
        #font.bold = bold
        font.color_index = 4
        font.height = height
        # commented-out border settings kept for future use
        # borders= xlwt.Borders()
        # borders.left= 6
        # borders.right= 6
        # borders.top= 6
        # borders.bottom= 6
        style.font = font
        # style.borders = borders
        return style

    def get_deal_data(self, soup):
        """Extract deal rows from the parsed listing page.

        Walks every div.info-panel in `soup`, splitting its text strings into
        columns by position. Only rows that yield exactly 9 columns are kept.

        NOTE(review): the column positions in `arr` and the `k < 14` / `k - 8`
        thresholds encode the exact text layout of Lianjia's listing markup —
        confirm against a live page before changing anything here.
        """
        # row index
        n = 0
        # tuple (list) for values to insert into db
        values = []
        for item_name in soup.findAll('div', {'class': 'info-panel'}):
            self.logger.info('Collecting %s row' % n)
            # str index
            j = 0
            # column index
            m = 0
            # flag for data from other agent
            f = False
            # array of positional indexes whose strings become column values
            arr = (2, 8, 11)
            # len of item_name.strings (counted below; NavigableString
            # iterators have no len())
            k = 0
            for str in item_name.strings:
                k = k + 1
            # shorter panels use positions relative to the string count
            if k < 14:
                arr = (k-6, k-3)
            value = []
            # NOTE: the loop variable shadows the builtin `str` on purpose in
            # the original code — do not call str() inside this loop.
            for str in item_name.strings:
                if j == 0:
                    # first string: space-separated summary fields
                    tmp = str.split(' ')
                    l = 0;
                    while l < len(tmp):
                        # strip non-digits: "71平米" (71 square meters) --> 71
                        if l == 2:
                            value.append(re.sub('\D', '', tmp[l]))
                        else:
                            value.append(tmp[l])
                        # update column index to next column
                        m = m + 1
                        l = l + 1
                elif j == 1 and str == u'历史成交,暂无详情页':
                    # listing from another agent (historical deal, no detail
                    # page) — shift the positional columns
                    f = True
                    arr = (k-6, k-3)
                elif j == 1 or (j == 2 and f):
                    # slash-separated attributes; the last fragment is dropped
                    tmp = str.split('/')
                    l = 0;
                    while l < len(tmp) - 1:
                        value.append(tmp[l])
                        # update column index to next column
                        m = m + 1
                        l = l + 1
                    if f:
                        # other-agent rows miss one attribute; pad to keep
                        # column alignment
                        value.append('')
                        m = m + 1
                elif j == 2 and k < 14:
                    # short panels miss this column entirely; pad it
                    value.append('')
                    m = m + 1
                elif j == k - 8:
                    # deal date column: "YYYY.MM" (7 chars) gets a day added
                    if len(str) == 7:
                        str = str + '.01'
                    value.append(str)
                    m = m + 1
                elif j in arr:
                    value.append(str)
                    # update column index to next column
                    m = m + 1
                # update str index to next column
                j = j + 1
            # keep only fully-populated rows (9 columns expected by the db)
            if len(value) == 9:
                values.append(value)
            # update row index to the next row
            n = n + 1
        self.logger.info('%s rows data has been collected; the length of list stores the collected data is %s' % (n, len(values)))
        self.logger.info('the collected data is: %s' % values)
        return values

    def get_response(self, url):
        """Fetch `url` with browser-like headers; return the response or None.

        Returns None (after logging to stderr) on any urllib2 failure.
        """
        # add header to avoid getting a 403 forbidden message
        i_headers = {'User-Agent': 'Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:41.0) Gecko/20100101 Firefox/41.0',
                     'Cookie': 'lianjia_uuid=2ebbfb89-6086-41ba-8c8b-06299a4ef5c8; lianjia_token=1.0092e5f86f1e8105f63d6d6fd4d79d9738'}
        request = urllib2.Request(url, headers = i_headers)
        try:
            response = urllib2.urlopen(request)
        except Exception, e:
            # best-effort fetch: report and signal failure with None
            sys.stderr.write(str(e) + '\n')
            return None
        return response
# ---------------------------------------------------------------------------------------------------------------------- from lib.Kernel import Kernel from lib.toolkit import load_properties # Start-up Kernel kernel = Kernel(load_properties("conf/os.properties")) log = kernel.logger # ---------------------------------------------------------------------------------------------------------------------- # Logger # ---------------------------------------------------------------------------------------------------------------------- import gc from lib.Logger import Logger log = Logger() log.info("Hello!") log.error("Critical Issue!!") log.debug("Free memory: " + str(gc.free_mem())) # ---------------------------------------------------------------------------------------------------------------------- # Update DuckDNS # ---------------------------------------------------------------------------------------------------------------------- from lib.toolkit import update_duck_dns # Update DuckDNS service update_duck_dns("mydomain", "mytoken", "192.168.0.10") # ---------------------------------------------------------------------------------------------------------------------- # # NodeMCU Examples #
class ApacheJiraParser:
    """Fetch an Apache JIRA issue page and keep its description text when it
    mentions an exception or a "Caused by" trace."""

    def __init__(self, type, apache_id):
        # e.g. type='hadoop', apache_id=1234 -> full_id 'HADOOP-1234'
        self.type = type
        self.apache_id = str(apache_id)
        self.full_id = self.type.upper() + '-' + self.apache_id
        self.url = 'https://issues.apache.org/jira/browse/' + self.full_id
        self.data = ""
        self.logger = Logger(__name__)

    def parse(self):
        """Download the issue page, extract the description module and store
        the tag-stripped, escaped text in self.data. Returns self."""
        self.logger.info("Retrieving JIRA: %s", self.url)

        # Stream the raw page body through pycurl into an in-memory buffer.
        page = StringIO()
        curl = pycurl.Curl()
        curl.setopt(curl.URL, self.url)
        curl.setopt(curl.WRITEFUNCTION, page.write)
        curl.perform()
        curl.close()

        # Isolate the description module; str() renders a missing node as the
        # literal string 'None', which the guard below catches.
        description = str(
            BeautifulSoup(page.getvalue(), 'html.parser').find(id='descriptionmodule')
        )
        self.logger.info("JIRA retrieved")

        if description is None or description == 'None' or description.strip() == "":
            self.logger.info('No description was found for ID: %s', self.full_id)
            return self

        has_exception = re.search(".*Exception.*", description) is not None
        has_cause = re.search(".*Caused by.*", description) is not None
        if not has_exception and not has_cause:
            self.logger.info('No Exception or Cause By found for ID: %s', self.full_id)
            return self

        # strip HTML comments and tags, then HTML-escape what remains
        markup = re.compile(r'(<!--.*?-->|<[^>]*>)')
        self.data = cgi.escape(markup.sub('', description))
        self.logger.info("Striping HTML tags")
        return self

    def write(self):
        """Persist the parsed description to data/<type>/<FULL-ID>; a no-op
        when parse() kept nothing."""
        if not self.data:
            self.logger.info('No data was found for ID: %s, skipping writing file..', self.full_id)
            return
        out = open("data/" + self.type.lower() + '/' + self.full_id, "w")
        out.write(self.data)
        out.close()
import logging


def suites():
    """Assemble the suite of test cases to execute.

    Returns:
        unittest.TestSuite: suite holding all currently enabled test classes.
    """
    suite = unittest.TestSuite()
    loader = unittest.TestLoader()
    suite.addTests(loader.loadTestsFromTestCase(WeatherTest))
    # Disabled test classes, kept for quick re-enabling:
    # suite.addTests(loader.loadTestsFromTestCase(BugfreeImportFile))
    # suite.addTests(loader.loadTestsFromTestCase(ProductAdd))
    # suite.addTests(loader.loadTestsFromTestCase(LoginLogoutTest))
    return suite


if __name__ == "__main__":
    logger = Logger(loglevel=logging.ERROR).getlog()
    logger.info('日志开始')
    # fp starts as None so the finally-block can tell whether the report file
    # was ever opened (the original raised NameError in `finally` whenever
    # suites() or open() failed before fp was bound).
    fp = None
    try:
        suite = suites()
        fp = open(
            './reports/results_%s.html' % time.strftime("%Y-%m-%d %H-%M-%S"),
            'wb')
        runner = HTMLTestRunner(stream=fp,
                                title=u'接口测试报告',
                                description=u"测试用例执行情况:")
        runner.run(suite)
    finally:
        # The former "except Exception, e: raise e" clause only re-raised and
        # reset the traceback (and is invalid Python 3 syntax) — dropped, so
        # exceptions now propagate unchanged.
        if fp is not None:
            fp.close()
        # Use the configured logger: the root `logging.info(...)` call was
        # filtered out at the default WARNING level and emitted nothing.
        logger.info('日志结束')
from lib.Logger import Logger
import logging


def suites():
    """Assemble the suite of test cases to execute.

    Returns:
        unittest.TestSuite: suite holding all currently enabled test classes.
    """
    suite = unittest.TestSuite()
    loader = unittest.TestLoader()
    suite.addTests(loader.loadTestsFromTestCase(WeatherTest))
    # Disabled test classes, kept for quick re-enabling:
    # suite.addTests(loader.loadTestsFromTestCase(BugfreeImportFile))
    # suite.addTests(loader.loadTestsFromTestCase(ProductAdd))
    # suite.addTests(loader.loadTestsFromTestCase(LoginLogoutTest))
    return suite


if __name__ == "__main__":
    logger = Logger(loglevel=logging.INFO).getlog()
    logger.info(u'日志开始')
    # fp starts as None so the finally-block can tell whether the report file
    # was ever opened (the original raised NameError in `finally` whenever
    # suites() or open() failed before fp was bound).
    fp = None
    try:
        suite = suites()
        fp = open('./reports/results_%s.html' % time.strftime("%Y-%m-%d %H-%M-%S"),
                  'wb')
        runner = HTMLTestRunner(
            stream=fp,
            title=u'接口测试报告',
            description=u"测试用例执行情况:")
        runner.run(suite)
    finally:
        # The former "except Exception, e: raise e" clause only re-raised and
        # reset the traceback (and is invalid Python 3 syntax) — dropped, so
        # exceptions now propagate unchanged.
        if fp is not None:
            fp.close()
        # Use the configured logger: the root `logging.info(...)` call was
        # filtered out at the default WARNING level and emitted nothing.
        logger.info(u'日志结束')
from lib.HTMLTestRunner import HTMLTestRunner
from testcase.admin_login_logout.admin_login_correction import AdminLoginCorrection
from lib.SendEmail import send_email

FROM_ADDR = u"*****@*****.**"
FROM_PSWD = u"qwer1234"  # third-party app authorization code configured on 163 mail
TO_ADDR = u"*****@*****.**"


def suites():
    """Assemble the suite of test cases to execute.

    Returns:
        unittest.TestSuite: suite with the admin login test class loaded.
    """
    suite = unittest.TestSuite()
    loader = unittest.TestLoader()
    suite.addTests(loader.loadTestsFromTestCase(AdminLoginCorrection))
    return suite


if __name__ == "__main__":
    logger = Logger().getlog()
    logger.info('start testcase...')
    report_path = 'result/test_result_%s.html' % time.strftime(
        "%Y-%m-%d %H-%M-%S")
    # `with` guarantees the report file is closed even if the runner raises
    # (the original leaked the handle on failure).
    with open(report_path, 'wb') as fp:
        runner = HTMLTestRunner(stream=fp,
                                title=u"测试报告",
                                description=u"测试用例执行情况: ")
        runner.run(suites())
    # mail the finished report, then mark the run complete
    send_email(FROM_ADDR, FROM_PSWD, TO_ADDR, u"测试报告", report_path)
    logger.info('stop testcase...')
logger.info('valid token') return config['facebook']['access_token'] else: logger.info('invalid token, try get new one') fb = Facebook() if fb.login(username, password): logger.info('login success') token = fb.get_token() if token: logger.info('get new token') config.Set('facebook', 'access_token', token) return token else: logger.error('get token failed') else: logger.info('login failed') return False if __name__ == '__main__': token = check_token() if token: graph = Graph(token) event_id = graph.createEvent(facebook_group_id, title, description, start_time, end_time) if event_id: logger.info('http://www.facebook.com/events/' + event_id) else: logger.error('event create error')
zl = Job51(username, password) #zl.login() zl.search_job() zl.custom_select_job() if __name__ == "__main__": d1 = datetime.datetime.now() #生成日志文件 ZHAOPIN_SITE = {'zhilian': zhilian_shoot} now = time.strftime('%Y-%m-%d %H-%M-%S', time.localtime(time.time())) logger = Logger("./log/test_shoot_%s.log" % now, loglevel=logging.INFO).getlog() count = len(open('stu_data.txt', 'rU').readlines()) user_dict = get_user_dict_from_file('stu_data.txt') for k, v in user_dict.items(): print k, v try: #cjol_shoot(k, v) #zhilian_shoot(k, v) job51_shoot(k, v) logger.info(k + ' is success!!!') except Exception as e: traceback.print_exc() logger.error(k + ' is failed!!!') exit(0) eslapse_time = datetime.datetime.now() - d1 logger.info('本次投递 ' + str(count) + ' 个学生的简历共计用时:' + str(eslapse_time))
import common
from lib.Logger import Logger
from lib.Config import Config
from lib.PTT import PTT

# NOTE(review): __new__() is called on an already-constructed Logger instance
# with no arguments; object.__new__ requires a class argument, so this
# presumably relies on a custom __new__ in lib.Logger returning the usable
# logger — verify.
logger = Logger('h4_invitation_notifier_ptt').__new__()
config = Config()

# PTT BBS credentials read from the shared configuration
ID = config['bbs']['user']
PASSWORD = config['bbs']['pass']
# the meetup is announced for the upcoming Thursday
PartyDate = common.thisThursday()
board = 'Linux'
subject = 'HackingThursday 固定聚會 (%s)' % PartyDate
# article body: invite page from wikidot, converted from HTML to plain text
content = common.html2txt(common.get_wikidot_content_body('http://www.hackingthursday.org/invite'))

if __name__ == '__main__':
    ptt = PTT()
    # each step runs only if the previous one succeeded; failures are
    # silently skipped (no error logging)
    if ptt.login(ID, PASSWORD):
        logger.info('login ptt')
        if ptt.enter(board):
            logger.info('enter %s board' % board)
            if ptt.post(subject, content):
                logger.info('post article')
        if ptt.quit():
            logger.info('quit ptt')