def get_user(self, user_id):
    """Fetch user info for *user_id* from the VK API.

    Every call is recorded through the class logger; on a VK API error the
    exception is logged and None is returned instead of raising.
    """
    call_log = Log()
    call_log.sender = self
    call_log.action = 'get_user'
    call_log.log_var(user_id=user_id)
    res = None
    call_log.log_var(res=res)
    try:
        res = self.vk_session.users.get(user_ids=user_id)
        call_log.log_var(res=res)
        call_log.status = 'OK'
    except vk_api.VkApiError as err:
        call_log.status = 'Exception'
        call_log.log_var(exception_info=err)
    self.class_logger.log_info(call_log)
    return res
def __init__(self, measurement_sim, solver=None):
    """
    Instantiates RhoEstimator class.

    Attributes:
        measurement_sim: Measurement simulator class object with measurement data
        solver: kwarg that articulates the solver to be used in further
            analysis. Defaulted to Linear Inversion.
    """
    # BUG FIX: the default used to be `solver=LinearInversion()`, which is
    # evaluated once at definition time — all estimators constructed without
    # an explicit solver shared one LinearInversion instance, and the
    # setEstimator(self) call below re-bound that shared instance on every
    # construction. Build a fresh default per call instead.
    if solver is None:
        solver = LinearInversion()
    # Instantiate information from measurement simulation
    self.MeasSim = measurement_sim
    # Establish variables necessary to store rho_hat information
    self.rho_hat = None
    self.rho_hat_fid = None
    self.rho_hat_purity = None
    # Establish variables necessary to collect bootstrap information
    self.boot_rhos = []
    self.boot_fids = []
    self.boot_purities = []
    # Instantiate solver
    self.Solver = solver
    # Instantiate log from orchestrator (Master log not used).
    # Log name encodes seed (minus signs stripped), solver method,
    # noise percentage and trial count.
    self.BootLog = Log(
        "Not Applicable",
        "s%s_%s_n%s_t%s_Boot-Log" % (str(self.MeasSim.seed).replace("-", ""),
                                     self.Solver.solver_method,
                                     self.MeasSim.noise_perc,
                                     self.MeasSim.rho_hat_trials),
        "Verbose-Log")
    # Set boot log filename
    self.BootLogFilename = self.BootLog.boot_log_name + ".csv"
    # Determine rho_true fidelity
    self.MeasSim.rho_true_fidelity = self.calculateFidelity(self.MeasSim.rho_true)
    # Set the estimator for the Solver
    self.Solver.setEstimator(self)
def get_conversation_members(self, peer_id):
    """Return the member profiles of the conversation *peer_id*.

    Logs the call through the class logger; returns None if the VK API
    raises an error.
    """
    call_log = Log()
    call_log.sender = self
    call_log.action = 'get_conversation_members'
    call_log.log_var(peer_id=peer_id)
    res = None
    call_log.log_var(res=res)
    try:
        members = self.vk_session.messages.getConversationMembers(peer_id=peer_id)
        res = members['profiles']
        call_log.log_var(res=res)
        call_log.status = 'OK'
    except vk_api.VkApiError as err:
        call_log.status = 'Exception'
        call_log.log_var(exception_info=err)
    self.class_logger.log_info(call_log)
    return res
def get_site(url):
    """Download *url* with requests, returning the Response or None on error.

    A URL with no scheme is prefixed with 'http://' before the request.
    """
    site_log = Log()
    site_log.action = 'get_site'
    site_log.log_var(url=url)
    response = None
    site_log.log_var(response=response)
    try:
        # NOTE: original semantics kept — the substring 'http' anywhere in
        # the URL counts as "already has a scheme".
        if url.find('http') < 0:
            url = 'http://' + url
        response = requests.get(url)
        site_log.log_var(response=response)
        site_log.status = 'OK'
    except requests.RequestException as err:
        site_log.status = 'Exception'
        site_log.log_var(exception_info=err)
    logger.log_info(site_log)
    return response
def main():
    """Ad-hoc test driver: submit a PBS job over SSH and poll until done.

    NOTE(review): references `simWS`, which is not defined in this scope —
    presumably a module-level object; verify before running. Python 2 syntax.
    """
    import random
    import time
    ### STB to do: Unit test parsings
    ## Test the FREDInputBatch
    #fredBatch = FredBatchInputs()
    #fredBatch.setupFromJsonListFile("sample_json_simulator_configuration.txt")
    connections = {}
    #sys.exit()
    ### Test on Blacklight
    logger = Log(logFileName_='./test.log')
    # Open one SSH connection keyed by a random id and submit a test job.
    for i in range(0, 1):
        tempId = random.randint(0, 100000)
        #if i < 2:
        connections[tempId] = SSHConn(logger, machineName_='fe-sandbox.psc.edu', debug_=True)
        print connections[tempId].createPBSRunScript(
            simWS.configuration['simulators']['fred_V1_i'], 1012)
        #else:
        #    connections[tempId] = SSHConn(logger,machineName_='unicron.psc.edu',debug_=True)
        connections[tempId]._mkdir(
            simWS.configuration['simulators']['test']['runDirPrefix'] + "." + str(tempId))
        # NOTE(review): jtype is unused; pbsId from the last loop iteration
        # is the one polled below.
        jtype, pbsId = connections[tempId].submitJob(tempId, simId="Test_ID",
                                                     simulator="test", size="debug")
    # Poll every connection until all report COMPLETED.
    while True:
        completed = True
        for id, conn in connections.items():
            status, response = conn.getStatus(pbsId)
            print "For Conn %s Status,Response: %s,%s" % (str(id), status, response)
            if status != "COMPLETED":
                completed = False
        if completed is True:
            break
        print "For ----------"
        time.sleep(1)
class testBaidu(unittest.TestCase):
    """Smoke test for the Baidu page (browser bootstrap currently disabled).

    Python 2 syntax; the webdriver lines are kept commented out.
    """
    # Shared logger for all tests in this case.
    log = Log()

    def setUp(self):
        print "1111"
        #self.driver = webdriver.Firefox()
        #self.driver.implicitly_wait(30)
        #self.driver.maximize_window()
        #self.driver.get('http://www.baidu.com')

    def testTitle(self):
        # With the browser disabled this is a trivial always-pass assertion.
        self.log.info("-------start-------")
        #self.assertNotEqual(u"百度一下,你就知道",self.driver.title)
        self.assertEqual(1, 1)
        print "assert"

    def tearDown(self):
        #self.driver.close()
        #self.driver.quit()
        print "111111122222"
def test_c_browsergroup(self):
    """Look up the newly created group and verify its name and phone number."""
    try:
        driver = self.param
        # FIX: switch_to_frame() is deprecated in Selenium; use the
        # switch_to API, consistent with switch_to.default_content() below.
        driver.switch_to.frame("iframeId")
        # Enter the group name
        driver.find_element(*chaxun_new_group()[0]).send_keys(data)
        # Click "search"
        driver.find_element(*chaxun_new_group()[1]).click()
        groupname = driver.find_element(*chaxun_new_group()[2]).text
        self.assertEqual(data, groupname)
        phone = driver.find_element(*chaxun_new_group()[3]).text
        self.assertEqual('18512302010', phone)
        time.sleep(10)
        Log().info("查询到新建的集团为:%s" % groupname)
    except AssertionError:
        # FIX: bare re-raise preserves the original traceback (was raise(e)).
        raise
    finally:
        # Always leave the iframe, even on failure.
        driver.switch_to.default_content()
def __init__(self):
    """Initialize the mailer from module-level configuration values."""
    # Outgoing mail server
    self.smtpserver = smtpserver
    # Sending mailbox user / password
    self.user = user
    self.password = password
    # Sender address
    self.sender = sender
    # Recipient address
    self.receiver = receiver
    # Carbon copy
    self.cc = cc
    # Subject line
    self.subject = subject
    # Message subtype (send format)
    self.subtype = subtype
    # SMTP client; the connection is opened later, not here
    self.smtp = smtplib.SMTP()
    # Logger
    self.log = Log()
def create_session(self, group_token, group_id):
    """Open a VK API session and long-poll listener for the given group.

    On success, sets self.longpoll and self.vk_session; on VK API error
    only logs the exception.
    """
    call_log = Log()
    call_log.sender = self
    call_log.action = 'create_session'
    call_log.log_var(group_id=group_id)
    try:
        api = vk_api.VkApi(token=group_token)
        call_log.log_var(api=api)
        self.longpoll = api_longpoll.VkBotLongPoll(api, group_id)
        self.vk_session = api.get_api()
        call_log.log_var(longpoll=self.longpoll, vk_session=self.vk_session)
        call_log.status = 'OK'
    except vk_api.VkApiError as err:
        call_log.status = 'Exception'
        call_log.log_var(exception_info=err)
    self.class_logger.log_info(call_log)
def cli(gui):
    """Entry point: configure logging, then run the GUI or the terminal flow.

    With gui truthy the graphical app is launched; otherwise the user is
    prompted for credentials and the scraping script runs.
    """
    log = Log()
    log.basic_config(
        logfile_name='coupon',
        logfile_path='log',
        file_level=logging.INFO,
        console_level=logging.CRITICAL,
    )
    log.get_logger()
    logging.debug('Setup loggers')

    # Guard clause: GUI mode short-circuits the terminal flow.
    if gui:
        logging.info('Running Graphical User Interface Application')
        main()
        return

    logging.info('Running terminal script')
    click.echo('Publix Coupon Clipper: \n')
    username = click.prompt('Please enter your username')
    password = click.prompt('Please enter your password', hide_input=True)
    run_script(username, password)
def compare_values(name, arg1, arg2):
    """Compare two values and report a Status.

    Floats are compared by absolute error; lists of floats by their min,
    mean and max element-wise error (first non-OK wins); anything else by
    equality, logging a FAILED status on mismatch.
    """
    if type(arg1) is float:
        return float_err_status(name, abs(arg1 - arg2))

    if (type(arg1) is list) and (len(arg1) > 0) and (type(arg1[0]) is float):
        err = [abs(x1 - x2) for x1, x2 in zip(arg1, arg2)]
        summaries = ((min(err), "min"),
                     (sum(err) / len(err), "mean"),
                     (max(err), "max"))
        status = Status.OK
        for value, label in summaries:
            status = float_err_status(name, value, label)
            if status is not Status.OK:
                break
        return status

    if arg1 != arg2:
        Log().status_info("\"{0}\": values are not same".format(name),
                          status=Status.FAILED)
        return Status.FAILED
    return Status.OK
def edit_msg(self, session_event, msg_id, text):
    """Edit message *msg_id* in the event's peer; send a new message instead
    if the edit itself is rejected by the API.
    """
    call_log = Log()
    call_log.action = 'edit_msg'
    call_log.sender = self
    call_log.log_var(event=session_event, msg_id=msg_id, text=text)
    try:
        try:
            self.vk_session.messages.edit(
                peer_id=session_event.obj['peer_id'],
                message=text,
                message_id=msg_id,
            )
        except vk_api.VkApiError:
            # Edits can fail (e.g. the message is too old) — fall back to
            # sending a fresh message.
            self.write_msg(session_event, text)
        call_log.status = 'OK'
    except vk_api.VkApiError as err:
        call_log.status = 'Exception'
        call_log.log_var(exception_info=err)
    self.class_logger.log_info(call_log)
def __init__(self, cluster, json_dir, user_number):
    """Set up the simulator state and load the shared workload profiles.

    Parameters:
        cluster: cluster object handed to the Scheduler
        json_dir: directory of workload JSON files (kept for reference)
        user_number: number of simulated users; one job list per user
    """
    self.cluster = cluster  # FIX: was assigned twice in the original
    self.log = Log()
    self.json_dir = json_dir
    self.scheduler = Scheduler(cluster)
    self.block_list = list()
    self.job_list = list()  # list of lists: one job list per user
    self.event_queue = queue.PriorityQueue()
    self.timestamp = 0
    self.user_number = user_number
    self.total_application_type = 1
    self.app_map = OrderedDict()  # map from user id to app id
    self.job_durations = {}
    self.stage_durations = {}
    # record the execution information of jobs
    self.job_execution_profile = {}

    # FIX: the three profile files are the same for every user, so load them
    # once instead of re-reading them inside the per-user loop; `with` also
    # closes the file handles the original leaked.
    stage_profile_path = "Workloads/stage_profile.json"
    with open(stage_profile_path, 'r') as f:
        self.stage_profile = json.load(f, object_pairs_hook=OrderedDict)
    print("stage_profile loaded")

    runtime_path = "Workloads/runtime.json"
    with open(runtime_path, 'r') as f:
        self.runtime_profile = json.load(f, object_pairs_hook=OrderedDict)
    print("runtime_profile loaded")

    job_path = "Workloads/job.json"
    with open(job_path, 'r') as f:
        self.job_profile = json.load(f, object_pairs_hook=OrderedDict)
    print("job_profile loaded")

    # Generate the job list for each user. All users share the rdd_list and
    # block list; each user gets application user_index + 1.
    for user_index in range(0, user_number):
        application_number = user_index + 1
        self.app_map[user_index] = application_number
        self.generate_job_profile(user_index)
def clean_html(self, url):
    """Extract the readable article text from *url* via newspaper.

    Returns the cleaned text, or None when the article cannot be
    downloaded or parsed.
    """
    call_log = Log()
    call_log.sender = self
    call_log.action = 'clean_html'
    res = None
    call_log.log_var(url=url, res=res)
    try:
        article = newspaper.Article(url)
        call_log.log_var(article=article)
        article.download()
        article.parse()
        res = article.text
        call_log.log_var(res=res)
        call_log.status = 'OK'
    except newspaper.ArticleException as err:
        call_log.log_var(exception_info=err)
        call_log.status = 'Exception'
    self.logger.log_info(call_log)
    return res
def create_keyboard(self, button_text, url=None):
    """Build a VK inline-keyboard JSON string containing one button.

    Without *url* the button is a plain positive text button; with *url*
    it is an open_link button. Returns the keyboard JSON string.
    """
    call_log = Log(button_text=button_text)
    call_log.action = 'create_keyboard'
    call_log.sender = self
    if url is None:
        keyboard = ("{\"inline\": true, "
                    "\"buttons\": [[{\"action\": {\"type\": \"text\","
                    "\"label\": \"" + button_text + "\"}, \"color\": \"positive\"}]]}")
    else:
        keyboard = ("{\"inline\": true, "
                    "\"buttons\": [[{\"action\": {\"type\": \"open_link\","
                    "\"label\": \"" + button_text + "\", \"link\": \"" + url + "\"}}]]}")
    call_log.log_var(keyboard=keyboard)
    call_log.status = 'OK'
    self.class_logger.log_info(call_log)
    return keyboard
class Login(unittest.TestCase):
    # Exam-system (考试系统) login/logout API tests. test_1 must run before
    # test_2: it stores the token in the module-global login_token.

    log = Log()

    def test_1_login_pass(self):
        """Correct account and password: login succeeds."""
        data = {"account": "17688169411", "password": "******"}
        r = requests.post(url, data=data)
        result = r.json()['data']['msg']
        self.assertEqual("登录成功", result)
        self.log.info("手机号为" + data["account"] + "的用户," + result + '!')
        global login_token
        login_token = r.json()['data']['token']

    def test_2_loginout(self):
        """Correct token: logout succeeds."""
        payload = {"token": login_token}
        r = requests.post(url_loginout, data=payload)
        result = r.json()['data']['msg']
        self.assertEqual("退出成功", result)
        self.log.info("退出登录成功!")
def main():
    """Fetch the weather and post it to FB (prod) or print a mock message.

    Errors are logged via the Log helper; start/finish markers bracket the
    run either way.
    """
    config = Config()
    flag_arguments = config.setup_and_parse_flags()
    logger = Log('logs/logs.log')
    logger.log_start_end('started')
    # FIX: pre-bind fb_bot so the except-branch logout cannot raise
    # NameError when FB(config) itself fails.
    fb_bot = None
    try:
        config = config.load_config()
        fb_bot = FB(config)
        fb_bot.login_client()
        open_weather = OpenWeather(config["url"], config["defaultCity"],
                                   config["appKey"])
        weather_data = open_weather.parse_and_form_message()
        if flag_arguments.environment == 'prod':
            fb_bot.send_messages(weather_data)
        else:
            print_mock_message(weather_data)
        fb_bot.logout()
    # FIX: bare `except:` also swallowed KeyboardInterrupt/SystemExit.
    except Exception:
        error = sys.exc_info()
        logger.log_error(error)
        if fb_bot is not None:
            fb_bot.logout()
    logger.log_start_end('finished')
def __init__(self, model, dataset, loss, optimizer, run_name,
             batch_size=100, device=None, test_size=10):
    """
    Parameters
    ----------
    model: initialized UserNet
    dataset: initialized MovieLens
    loss: one of the warp functions
    optimizer: torch.optim
    run_name: directory to save results
    batch_size: number of samples to process for one update
    device: gpu or cpu
    test_size: number of tail items for each user to leave for test
    """
    # Best validation loss seen so far; starts at +inf so any loss improves it
    self.best_loss = np.inf
    self.loss = loss
    self.model = model
    self.dataset = dataset
    self.optimizer = optimizer
    self.batch_size = batch_size
    self.logger = Log(run_name)
    # Resolve the requested device (falls back per get_device's policy)
    self.device = get_device(device)
    self.model = self.model.to(self.device)
    # Hold out `test_size` tail items per user for evaluation
    self.train, self.test = self.train_test_split(test_size)
    self.test_loader = DataLoader(self.test, batch_size=batch_size,
                                  shuffle=True, num_workers=1)
    self.train_loader = DataLoader(self.train, batch_size=batch_size,
                                   shuffle=True, num_workers=10)
def run_server(addr, port):
    """Start the httpd worker thread and block until Ctrl-C shuts it down.

    NOTE(review): uses the Python 2 `thread` module; `task` is a module
    global shared with the signal handler.
    """
    global task
    log = Log(__name__, level='INFO')
    log.info('Run httpd server until ctrl-c input')

    def shutdown(task):
        # Stop the worker and mark the task as no longer running.
        task.worker.stop()
        task.running = False

    def start(httpd, id):
        # Thread target for WorkerTasks.
        httpd.start()

    def signal_handler(signum, stack):
        # SIGINT: trigger shutdown on a fresh thread so the handler returns fast.
        log.info('Sending shutdown to httpd server')
        thread.start_new_thread(shutdown, (task, ))

    signal.signal(signal.SIGINT, signal_handler)
    server = Httpd(port=int(port), address=addr)
    task = WorkerThread(server, 'httpd')
    worker = WorkerTasks(tasks=[task], func=start)
    worker.run()
    worker.wait_for_completion(timeout_sec=-1)  # run forever
def get_event_type(self, event):
    """Classify a long-poll *event*, returning its type name as a string.

    Returns 'MESSAGE_NEW', 'MESSAGE_TYPING_STATE', or 'UNKNOWN'; None only
    if a VK API error occurs before classification.
    """
    log = Log()
    log.action = 'get_event_type'
    log.sender = self
    log.log_var(event=event)
    res = None
    log.log_var(res=res)
    try:
        if event.type == api_longpoll.VkBotEventType.MESSAGE_NEW:
            res = 'MESSAGE_NEW'
        elif event.type == api_longpoll.VkBotEventType.MESSAGE_TYPING_STATE:
            res = 'MESSAGE_TYPING_STATE'
        else:
            res = 'UNKNOWN'
        log.log_var(res=res)
        log.status = 'OK'
    except vk_api.VkApiError as e:
        log.status = 'Exception'
        # FIX: keyword was misspelled 'exception_ifo'; every sibling handler
        # logs the exception under 'exception_info'.
        log.log_var(exception_info=e)
    self.class_logger.log_info(log)
    return res
def __init__(self, host_="warhol-fred.psc.edu", user_="apolloext",
             dbname_="apollo201", password_=None, logger_=None):
    """Store DB connection settings and epidemic-state lookup tables.

    The actual connection/cursors are created later (left as None here).
    A default Log is created and started when no logger is supplied.
    """
    self._host = host_
    self._user = user_
    self._dbname = dbname_
    self._password = password_
    # Connection and cursors are opened lazily, not in the constructor
    self._conn = None
    self._DictCursor = None
    self._RegularCursor = None
    self.populationAxis = None
    if logger_ is None:
        self.logger = Log("./db.test.log")
        self.logger.start()
    else:
        self.logger = logger_
    # Epidemic state code -> human-readable population description
    self.stateToPopulationDict = {'S':'susceptible',
                                  'E':'exposed',
                                  'I':'infectious',
                                  'R':'recovered',
                                  'C':'newly exposed',
                                  'V':'received vaccine control measure',
                                  'Av':'received antiviral control measure',
                                  'ScCl':'school that is closed'}
    # Epidemic state code -> data file name
    self.stateToDataFileDict = {'S':'susceptible.txt',
                                'E':'exposed.txt',
                                'I':'infectious.txt',
                                'R':'recovered.txt',
                                'C':'newly_exposed.txt',
                                'V':'vacc_administered.txt',
                                'Av':'av_administered.txt'}
def run(self, mutation_function, start=0, stop=0):
    """Dispatch to the selected mutation strategy and clean up the log.

    If the run log holds a previous position (more than one character),
    resume from it. start == stop means "use the strategy's defaults".
    """
    self.l = Log(mutation_function, self.fuzz_file)
    last = self.l.get_last()
    if len(last) > 1:
        # Resume where the previous (interrupted) run left off.
        start = int(last)

    ranged = start != stop  # computed after the resume adjustment above

    if mutation_function == "test_mutation":
        if ranged:
            self.test_mutation(start, stop)
        else:
            self.test_mutation()
    elif mutation_function == "bit_flip":
        if ranged:
            self.bit_flip(start, stop)
        else:
            self.bit_flip()
    elif mutation_function == "window_replace":
        self.window_replace(start)
    elif mutation_function == "rand_mutation":
        if ranged:
            self.rand_mutation(start, stop)
        else:
            self.rand_mutation()
    elif mutation_function == "ascii_string_replace":
        if ranged:
            self.strings(start, stop)
        else:
            self.strings()

    self.l.remove()
    return
import time

# Each run keeps an independent log named after the run timestamp.
# Every run gets its own folder containing the verbose log plus screenshots
# named by timestamp, e.g. 20180107120432.png.
APP_ID = '10643473'
API_KEY = 'N6qbo65qp120AAHHDralAKDD'
SECRET_KEY = 'aKFHazKri9DmWYoE3zGWFstU6Hv9vMlg'

# Crop box (left, top, right, bottom) for the quiz-app screenshot.
CDDH_CONFIG = (0, 312, 1080, 1166)

# Create the per-run log directory.
name = time.strftime("%Y%m%d%H%M%S", time.localtime())
dirPath = './log/' + name
os.mkdir(dirPath)
log = Log(dirPath)


def getTimeNow():
    """Return the current wall-clock time in seconds (time.time())."""
    return time.time()


timeInit = getTimeNow()
log.record('开始生成流程...', False)
client = AipOcr(APP_ID, API_KEY, SECRET_KEY)
os.popen('adb shell screencap -p > ~/Documents/hq_auto/hq_auto/log/' + name + '/' + name + '.png')
# Wait briefly so the screenshot file has time to be written.
# NOTE(review): comment said 300 ms but the sleep is 0.2 s — confirm intent.
time.sleep(0.2)
# FIX: time.time() is in seconds but the message is labeled 'ms' —
# convert to milliseconds so the logged duration matches its unit.
log.record('adb拉取截图完毕,耗费时间:' + str(int((getTimeNow() - timeInit) * 1000)) + 'ms', False)
im = Image.open('./log/' + name + '/' + name + '.png')
import discord
import requests

from logger import Log

log = Log("birb")


async def birb(ctx):
    """Reply in the invoking channel with a random birb image embed."""
    log.begin("cyan", ctx)
    tweet = requests.get('http://random.birb.pw/tweet/')
    image_url = "https://random.birb.pw/img/" + str(tweet.text)
    embed = discord.Embed()
    embed.set_image(url=image_url)
    await ctx.send(embed=embed)
    log.end()
def __init__(self):
    """Create the logger and the Testlink API client (module-level url/key)."""
    self.log = Log()
    self.tlc = TestlinkAPIClient(url, key)
# Threaded-worker plumbing: a module logger plus the two worker classes.
from logger import Log
from threading import Thread
from datetime import datetime, timedelta
import time

# Module-level logger shared by the worker classes.
LOG = Log(__name__)

"""
Class to abstract threaded worker subtasks
:param id: node identifier
:param worker: the subtask worker object
"""
class WorkerThread(object):

    def __init__(self, worker, id):
        # Node identifier for this subtask
        self.id = id
        # The underlying worker object the thread will drive
        self.worker = worker
        # Populated when the thread is started; None until then
        self.thread = None
        self.running = False
        self.start_time = 0
        self.timeout = False

"""
Class to construct a threaded worker
:param func: thread target function
:param tasks: list of subtasks (WorkerThread)
:param daemon: option to run task thread daemonized
"""
class WorkerTasks(object):

    def __init__(self, **kwargs):
        # Thread target function invoked for each task
        self.__func = kwargs.get('func')
        # List of WorkerThread subtasks to run
        self.__tasks = kwargs.get('tasks')
def ready(self):
    # App-ready hook: instantiate Log purely for its construction side
    # effects — presumably logger setup; confirm against the Log class.
    Log()
from config.settings import *
from config.amqp import *
from logger import Log
from json import dumps, loads
from threading import Thread
from kombu.mixins import ConsumerMixin
from kombu import BrokerConnection
from modules.worker import WorkerThread, WorkerTasks
import signal, sys

# Module-level logger for the kombu consumer.
LOG = Log('kombu')

"""
Class to abstract AMQP consumer event handling
:param callbacks: optional callbacks to be invoked on queue event
:param amqp_url: optional AMQP URL to connect, defaults from config/amqp.py
:param queue: The queue exchange to listen on for events
:param max_retries: Number of connection attempts
:param max_error: Max number of errored connection recovery attempts
"""
class AMQPWorker(ConsumerMixin):

    def __init__(self, **kwargs):
        # Callback(s) invoked on queue events; defaults to self.on_message
        self.__callbacks = kwargs.get('callbacks', self.on_message)
        # Broker URL; falls back to AMQP_URL from config/amqp.py
        self.__amqp_url = kwargs.get('amqp_url', AMQP_URL)
        self.__queue = kwargs.get('queue')
        self.__max_retries = kwargs.get('max_retries', 2)
        self.__max_error = kwargs.get('max_error', 3)
        # A queue is mandatory — fail fast on misconfiguration
        if self.__queue is None:
            raise TypeError('invalid worker queue parameter')
        self.connection = BrokerConnection(self.__amqp_url)
#!/usr/local/bin/python # -*- coding: utf-8 -*- from django.contrib.auth.models import User from logger import Log; log = Log('goflow.workflow.pushapps') def route_to_requester(workitem): '''Simplest possible pushapp ''' return workitem.instance.user def route_to_user(workitem, username): '''Route to user given a username ''' return User.objects.get(username=username) def route_to_superuser(workitem, username='******'): '''Route to the superuser ''' user = User.objects.get(username=username) if user.is_superuser: return user log.warning('this user is not a super-user:'******'''Should be used in all push applications for testing purposes. (**NOT IMPLEMENTED**) usage::
# Remaining CLI options; `parser` is created earlier in the file.
parser.add_argument('--pooling', default='avg', type=str)
parser.add_argument('--gcn_dim', default=150, type=int)
parser.add_argument('--num_layers', default=2, type=int,
                    help='num of AGGCN layer blocks')
parser.add_argument('--gcn_dropout', default=0.5, type=float)
parser.add_argument('--map_file', default='maps.pkl', type=str)
parser.add_argument('--result_path', default='result', type=str)
parser.add_argument('--emb_file', default='100.utf8', type=str)
parser.add_argument('--train_file', default=os.path.join("data_doc", "train"))
parser.add_argument('--dev_file', default=os.path.join("data_doc", "dev"))
parser.add_argument('--test_file', default=os.path.join("data_doc", "test"))
parser.add_argument('--log_name', default='test', type=str)
parser.add_argument('--seed', default=1023, type=int)

args = parser.parse_args()

# Record every parsed argument in the run log before starting.
log = Log(args.log_name + ".log")
log_handler = log.getLog()
log_handler.info("\nArgs: ")
for arg in vars(args):
    log_handler.info("{}: {}".format(arg, getattr(args, arg)))
log_handler.info("\n")

main()