def __init__(self, logs_to_cloud):
    """Authenticate against Twitter and build a retrying API client.

    Args:
        logs_to_cloud: Whether log output should also be sent to cloud logging.
    """
    self.logs_to_cloud = logs_to_cloud
    self.logs = Logs(name="twitter", to_cloud=self.logs_to_cloud)
    self.twitter_auth = OAuthHandler(TWITTER_CONSUMER_KEY,
                                     TWITTER_CONSUMER_SECRET)
    self.twitter_auth.set_access_token(TWITTER_ACCESS_TOKEN,
                                       TWITTER_ACCESS_TOKEN_SECRET)
    # Fix: wait_on_rate_limit was the string "True" (truthy, but the wrong
    # type); tweepy documents this parameter as a boolean.
    self.twitter_api = API(auth_handler=self.twitter_auth,
                           retry_count=API_RETRY_COUNT,
                           retry_delay=API_RETRY_DELAY_S,
                           retry_errors=API_RETRY_ERRORS,
                           wait_on_rate_limit=True,
                           wait_on_rate_limit_notify=True)
    # Populated later when a stream listener is attached.
    self.twitter_listener = None
def __init__(self, device_id, username, password, _logs_=None,
             scriptname=None, logId="O2TVGO/IPTVSimple"):
    """Store O2TV GO credentials and reset all per-session state."""
    self.username = username
    self.password = password
    self._live_channels = {}
    self.access_token = None
    self.subscription_code = None
    self.locality = None
    self.offer = None
    self.device_id = device_id
    ######## ADDED ########
    self.channel_key = None
    self.epg_id = None
    self.forceFromTimestamp = None
    self.hoursToLoadFrom = None
    self.hoursToLoad = None
    self.logIdSuffix = "/o2tvgo.py/O2TVGO"
    self.scriptname = scriptname
    self.logId = logId
    # Reuse the caller's logger when one is supplied; otherwise build our own.
    if _logs_:
        self._logs_ = _logs_
    else:
        from logs import Logs
        self._logs_ = Logs(scriptname, logId)
def __start_work(self, quest_name, timestamp):
    """Mark the named quest as started at *timestamp* and record the event."""
    assert_person(quest_name in self.quests,
                  f'quest named {quest_name} not found')
    assert_person(self.quests[quest_name]['start_timestamp'] is None,
                  f'quest named {quest_name} is already started')
    quest = self.quests[quest_name]

    # The requested start time must not precede the quest's most recent log.
    logs_obj = Logs(self.logs_path)
    logs = logs_obj.get_logs(
        select=['timestamp'],
        where=f'quest_id="{quest["id"]}" AND action="log_work"')
    if logs:
        last_log_timestamp = parse_timestamp(logs[-1][0])
        assert_person(last_log_timestamp <= timestamp,
                      f'the given start datetime ({timestamp})'
                      f' is before a last log of the quest ({last_log_timestamp})')

    quest['start_timestamp'] = timestamp
    logs_obj.log(
        timestamp=str(timestamp),
        quest_id=quest['id'],
        action='start_work',
        points={},
        bookmark='')
    logs_obj.close()
def process_queue(self, worker_id):
    """Continuously processes tasks on the queue until stop_event is set."""
    # Create a new logs instance (with its own httplib2 instance) so that
    # there is a separate one for each thread.
    logs = Logs("twitter-listener-worker-%s" % worker_id,
                to_cloud=self.logs_to_cloud)
    logs.debug("Started worker thread: %s" % worker_id)
    while not self.stop_event.is_set():
        try:
            data = self.queue.get(block=True, timeout=QUEUE_TIMEOUT_S)
            start_time = time()
            self.handle_data(logs, data)
            self.queue.task_done()
            end_time = time()
            qsize = self.queue.qsize()
            # Fix: time() returns seconds, but the message claims ms —
            # convert before logging.
            logs.debug("Worker %s took %.f ms with %d tasks remaining." %
                       (worker_id, (end_time - start_time) * 1000, qsize))
        except Empty:
            # Timed out on an empty queue; loop to re-check stop_event.
            continue
        except Exception:
            # The main loop doesn't catch and report exceptions from
            # background threads, so do that here.
            logs.catch()
    logs.debug("Stopped worker thread: %s" % worker_id)
def __init__(self, logs_to_cloud):
    """Exchange the stored Questrade refresh token for fresh API tokens.

    Args:
        logs_to_cloud: Whether log output should also be sent to cloud logging.
    """
    # Local import: the module imports the datetime *class*, so
    # datetime.timedelta (used below) would not resolve at module scope.
    from datetime import timedelta

    self.logs = Logs(name="trading", to_cloud=logs_to_cloud)

    # Get initial API keys from Questrade.
    url = QUESTRADE_AUTH_API_URL % __builtin__.QUESTRADE_REFRESH_TOKEN
    method = "GET"
    body = ""
    headers = None
    client = Client(None, None)
    self.logs.debug("Questrade request: %s %s %s %s" %
                    (url, method, body, headers))
    response, content = client.request(url, method=method, body=body,
                                       headers=headers)
    self.logs.debug("Questrade response: %s %s" % (response, content))

    try:
        response = loads(content)
        self.access_token = response['access_token']
        self.api_server = response['api_server']
        # Fix: the original called datetime.timedelta(...), which raises
        # AttributeError because `datetime` here is the class (it is used as
        # datetime.now() above). Use timedelta directly.
        self.expires_in = datetime.now() + timedelta(
            seconds=response['expires_in'])
        __builtin__.QUESTRADE_REFRESH_TOKEN = response['refresh_token']
        self.token_type = response['token_type']
    except (ValueError, KeyError):
        # KeyError covers a well-formed JSON error payload that lacks the
        # expected token fields; ValueError covers unparseable content.
        self.logs.error("Failed to retrieve initial API tokens: %s" % content)
def __init__(self, expirationMinutes):
    """Create an empty cache that expires after *expirationMinutes*."""
    self.expirationMinutes = expirationMinutes
    self.cache = {}
    # Everything cached from now on is considered valid until this deadline.
    self.validUntil = datetime.now() + timedelta(minutes=expirationMinutes)
    self.logs = Logs(self.__class__.__name__)
def __init__(self, environment, general_nn=None, inherit_nn=None):
    # World setup: spawns the starting creatures on a grid and wires up
    # optional neural-network policies plus (verbose-only) logging.
    self.params = Params()
    if self.params.seed:
        seed(self.params.seed)
    self.creatures = []
    self.entity_grid = np.zeros(
        (self.params.grid_size, self.params.grid_size, 4))
    # 4 is for has_entity, id, strength, energy
    self.environment = environment
    if self.params.general_nn:  # T
        # NOTE(review): random_policy=True when a general NN is configured
        # looks inverted given the names — confirm this is intentional.
        self.random_policy = True
    else:
        self.random_policy = False
    self.general_nn = general_nn
    self.inherit_nn = inherit_nn
    self.exploration_rate = self.params.exploration_rate
    for _ in range(self.params.starting_creatures):
        self.spawn_creature()
    if self.params.verbose:
        # self.logs only exists in verbose mode; other code must guard on it.
        self.logs = Logs(self.environment)
        self.logs.log_run()
        self.logs.log_header()
    # NOTE(review): raises AttributeError when general_nn is None (its
    # default) — presumably callers always pass one; verify.
    self.batch_counter = self.general_nn.align_counter  # starts 1
    self.logs_random = not self.params.logs_random and self.random_policy
def __init__(self, db_path, profile_path, plugin_path,
             _notification_disable_all_, _logs_,
             scriptname="O2TVGO/IPTVSimple", logId="O2TVGO/IPTVSimple"):
    """Open the database, verify its schema and clean up duplicate rows."""
    self.db_path = db_path
    self.connection = False
    self.cursor = False
    self.connectDB()
    self.profile_path = profile_path
    self.plugin_path = plugin_path
    self._notification_disable_all_ = _notification_disable_all_
    # Reuse the caller's logger when one is supplied; otherwise build our own.
    if _logs_:
        self._logs_ = _logs_
    else:
        from logs import Logs
        self._logs_ = Logs(scriptname, logId)
    self.logIdSuffix = "/db.py/O2tvgoDB"
    self.scriptname = scriptname
    self.logId = logId
    # Sentinel values returned by the query helpers on error / lock.
    self.exceptionRes = -1000
    self.lockDefaultValue = -2000
    self.tablesOK = False
    self.check_tables()
    self.cleanEpgDuplicates(doDelete=True)
    self.cleanChannelDuplicates(doDelete=True)
def __init__(self, enabled: bool, expirationSeconds: int,
             blockAfterFailures: int):
    """Initialise the failure-tracking store and its helper objects."""
    self.database = {}
    self.enabled = enabled
    self.expirationSeconds = expirationSeconds
    self.blockAfterFailures = blockAfterFailures
    self.logs = Logs(self.__class__.__name__)
    self.security = Security()
def __init__(self, logs_to_cloud):
    """Authenticate against Twitter and build a plain API client."""
    self.logs_to_cloud = logs_to_cloud
    self.logs = Logs(name="twitter", to_cloud=self.logs_to_cloud)
    # Build the OAuth handler first, then hand it to the API client.
    auth = OAuthHandler(TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET)
    auth.set_access_token(TWITTER_ACCESS_TOKEN, TWITTER_ACCESS_TOKEN_SECRET)
    self.twitter_auth = auth
    self.twitter_api = API(self.twitter_auth)
def _web_info(self):
    """Parse the fetched response into BeautifulSoup.

    Returns:
        True when the response could be parsed; False (after writing an
        error log entry) when the request failed.
    """
    # Idiom fix: `is not False` checks for the exact failure sentinel;
    # the original `!= False` would invoke the response object's __eq__.
    if self.status_code == 200 or self.req_info is not False:
        self.soup = BeautifulSoup(self.req_info.text, 'lxml')
        return True
    else:
        # Status code was not 200: write the failure to the error log.
        logs = Logs(self.url, self.status_code, self.date_time)
        logs.logs_write()
        return False
def __init__(self, expirationMinutes: int):
    """Create an empty cache with default group-matching behaviour."""
    self.expirationMinutes = expirationMinutes
    self.cache = {}
    # Everything cached from now on is considered valid until this deadline.
    self.validUntil = datetime.now() + timedelta(minutes=expirationMinutes)
    # Group matching defaults: case-sensitive, all groups must match.
    self.groupCaseSensitive = True
    self.groupConditional = 'and'
    self.logs = Logs(self.__class__.__name__)
def twitter_callback(self, tweet):
    """Analyzes Trump tweets, trades stocks, and tweets about it."""
    # A fresh Logs instance is created inside the callback so that each
    # thread gets its own httplib2 instance.
    logs = Logs(name="callback")
    self.logs.info("twitter_callback starts")
def __init__(self, _logs_=None, scriptname="O2TVGO/IPTVSimple",
             logId="O2TVGO/IPTVSimple"):
    """Keep logging configuration for the JSON-RPC helper."""
    # Reuse the caller's logger when one is supplied; otherwise build our own.
    if _logs_:
        self._logs_ = _logs_
    else:
        from logs import Logs
        self._logs_ = Logs(scriptname, logId)
    self.logIdSuffix = "/jsonrpc.py/JsonRPC"
    self.scriptname = scriptname
    self.logId = logId
def _get_404(self):
    """Check the page's <header> elements for a '404' marker.

    Returns:
        False when the page looks like a 404 page, True otherwise
        (including when the check itself fails).
    """
    # Elements are fetched via BeautifulSoup's select().
    info = self._get_select('header')
    try:
        for index in info:
            if index.text[:3] == '404':
                log = Logs(self.root_url, '404', str(self.date_time)[:-7])
                log.logs_write()
                return False
            else:
                # NOTE(review): only the first element is ever inspected —
                # this else-return exits on iteration one. Confirm intended.
                return True
        # Fix: an empty selector result previously fell through and returned
        # None (falsy), which callers would misread as "404 page".
        return True
    except Exception:
        # Fix: narrowed the bare `except:` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; any parse error still means "not a 404".
        return True
def __init__(self):
    """Authenticate against Twitter and build a retrying API client."""
    self.logs = Logs(name="twitter")
    # Build the OAuth handler first, then hand it to the API client.
    auth = OAuthHandler(settings.CONSUMER_KEY, settings.CONSUMER_SECRET)
    auth.set_access_token(settings.ACCESS_TOKEN, settings.ACCESS_SECRET)
    self.twitter_auth = auth
    self.twitter_api = API(auth_handler=auth,
                           retry_count=API_RETRY_COUNT,
                           retry_delay=API_RETRY_DELAY_S,
                           retry_errors=API_RETRY_ERRORS,
                           wait_on_rate_limit=True,
                           wait_on_rate_limit_notify=True)
    # Populated later when a stream listener is attached.
    self.twitter_listener = None
def run(self):
    """Entry point: ask for a link if needed, pick a resolution, download."""
    if self._isDebug:
        print(s("Mode Debug Active"))
    if self._link is None:
        self.tanyaLink()
    cls()
    B = Banner()
    B.cetakbanner()
    cetakgaris("Pilih Resolusi Video")
    self._YT = YouTube(self._link)
    # Fix: check_availability is a method; the original referenced it
    # without calling it, so availability was never actually checked.
    self._YT.check_availability()
    self.infoVideo()
    print("")
    time.sleep(3)
    pilihan = self.resolusi()
    # Make sure a download location has been configured.
    cek = Logs()
    if not cek.cek():
        print(s("%s" % prRed("Lokasi penyimpanan belum diset ")))
        cek.gantiPath()
    self._savePath = cek._path
    cls()
    B.cetakbanner()
    cetakgaris("Please Wait.. Downloading")
    try:
        if pilihan == 'video':
            super(DownloadYT, self).Downloaderffmpeg(
                self._Audio, self._Video, self._savePath)
        elif pilihan == 'audio':
            super(DownloadYT, self).DownloadMP3(self._Audio, self._savePath)
        else:
            raise ValueError("Error")
    except Exception:
        # Fix: narrowed the bare `except:` so Ctrl-C still interrupts.
        print(s("Terjadi kesalahan!"))
        return
    sys.exit(prCyan("Terima kasih! ;) "))
def main():
    """Parse CLI arguments and drive the request service."""
    logs = Logs()
    requestService = RequestService(api_name="Api_name", logs=logs)
    # No arguments at all: show usage.
    if not sys.argv[1:]:
        usage()
    args = parse_args()
    if args.log_file_name:
        logs.write_file(filename=args.log_file_name)
    # A single target URL takes precedence over a file of endpoints.
    if args.target_url:
        requestService.add_endpoints(args.target_url)
        print(requestService.start())
    elif args.file_to_read:
        requestService.read_endpoints_from_file(args.file_to_read)
        print(requestService.start())
def __init__(self, ldapEndpoint, dnUsername, dnPassword, bindDN,
             searchBase, searchFilter, groupCaseSensitive, groupConditional):
    """Open an LDAP connection with the given bind and search settings."""
    self.ldapEndpoint = ldapEndpoint
    self.searchBase = searchBase
    self.dnUsername = dnUsername
    self.dnPassword = dnPassword
    self.bindDN = bindDN
    self.searchFilter = searchFilter
    # Conditional is normalised to lower case ('and' / 'or').
    self.groupConditional = groupConditional.lower()
    self.groupCaseSensitive = groupCaseSensitive
    # NOTE(review): OPT_X_TLS_NEVER disables TLS certificate verification —
    # confirm this is acceptable for the deployment environment.
    ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_NEVER)
    self.connect = ldap.initialize(self.ldapEndpoint)
    self.connect.set_option(ldap.OPT_REFERRALS, 0)
    self.connect.set_option(ldap.OPT_DEBUG_LEVEL, 255)
    self.logs = Logs(self.__class__.__name__)
def __log_work(self, quest_name, timestamp, points, bookmark):
    """
    Record a work log entry for a started quest and accumulate its points.

    :param quest_name: 'name'
    :param timestamp: pandas.Timestamp object
    :param points: {'Minuten': 48, }
    :param bookmark: 'any text'
    """
    assert_person(quest_name in self.quests,
                  f'quest named {quest_name} not found')
    quest = self.quests[quest_name]
    assert_person(quest['start_timestamp'] is not None,
                  f'quest named {quest_name} is not started')
    # Fix: the failure message said "after" although this assertion fires
    # when the log timestamp comes BEFORE the quest's start timestamp.
    assert_person(quest['start_timestamp'] <= timestamp,
                  f'the given log timestamp ({timestamp})'
                  f' is before a start timestamp of the quest'
                  f' ({quest["start_timestamp"]})')
    # Validate every point name before mutating any state.
    for point_name in points:
        assert_person(point_name in quest['points'],
                      f'point named {point_name} is not in quest {quest_name} points')
    quest['bookmark'] = bookmark
    for point_name in points:
        point = quest['points'][point_name]
        point['total_points'] += points[point_name]
    date = timestamp.date()
    if quest['first_date'] is None:
        quest['first_date'] = date
    quest['last_date'] = date
    # Logging work closes the current working session.
    quest['start_timestamp'] = None
    logs_obj = Logs(self.logs_path)
    logs_obj.log(
        timestamp=str(timestamp),
        quest_id=quest['id'],
        action='log_work',
        points=points,
        bookmark=bookmark)
    logs_obj.close()
def __init__(self, logs_to_cloud):
    """Build one authenticated Twitter API client per configured account."""
    self.logs_to_cloud = logs_to_cloud
    self.logs = Logs(name="twitter", to_cloud=self.logs_to_cloud)
    self.twitter = {}
    for account in accounts:
        auth = OAuthHandler(TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET)
        auth.set_access_token(account.twitter_access_token,
                              account.twitter_access_token_secret)
        api = API(auth_handler=auth,
                  retry_count=API_RETRY_COUNT,
                  retry_delay=API_RETRY_DELAY_S,
                  retry_errors=API_RETRY_ERRORS,
                  wait_on_rate_limit=True,
                  wait_on_rate_limit_notify=True)
        # Keyed by bot account id so callers can pick the right credentials.
        self.twitter[account.bot_account_id] = (auth, api)
    # Populated later when a stream listener is attached.
    self.twitter_listener = None
def __init__(self, my_binding, sendto_sock):
    """Receiver-side state: one instance receives exactly one file.

    Once the file is fully received the instance is closed; sending again
    requires creating a fresh instance.
    """
    self.path = None
    self.file_out = None
    self.my_binding = my_binding
    self.target = sendto_sock
    self.__running = Event()
    # Main worker thread and its wake-up event.
    self.__thread_main = Thread(target=self.__main_thread)
    self.__event_mainthread = Event()
    self.lastseq = -1          # sequence number of the last frame received
    self.seq_expected = 0      # next sequence number the window expects
    self.seq_and_info = []     # (send_seq, info) pairs
    self.recv_info = []        # payloads received so far
    self.info_len_expected = 1000
    self.logs = Logs()
def process_queue(self, worker_id):
    """Continuously processes tasks on the queue until stop_event is set."""
    # Local import: this variant of the file may not import Empty at top level.
    from queue import Empty

    # Create a new logs instance (with its own httplib2 instance) so that
    # there is a separate one for each thread.
    logs = Logs("twitter-listener-worker-%s" % worker_id,
                to_cloud=self.logs_to_cloud)
    logs.debug("Started worker thread: %s" % worker_id)
    while not self.stop_event.is_set():
        # The main loop doesn't catch and report exceptions from background
        # threads, so do that here.
        try:
            size = self.queue.qsize()
            logs.debug("Processing queue of size: %s" % size)
            # Fix: a fully blocking get() never re-checks stop_event, so the
            # worker could hang forever on shutdown; poll with a timeout.
            data = self.queue.get(block=True, timeout=1)
            self.handle_data(logs, data)
            self.queue.task_done()
        except Empty:
            # Timed out on an empty queue; loop to re-check stop_event.
            continue
        except BaseException as exception:
            logs.catch(exception)
    logs.debug("Stopped worker thread: %s" % worker_id)
def twitter_callback(self, tweet):
    """Analyzes Trump tweets, trades stocks, and tweets about it."""
    # Fresh Analysis/Logs/Trading/Twitter instances are built per callback
    # so every thread gets its own httplib2 instance.
    analyzer = Analysis(logs_to_cloud=LOGS_TO_CLOUD)
    log = Logs(name="main-callback", to_cloud=LOGS_TO_CLOUD)

    # Work out which companies the tweet mentions.
    companies = analyzer.find_companies(tweet)
    log.info("Using companies: %s" % companies)
    if not companies:
        return

    # Trade stocks (actual trading is currently disabled).
    trader = Trading(logs_to_cloud=LOGS_TO_CLOUD)
    # trader.make_trades(companies)

    # Tweet about it.
    twitter_client = Twitter(logs_to_cloud=LOGS_TO_CLOUD)
    twitter_client.tweet(companies, tweet)
def __init__(self, path=None):
    """Load configuration and prepare log files, SDK and WeChat clients."""
    self.path = path if path else config['path']
    self.bak_path = config['bak']
    self.log_path = config['log']
    self.dir_files = DirFiles(self.bak_path)
    self.logs = Logs(config['log'])
    self.dir_files.create_dirs(self.log_path)
    self.year_month = YearMonth(config['start'], config['end'])
    self.current_month = config['current_month']
    self.count = config['count']
    self.disable_date = config['disable_date']

    # Log files: one per period for checked items, one shared hot log.
    checked_name = self.year_month.create_date(status=1) + '_checked.log'
    self.check_log = self.dir_files.get_check_path(self.log_path,
                                                   checked_name)
    self.hot_log = self.dir_files.get_check_path(self.log_path, 'hot.log')
    if self.dir_files.check_file(self.check_log):
        self.checked_lists = self.logs.read_log(self.check_log)
    else:
        self.checked_lists = []
    if self.dir_files.check_file(self.hot_log):
        self.hot_lists = self.logs.read_log(self.hot_log)
    else:
        self.hot_lists = []

    # Content-moderation SDK credentials.
    self.appid = config['appid']
    self.secret_id = config['secret_id']
    self.secret_key = config['secret_key']
    self.sdk = SDK(self.appid, self.secret_id, self.secret_key)

    # WeChat Work notification client.
    self.weixin = WeiXin(
        corpid=config['weixin']['corpid'],
        secrect=config['weixin']['secrect'],
        agentid=config['weixin']['agentid'],
        touser=config['weixin']['touser'],
        product=config['product'],
        toparty=config['weixin']['toparty'],
    )
def __init__(self, jsonfile, logfile, sipcallfile, optsUpdateUI=None):
    """Init for the Worker class"""
    print("{0}------------ INIT FOR DOOR SENSOR CLASS! ----------------{1}"
          .format(bcolors.HEADER, bcolors.ENDC))

    # Global variables.
    self.jsonfile = jsonfile
    self.logfile = logfile
    self.sipcallfile = sipcallfile
    self.settings = self.ReadSettings()
    self.limit = 10
    self.logtypes = 'all'
    # Stop execution on exit.
    self.kill_now = False

    # Init alarm notifications.
    self.mynotify = Notify(self.settings)
    self.mynotify.setupUpdateUI(optsUpdateUI)
    self.mynotify.setupSendStateMQTT()

    # Logging: record the boot and start trimming old entries.
    mylogs = Logs(self.logfile)
    self.getSensorsLog = mylogs.getSensorsLog
    self.writeLog("system", "Alarm Booted")
    mylogs.startTrimThread()

    # Event listeners.
    self.sensors = Sensor()
    self.sensors.on_alert(self.sensorAlert)
    self.sensors.on_alert_stop(self.sensorStopAlert)
    self.sensors.on_error(self.sensorError)
    self.sensors.on_error_stop(self.sensorStopError)
    self.sensors.add_sensors(self.settings)

    # Init MQTT messages.
    self.mynotify.on_disarm_mqtt(self.deactivateAlarm)
    self.mynotify.on_arm_mqtt(self.activateAlarm)
    self.mynotify.on_sensor_set_alert(self.sensorAlert)
    self.mynotify.on_sensor_set_stopalert(self.sensorStopAlert)
    self.mynotify.sendStateMQTT()
def twitter_callback(self, tweet):
    """Analyzes Trump tweets, trades stocks, and tweets about it."""
    # Persist the raw tweet (file + DB) before any processing.
    screen_name = "realDonaldTrump"
    alltweets = []
    toList(tweet, alltweets)
    writeToFile(alltweets, screen_name)
    writeToDB(alltweets, screen_name)

    # Initialize the Analysis, Logs, Trading, and Twitter instances inside
    # the callback to create separate httplib2 instances per thread.
    analysis = Analysis()
    logs = Logs(name="main-callback")
    self.logs.info("twitter_callback starts")

    # Work out which companies the tweet mentions.
    companies = analysis.find_companies(tweet)
    logs.info("Using companies: %s" % companies)
    if not companies:
        return
def __init__(self, o2tv, channel_key, name, logo_url, weight, _logs_=None,
             scriptname="O2TVGO/IPTVSimple", logId="O2TVGO/IPTVSimple"):
    """Describe a single live channel belonging to an O2TVGO session."""
    self._o2tv = o2tv
    self.channel_key = channel_key
    self.name = name
    self.weight = weight
    self.logo_url = logo_url
    self.logIdSuffix = "/o2tvgo.py/LiveChannel"
    self.scriptname = scriptname
    self.logId = logId
    # Reuse the caller's logger when one is supplied; otherwise build our own.
    if _logs_:
        self._logs_ = _logs_
    else:
        from logs import Logs
        self._logs_ = Logs(scriptname, logId)
def main():
    """Publish MQTT messages on a fixed schedule using CLI parameters."""
    global mqtt
    global logs
    parametros = validacao_parametros()
    mqtt = Mqtt(parametros['broker'], parametros['user'],
                parametros['password'], parametros['topic'])
    logs = Logs(parametros['group'], parametros['stream'])

    # Queue the publisher runs, then pump the MQTT loop until they finish.
    scheduler = sched.scheduler(time.time, time.sleep)
    schedule_it(scheduler, int(parametros['messages']),
                int(parametros['seconds']), publisher)
    mqtt.connect()
    mqtt.client.loop_start()
    scheduler.run()
    mqtt.client.loop_stop()
    mqtt.client.disconnect()
def __init__(self, my_binding, sendto_sock, filelist):
    """Sender-side sliding-window state for transmitting *filelist*."""
    self.mybinding = my_binding
    self.target = sendto_sock
    self.filelist = filelist
    self.fileptr = args.sending_window_size - 1
    self.list_len = len(filelist)

    '''Events'''
    self.__event_mainthread = Event()
    self.__event_send = Event()
    self.__event_endsend = Event()
    self.__event_slide = Event()
    self.__event_endslide = Event()
    self.__event_timer = Event()
    self.__event_timeout = Event()
    self.__running = Event()

    '''Threads'''
    self.__thread_main = Thread(target=self.__main_thread)      # main loop
    self.__thread_sending = Thread(target=self.__thread_send)   # sender
    self.__thread_sliding = Thread(target=self.__thread_slide)  # window slider
    # Timeout-control timer thread (pool).
    self.__thread_timeouting = Thread(target=self.__thread_timeouter)

    '''Utils'''
    self.max_no = args.max_sending_no  # highest usable sequence number
    # Sequence numbers and frames currently inside the send window.
    self.sw_nolist = list(range(0, args.sending_window_size))
    self.sw_pdulist = [PDU(seq=i, info=self.filelist[i])
                       for i in self.sw_nolist]
    self.sw_timeouter = [None] * args.sending_window_size
    self.last_sent = -1      # last frame actually sent
    self.have_sent = -1      # frames recorded as sent after a timeout
    self.sw_recvlist = []    # ACKs received so far
    self.success_sent = 0    # packets sent and acknowledged
    self.TO_flag = False     # timeout flag
    self.RT_flag = False     # retransmission flag
    self.logs = Logs()