def get_not_recommended_count_by_shop(shop):
    """Count the shop's user relations flagged NOT_RECOMMENDED; 0 on any error."""
    try:
        relations = UserShopRelationMethods.get_by_shop(shop)
        return relations.filter(
            is_recommended=UserShopRelationMethods.NOT_RECOMMENDED).count()
    except Exception as ex:
        # Best-effort: log the failure and fall back to a neutral count.
        Logs.print_current_function_name_and_line_number(ex)
        return 0
def __init__(self, db_path, profile_path, plugin_path, _notification_disable_all_, _logs_, scriptname="O2TVGO/IPTVSimple", logId="O2TVGO/IPTVSimple"):
    """Open the SQLite-backed store, verify its schema and clean duplicates.

    A caller-supplied Logs instance is reused; otherwise one is created.
    """
    self.db_path = db_path
    # Connection/cursor are created by connectDB(); False marks "not connected".
    self.connection = False
    self.cursor = False
    self.connectDB()
    self.profile_path = profile_path
    self.plugin_path = plugin_path
    self._notification_disable_all_ = _notification_disable_all_
    if _logs_:
        self._logs_ = _logs_
    else:
        # Deferred import: only needed when no logger is injected.
        from logs import Logs
        self._logs_ = Logs(scriptname, logId)
    self.logIdSuffix = "/db.py/O2tvgoDB"
    self.scriptname = scriptname
    self.logId = logId
    # Sentinel return values used by the DB helpers.
    self.exceptionRes = -1000
    self.lockDefaultValue = -2000
    self.tablesOK = False
    self.check_tables()
    # Eager cleanup on startup; doDelete=True removes the duplicates outright.
    self.cleanEpgDuplicates(doDelete=True)
    self.cleanChannelDuplicates(doDelete=True)
def __init__(self, environment, general_nn=None, inherit_nn=None):
    """Set up the simulation world: grid, creatures, policy and logging."""
    self.params = Params()
    if self.params.seed:
        # Deterministic runs when a seed is configured.
        seed(self.params.seed)
    self.creatures = []
    self.entity_grid = np.zeros(
        (self.params.grid_size, self.params.grid_size, 4))  # 4 is for has_entity, id, strength, energy
    self.environment = environment
    if self.params.general_nn:  # T
        # NOTE(review): random_policy=True when a general NN is configured
        # looks inverted — confirm against the policy-selection code.
        self.random_policy = True
    else:
        self.random_policy = False
    self.general_nn = general_nn
    self.inherit_nn = inherit_nn
    self.exploration_rate = self.params.exploration_rate
    for _ in range(self.params.starting_creatures):
        self.spawn_creature()
    if self.params.verbose:
        self.logs = Logs(self.environment)
        self.logs.log_run()
        self.logs.log_header()
    # NOTE(review): raises AttributeError when general_nn is None — confirm
    # callers always pass one.
    self.batch_counter = self.general_nn.align_counter  # starts 1
    self.logs_random = not self.params.logs_random and self.random_policy
def get_file_name(url):
    """Return the last path segment of *url* (the file name), or None on error."""
    try:
        # rsplit with maxsplit=1 yields the same tail as split("/")[-1].
        return url.rsplit("/", 1)[-1]
    except Exception as ex:
        Logs.print_current_function_name_and_line_number(ex)
        return None
class Sql(object):
    """Singleton MySQL connection holder (Python 2 codebase: MySQLdb,
    old-style except syntax)."""

    __instance__ = None

    def __new__(cls):
        # Classic singleton: construct once, hand out the same instance after.
        if cls.__instance__ is None:
            cls.__instance__ = object.__new__(cls)
            cls.__construct__(cls.__instance__)
        return cls.__instance__

    def __construct__(self):
        """One-time initialiser: load config and open the MySQL connection."""
        object.__init__(self)
        self.__config = Config()
        self.__logs = Logs()
        # init connection param
        self._config_load()
        # connect
        try:
            self.__logs.debug('Try to connect to Mysql')
            self._connect = MySQLdb.connect(host=self.__host, user=self.__user, passwd=self.__pass, port=self.__port, db=self.__db)
        except Exception, e:
            # NOTE(review): fatal() is called but execution continues to the
            # "Connection okay." line — confirm fatal() terminates the process.
            self.__logs.fatal('Mysql connection failure: %s' % (e))
        self.__logs.debug('Connection okay.')
def get_info(red_envelope):
    """Serialize *red_envelope* via Objects; empty dict when that fails."""
    try:
        return Objects.get_object_info(red_envelope)
    except Exception as ex:
        Logs.print_current_function_name_and_line_number(ex)
        return dict()
def get_publishes_by_device(device_id):
    """Queryset of Publish rows for *device_id*; None if the query fails."""
    try:
        return Publish.objects.filter(device_id=device_id)
    except Exception as ex:
        Logs.print_current_function_name_and_line_number(ex)
        return None
def get_attribute_by_user_and_publish(user_id, publish_id):
    """The user↔publish relation's attribute; -1 when it is missing/broken."""
    try:
        relation = UserPublishRelations.objects.get(
            user_id=user_id, publish_id=publish_id)
        return relation.attribute
    except Exception as ex:
        Logs.print_current_function_name_and_line_number(ex)
        return -1
def process_queue(self, worker_id):
    """Continuously processes tasks on the queue.

    Runs until self.stop_event is set. Fix: the elapsed time was computed in
    seconds but reported as "ms" — it is now converted to milliseconds.
    """
    # Create a new Logs instance (with its own httplib2 instance) so that
    # there is a separate one for each thread.
    logs = Logs("twitter-listener-worker-%s" % worker_id,
                to_cloud=self.logs_to_cloud)
    logs.debug("Started worker thread: %s" % worker_id)
    while not self.stop_event.is_set():
        try:
            data = self.queue.get(block=True, timeout=QUEUE_TIMEOUT_S)
            start_time = time()
            self.handle_data(logs, data)
            self.queue.task_done()
            end_time = time()
            qsize = self.queue.qsize()
            logs.debug("Worker %s took %.f ms with %d tasks remaining." %
                       (worker_id, (end_time - start_time) * 1000, qsize))
        except Empty:
            # Timed out on an empty queue.
            continue
        except Exception:
            # The main loop doesn't catch and report exceptions from
            # background threads, so do that here.
            logs.catch()
    logs.debug("Stopped worker thread: %s" % worker_id)
def to_utc(dt):
    """Normalize *dt* via Datetimes and attach the UTC tzinfo; None on error."""
    try:
        converted = Datetimes.transfer_datetime(dt, is_utc=False)
        return converted.replace(tzinfo=utc)
    except Exception as ex:
        Logs.print_current_function_name_and_line_number(ex)
        return None
def __init__(self, logs_to_cloud):
    """Fetch the initial Questrade API tokens using the refresh token.

    Fix: the original evaluated `datetime.now() + datetime.timedelta(...)` —
    whichever binding `datetime` had (class or module), exactly one of those
    two attribute accesses must raise. Import both names explicitly.
    """
    from datetime import datetime, timedelta

    self.logs = Logs(name="trading", to_cloud=logs_to_cloud)
    # Get initial API keys from Questrade
    url = QUESTRADE_AUTH_API_URL % __builtin__.QUESTRADE_REFRESH_TOKEN
    method = "GET"
    body = ""
    headers = None
    client = Client(None, None)
    self.logs.debug("Questrade request: %s %s %s %s" %
                    (url, method, body, headers))
    response, content = client.request(url, method=method, body=body,
                                       headers=headers)
    self.logs.debug("Questrade response: %s %s" % (response, content))
    try:
        response = loads(content)
        self.access_token = response['access_token']
        self.api_server = response['api_server']
        # expires_in is a lifetime in seconds.
        self.expires_in = datetime.now() + timedelta(0, response['expires_in'])
        __builtin__.QUESTRADE_REFRESH_TOKEN = response['refresh_token']
        self.token_type = response['token_type']
    except ValueError:
        self.logs.error("Failed to retrieve initial API tokens: %s" % content)
def __init__(self, expirationMinutes):
    """Time-boxed cache: contents are valid until now + expirationMinutes."""
    self.expirationMinutes = expirationMinutes
    self.cache = {}  # cached entries, cleared when validUntil passes
    self.validUntil = (
        datetime.now() + timedelta(minutes=self.expirationMinutes))
    self.logs = Logs(self.__class__.__name__)
def get_user_image(user):
    """Build resized big/small avatar images for *user* and return their paths.

    Falls back to gender-specific default icons when resizing fails.
    """
    result = dict()
    if user:
        if user.userextension.big_image:
            user_image = files.BASE_URL_4_IMAGE + user.userextension.big_image.name
            # Logs.print_log("user_image", user_image)
        else:
            if user.userextension.gender == "M":
                user_image = USER_DEFAULT_MALE_ICON
            else:
                user_image = USER_DEFAULT_FEMALE_ICON
    else:
        user_image = USER_DEFAULT_ICON
    memory_file = files.Files.get_memory_file(user_image)
    user_image_path = "/".join(user_image.split("/")[:-1])
    user_image_name = user_image.split("/")[-1]
    # Derive "<stem>_big.<ext>" / "<stem>_small.<ext>" names in the same folder.
    big_user_image_name = ".".join(user_image_name.split(".")[:-1]) + "_big." + user_image_name.split(".")[-1]
    small_user_image_name = ".".join(user_image_name.split(".")[:-1]) + "_small." + user_image_name.split(".")[-1]
    big_user_image = user_image_path + "/" + big_user_image_name
    small_user_image = user_image_path + "/" + small_user_image_name
    try:
        # 240px and 96px targets for big/small variants respectively.
        Images.resize_image(memory_file, big_user_image, 240)
        Images.resize_image(memory_file, small_user_image, 96)
        result["big_user_image"] = big_user_image
        result["small_user_image"] = small_user_image
    except Exception as ex:
        Logs.print_current_function_name_and_line_number(ex)
        # NOTE(review): this fallback dereferences user.userextension, which
        # raises if user is None — confirm callers never hit that combination.
        if user.userextension.gender == "M":
            result["big_user_image"] = USER_DEFAULT_MALE_ICON
            result["small_user_image"] = USER_DEFAULT_MALE_ICON
        else:
            result["big_user_image"] = USER_DEFAULT_FEMALE_ICON
            result["small_user_image"] = USER_DEFAULT_FEMALE_ICON
    return result
def set_node_text(node, text):
    """Assign *text* to node.text and return the node; None if that fails."""
    try:
        node.text = text
    except Exception as ex:
        Logs.print_current_function_name_and_line_number(ex)
        return None
    return node
def get_file_content(path):
    """Return the full text of the file at *path*, or None if it can't be read.

    Fix: the original never closed the file handle; `with` guarantees closure.
    """
    try:
        with open(path, "r") as f:
            return f.read()
    except Exception as ex:
        Logs.print_current_function_name_and_line_number(ex)
        return None
def sendMessage(self, fpath, folder):
    """Email the file at *fpath* as an alarm attachment for *folder*.

    Fix: `server` was referenced in the finally clause even when SMTP()
    raised before the name was bound (NameError); initialise it up front.
    """
    server = None  # must exist for the finally clause even if SMTP() fails
    try:
        fname = ntpath.basename(fpath)
        Logs.Print("Sending email for " + folder + " and file: " + str(fname))
        msg = MIMEMultipart()
        msg['Subject'] = "ALARM DETECTED ON " + folder
        msg['From'] = "ALERT! " + "<" + self.mail_from + ">"
        msg['To'] = self.mail_to
        part = MIMEBase('application', "octet-stream")
        with open(fpath, "rb") as attachment:
            part.set_payload(attachment.read())
        encoders.encode_base64(part)
        part.add_header('Content-Disposition',
                        'attachment; filename="' + fname + '"')
        msg.attach(part)
        server = SMTP(self.smtp_server, timeout=10)
        server.set_debuglevel(0)
        #server.login(self.username, self.password)
        server.sendmail(msg['From'], msg['To'], msg.as_string())
        Logs.Print('Message sent')
    except Exception as e:
        Logs.Print("Exception: " + str(e))
    finally:
        if server:
            server.quit()
def get_category_id(name):
    """Id of the ShopCategory named *name*; 0 when missing or on error."""
    try:
        return ShopCategory.objects.get(name=name).id
    except Exception as ex:
        Logs.print_current_function_name_and_line_number(ex)
        return 0
def get_articles(user_id, status):
    """User's forum articles with positive count for *status*; sentinel on error."""
    try:
        return UserForumArticle.objects.filter(
            user_id=user_id, status=status, count__gt=0)
    except Exception as ex:
        Logs.print_current_function_name_and_line_number(ex)
        return UserForumArticleMethod.get_none()
def __init__(self):
    """Discord bot setup: two shards, Korean command prefix, DB and blacklist."""
    super().__init__(command_prefix=["봇 ", "봇"], shard_count=2)
    self.logger = Logs.create_logger(self)
    self.main_logger = Logs.main_logger()
    self.loop = asyncio.get_event_loop()
    # Background tasks: presence rotation and database connection.
    self.loop.create_task(change_activity(self))
    self.loop.create_task(self.set_db())
    self.afk = {}
    self.blacklist = []
    # Owner-only moderation commands registered below.
    additional_commands = [
        self.add_to_black,
        self.rest_black,
        self.show_black,
    ]
    # NOTE(review): pickle.load on a local file — fine only if the file is
    # trusted; confirm it is never user-supplied.
    with open("blacklist.pickle", "rb") as f:
        self.blacklist = pickle.load(f)
    with open("argument_help.json", "r", encoding="utf-8") as f:
        self.argument_data = json.load(f)
    for i in TOKEN.initial_extensions:
        self.load_extension(i)
    for cmd in additional_commands:
        self.add_command(cmd)
def __init__(self, logs_to_cloud):
    """Authenticate with Twitter and build a retrying tweepy API client.

    Fix: wait_on_rate_limit was the *string* "True" (truthy, but the wrong
    type and inconsistent with the sibling constructors that pass the bool).
    """
    self.logs_to_cloud = logs_to_cloud
    self.logs = Logs(name="twitter", to_cloud=self.logs_to_cloud)
    self.twitter_auth = OAuthHandler(TWITTER_CONSUMER_KEY,
                                     TWITTER_CONSUMER_SECRET)
    self.twitter_auth.set_access_token(TWITTER_ACCESS_TOKEN,
                                       TWITTER_ACCESS_TOKEN_SECRET)
    self.twitter_api = API(auth_handler=self.twitter_auth,
                           retry_count=API_RETRY_COUNT,
                           retry_delay=API_RETRY_DELAY_S,
                           retry_errors=API_RETRY_ERRORS,
                           wait_on_rate_limit=True,  # was "True" (str)
                           wait_on_rate_limit_notify=True)
    self.twitter_listener = None
def get_memory_file(url):
    """Download *url* (10 s timeout) into an in-memory file; None on failure."""
    try:
        response = urllib2.urlopen(url, data=None, timeout=10)
        return cStringIO.StringIO(response.read())
    except Exception as ex:
        Logs.print_current_function_name_and_line_number(ex)
        return None
def set_node_attribute(node, attribute_key, attribute_value):
    """Set an attribute on *node* and return it; None if the call fails."""
    try:
        node.set(attribute_key, attribute_value)
    except Exception as ex:
        Logs.print_current_function_name_and_line_number(ex)
        return None
    return node
def get_user_by_id(user_id):
    """Look up a User by primary key; Users.get_none() on any failure."""
    try:
        return User.objects.get(id=user_id)
    except Exception as ex:
        Logs.print_current_function_name_and_line_number(ex)
        return Users.get_none()
def __init__(self, device_id, username, password, _logs_=None, scriptname=None, logId="O2TVGO/IPTVSimple"):
    """O2TV GO client: store credentials; session tokens are filled in later."""
    self.username = username
    self.password = password
    self._live_channels = {}
    # Session state populated after login (not fetched here).
    self.access_token = None
    self.subscription_code = None
    self.locality = None
    self.offer = None
    self.device_id = device_id
    ######## ADDED ########
    self.channel_key = None
    self.epg_id = None
    self.forceFromTimestamp = None
    self.hoursToLoadFrom = None
    self.hoursToLoad = None
    self.logIdSuffix = "/o2tvgo.py/O2TVGO"
    self.scriptname = scriptname
    self.logId = logId
    if _logs_:
        self._logs_ = _logs_
    else:
        # Deferred import: only needed when no logger is injected.
        from logs import Logs
        self._logs_ = Logs(scriptname, logId)
def get_user_extension(username):
    """UserExtension joined through the username; Users.get_none() on error."""
    try:
        return UserExtension.objects.get(user__username=username)
    except Exception as ex:
        Logs.print_current_function_name_and_line_number(ex)
        return Users.get_none()
def get(name):
    """Company row matching *name* exactly; None when missing or on error."""
    try:
        return Company.objects.get(name=name)
    except Exception as ex:
        Logs.print_current_function_name_and_line_number(ex)
        return None
class Twitter:
    """A helper for talking to Twitter APIs."""

    def __init__(self, logs_to_cloud):
        self.logs_to_cloud = logs_to_cloud
        self.logs = Logs(name="twitter", to_cloud=self.logs_to_cloud)
        self.twitter_auth = OAuthHandler(TWITTER_CONSUMER_KEY,
                                         TWITTER_CONSUMER_SECRET)
        self.twitter_auth.set_access_token(TWITTER_ACCESS_TOKEN,
                                           TWITTER_ACCESS_TOKEN_SECRET)
        self.twitter_api = API(auth_handler=self.twitter_auth,
                               retry_count=API_RETRY_COUNT,
                               retry_delay=API_RETRY_DELAY_S,
                               retry_errors=API_RETRY_ERRORS,
                               wait_on_rate_limit=True,
                               wait_on_rate_limit_notify=True)
        self.twitter_listener = None

    def start_streaming(self, callback):
        """Starts streaming tweets and returning data to the callback.

        Fix: the follow= list was split by a stray `])` after TRUMP_USER_ID,
        a syntax error; the accounts are now one list in a single call.
        """
        self.twitter_listener = TwitterListener(
            callback=callback, logs_to_cloud=self.logs_to_cloud)
        twitter_stream = Stream(self.twitter_auth, self.twitter_listener)
        self.logs.debug("Starting stream.")
        twitter_stream.filter(follow=[TRUMP_USER_ID,
                                      ELONMUSK_USER_ID,
                                      WARRENBUFFETT_USER_ID,
                                      YOURANONNEWS_USER_ID,
                                      WIKILEAKS_USER_ID,
                                      SEC_NEWS_USER_ID,
                                      SEC_ENFORCEMENT_USER_ID,
                                      RICHARDBRANSON_USER_ID])
        # If we got here because of an API error, raise it.
        if self.twitter_listener and self.twitter_listener.get_error_status():
            raise Exception("Twitter API error: %s" %
                            self.twitter_listener.get_error_status())
def __init__(self, enabled: bool, expirationSeconds: int, blockAfterFailures: int):
    """Failure tracker: records expire and repeat offenders get blocked."""
    self.database = {}  # per-key failure records
    self.enabled = enabled
    self.expirationSeconds = expirationSeconds
    self.blockAfterFailures = blockAfterFailures
    self.logs = Logs(self.__class__.__name__)
    self.security = Security()
def create_directory_path(path):
    """Ensure *path* exists as a directory tree; return it, or None on failure."""
    if Files.exists(path):
        return path
    try:
        Files.makedirs(path)
    except Exception as ex:
        Logs.print_current_function_name_and_line_number(ex)
        return None
    return path
def update(user_forum_article, increment=1):
    """Bump the article's counter by *increment* and persist; sentinel on error."""
    try:
        user_forum_article.count += increment
        user_forum_article.save()
    except Exception as ex:
        Logs.print_current_function_name_and_line_number(ex)
        return UserForumArticleMethod.get_none()
    return user_forum_article
def set_shop_has_detector(shop, has_detector):
    """Persist the has_detector flag on *shop*; Shops.get_none() on failure."""
    try:
        shop.has_detector = has_detector
        shop.save()
    except Exception as ex:
        Logs.print_current_function_name_and_line_number(ex)
        return Shops.get_none()
    return shop
def get_count_by_user(user_id, status):
    """Sum the `count` fields of the user's rows for *status*; 0 on any error.

    Idiom: sum() replaces reduce(operator.add, ..., 0) — same result, clearer.
    """
    try:
        ufs = UserForumArticle.objects.filter(user_id=user_id, status=status)
        return sum(o.count for o in ufs)
    except Exception as ex:
        Logs.print_current_function_name_and_line_number(ex)
        return 0
def __init__(self, logs_to_cloud):
    """Set up logging and a minimally-configured tweepy API client."""
    self.logs_to_cloud = logs_to_cloud
    self.logs = Logs(name="twitter", to_cloud=logs_to_cloud)
    auth = OAuthHandler(TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET)
    auth.set_access_token(TWITTER_ACCESS_TOKEN, TWITTER_ACCESS_TOKEN_SECRET)
    self.twitter_auth = auth
    self.twitter_api = API(auth)
def add(user_id, article_id, status):
    """Create and persist a UserForumArticle row; sentinel object on failure."""
    try:
        relation = UserForumArticle(
            user_id=user_id, article_id=article_id, status=status)
        relation.save()
        return relation
    except Exception as ex:
        Logs.print_current_function_name_and_line_number(ex)
        return UserForumArticleMethod.get_none()
def activate_detector_relation(detector_relation):
    """Flip the relation's state to active and persist; None on failure."""
    try:
        detector_relation.state = True
        detector_relation.save()
    except Exception as ex:
        Logs.print_current_function_name_and_line_number(ex)
        return None
    return detector_relation
def update_unread(comment):
    """Mark *comment* as read and persist; report success as a bool."""
    try:
        comment.is_read = True
        comment.save()
    except Exception as ex:
        Logs.print_current_function_name_and_line_number(ex)
        return False
    return True
def main():
    """Bootstrap logging and the VPN platform, then run the heartbeat loop."""
    Logs.setup_logger()
    setup_minimal_vpn_platform(message_client_name='vpn_service')
    logger.info('Starting VPN service')
    # Heartbeat runs in the foreground after its worker is started.
    heartbeat = HeartbeatService()
    heartbeat.start()
    heartbeat.run_heartbeat()
def static_update(address, address_dict):
    """Copy *address_dict* fields onto *address* and save; sentinel on failure."""
    try:
        for key, value in address_dict.items():
            Objects.set_value(address, key, value)
        address.save()
    except Exception as ex:
        Logs.print_current_function_name_and_line_number(ex)
        return Address.get_none()
    return address
def save_image(memory_file, path):
    """Write the image held in *memory_file* to *path*; True on success.

    Fix: removed the unused local `data = {}`.
    """
    try:
        img = Image.open(memory_file)
        img.save(path)
        return True
    except Exception as ex:
        Logs.print_current_function_name_and_line_number(ex)
        return False
def update(publish, publish_dict):
    """Copy *publish_dict* fields onto *publish* and save; None on failure."""
    try:
        for key, value in publish_dict.items():
            Objects.set_value(publish, key, value)
        publish.save()
    except Exception as ex:
        Logs.print_current_function_name_and_line_number(ex)
        return None
    return publish
def _web_info(self):
    """Parse self.req_info into BeautifulSoup; log and return False on a bad status."""
    usable = self.status_code == 200 or self.req_info != False
    if not usable:
        # Status was not 200: record it in the error log.
        Logs(self.url, self.status_code, self.date_time).logs_write()
        return False
    self.soup = BeautifulSoup(self.req_info.text, 'lxml')
    return True
def update_shop(shop, shop_dict):
    """Copy *shop_dict* fields onto *shop* and save; Shops.get_none() on failure."""
    try:
        for key, value in shop_dict.items():
            Objects.set_value(shop, key, value)
        shop.save()
    except Exception as ex:
        Logs.print_current_function_name_and_line_number(ex)
        return Shops.get_none()
    return shop
def __init__(self, _logs_=None, scriptname="O2TVGO/IPTVSimple", logId="O2TVGO/IPTVSimple"):
    """JSON-RPC helper; reuses the caller's Logs instance or makes its own."""
    if _logs_:
        self._logs_ = _logs_
    else:
        # Deferred import keeps module load light when a logger is injected.
        from logs import Logs
        self._logs_ = Logs(scriptname, logId)
    self.logIdSuffix = "/jsonrpc.py/JsonRPC"
    self.scriptname = scriptname
    self.logId = logId
def __init__(self, expirationMinutes: int):
    """Group-aware cache valid until now + expirationMinutes."""
    self.expirationMinutes = expirationMinutes
    self.cache = {}
    self.validUntil = (
        datetime.now() + timedelta(minutes=self.expirationMinutes))
    # Group matching defaults: case-sensitive, all groups must match ('and').
    self.groupCaseSensitive = True
    self.groupConditional = 'and'
    self.logs = Logs(self.__class__.__name__)
def get(username, start_datetime, end_datetime):
    """Games for *username* inside [start, end], ordered by id; None on error."""
    try:
        start = Datetimes.string_to_utc(start_datetime)
        end = Datetimes.string_to_utc(end_datetime)
        return Game.objects.filter(username=username,
                                   created_at__gte=start,
                                   created_at__lte=end).order_by("id")
    except Exception as ex:
        Logs.print_current_function_name_and_line_number(ex)
        return None
def _web_info(self):
    """Build the BeautifulSoup tree from the response; False + error log otherwise."""
    if self.status_code == 200 or self.req_info != False:
        self.soup = BeautifulSoup(self.req_info.text, 'lxml')
        return True
    # Non-200 status: write an entry to the error log and bail out.
    error_log = Logs(self.url, self.status_code, self.date_time)
    error_log.logs_write()
    return False
def _get_404(self):
    """Return False when the page's first <header> text starts with '404'.

    Fix: the bare `except:` also swallowed KeyboardInterrupt/SystemExit;
    narrowed to Exception. Control flow otherwise preserved: only the first
    selected element is ever inspected, and an empty selection falls through
    (returning None, as before).
    """
    # Select candidate elements via BeautifulSoup.
    info = self._get_select('header')
    try:
        for index in info:
            if index.text[:3] == '404':
                log = Logs(self.root_url, '404', str(self.date_time)[:-7])
                log.logs_write()
                return False
            else:
                # Mirrors the original: decide on the first element only.
                return True
    except Exception:
        return True
def app(self):
    """Main loop: watch paths, upload alarm files to Drive, email and archive.

    Fix: the original called files.remove() while iterating `files`, which
    skips the element after each removal; filtering now builds a new list.
    """
    Logs.Print("Starting Hermes App..")
    Logs.Print("Paths to monitor:")
    for monitor_path in self.conf_data['monitor_paths']:
        Logs.Print("{0}: {1}".format(monitor_path['name'],
                                     monitor_path['path']))
    Logs.Print("Archive path: {0}".format(self.conf_data['archive_path']))
    while 1:
        try:
            for monitor_path in self.conf_data['monitor_paths']:
                # Check if there are any files in these paths
                files = self.file_system.list_files(monitor_path['path'])
                # check if the files are ok to be processed
                ready = []
                for file in files:
                    if os.path.getsize(
                            file) < self.conf_data['file_size_min']:
                        current_time = time.time()
                        creation_time = os.path.getctime(file)
                        if (current_time - creation_time
                            ) < 60:  # older than 1 minutes are ok to be processed
                            Logs.Print(
                                "File: {0} is too small to be processed.".
                                format(file))
                            continue
                    ready.append(file)
                files = ready
                # wait 2 seconds
                time.sleep(2)
                if files:
                    time_now = datetime.datetime.utcnow()
                    print("\n\n")
                    Logs.Print(
                        "======================================================"
                    )
                    Logs.Print(
                        str(time_now) + ": Found alarm on " +
                        monitor_path['name'])
                    # Create google_drive object
                    google_drive = MyGoogleDrive(
                        self.conf_data['gdrive_cred_path'])
                    # Upload images into google drive
                    for file in files:
                        google_drive.push(file,
                                          monitor_path['gdrive_folder_id'])
                        self.mail.sendMessage(file, monitor_path['name'])
                    self.file_system.archive_files(files,
                                                   monitor_path['name'])
        except Exception as e:
            Logs.Print("Exception: " + str(e))
def __init__(self):
    """Authenticate with Twitter and build the retry-capable API client."""
    self.logs = Logs(name="twitter")
    handler = OAuthHandler(settings.CONSUMER_KEY, settings.CONSUMER_SECRET)
    handler.set_access_token(settings.ACCESS_TOKEN, settings.ACCESS_SECRET)
    self.twitter_auth = handler
    self.twitter_api = API(auth_handler=handler,
                           retry_count=API_RETRY_COUNT,
                           retry_delay=API_RETRY_DELAY_S,
                           retry_errors=API_RETRY_ERRORS,
                           wait_on_rate_limit=True,
                           wait_on_rate_limit_notify=True)
    self.twitter_listener = None
def run(self):
    """Entry point: prompt for a link, pick a resolution, and download.

    Fixes: `check_availability` is a method and the bare attribute access did
    nothing — it is now actually called; the bare `except:` is narrowed to
    Exception so Ctrl-C still works.
    """
    if self._isDebug:
        print(s("Mode Debug Active"))
    if self._link is None:
        self.tanyaLink()
    cls()
    B = Banner()
    B.cetakbanner()
    cetakgaris("Pilih Resolusi Video")
    self._YT = YouTube(self._link)
    self._YT.check_availability()  # was a no-op attribute access
    self.infoVideo()
    print("")
    time.sleep(3)
    pilihan = self.resolusi()
    cek = Logs()
    if not cek.cek():
        # Save location not configured yet: ask for one.
        print(s("%s" % prRed("Lokasi penyimpanan belum diset ")))
        cek.gantiPath()
    self._savePath = cek._path
    cls()
    B.cetakbanner()
    cetakgaris("Please Wait.. Downloading")
    try:
        if pilihan == 'video':
            super(DownloadYT, self).Downloaderffmpeg(self._Audio, self._Video,
                                                     self._savePath)
        elif pilihan == 'audio':
            super(DownloadYT, self).DownloadMP3(self._Audio, self._savePath)
        else:
            raise ValueError("Error")
    except Exception:
        print(s("Terjadi kesalahan!"))
    return sys.exit(prCyan("Terima kasih! ;) "))
def main():
    """CLI entry point: parse args and fire requests at one or more endpoints."""
    logs = Logs()
    service = RequestService(api_name="Api_name", logs=logs)
    if not sys.argv[1:]:
        usage()
    args = parse_args()
    if args.log_file_name:
        logs.write_file(filename=args.log_file_name)
    if args.target_url:
        # Single endpoint given directly on the command line.
        service.add_endpoints(args.target_url)
        print(service.start())
    elif args.file_to_read:
        # Endpoints listed one per line in a file.
        service.read_endpoints_from_file(args.file_to_read)
        print(service.start())
def archive_files(self, files, folder):
    """Move *files* into the archive directory, timestamped and tagged with *folder*."""
    try:
        Logs.Print("Archiving files to " + folder)
        for oldfile in files:
            stamp = str(self.get_creation_date(oldfile))
            # Make the timestamp filesystem-safe before using it as a name.
            safe_stamp = str(stamp).replace(' ', '_').replace('-', '_').replace(':', '_')
            newfile = self.archive + "/" + safe_stamp + "_" + folder + ".jpg"
            Logs.Print("Archiving file: " + oldfile + " to " + newfile)
            os.rename(oldfile, newfile)
    except Exception as e:
        Logs.Print("Exception: " + str(e))
def __init__(self, ldapEndpoint, dnUsername, dnPassword, bindDN, searchBase, searchFilter, groupCaseSensitive, groupConditional):
    """LDAP client: store connection settings and open the initial handle."""
    self.ldapEndpoint = ldapEndpoint
    self.searchBase = searchBase
    self.dnUsername = dnUsername
    self.dnPassword = dnPassword
    self.bindDN = bindDN
    self.searchFilter = searchFilter
    # Conditional is normalised to lower case ('and'/'or').
    self.groupConditional = groupConditional.lower()
    self.groupCaseSensitive = groupCaseSensitive
    # NOTE(review): disables TLS certificate validation globally — confirm
    # this is intentional for the deployment environment.
    ldap.set_option(ldap.OPT_X_TLS_REQUIRE_CERT, ldap.OPT_X_TLS_NEVER)
    self.connect = ldap.initialize(self.ldapEndpoint)
    self.connect.set_option(ldap.OPT_REFERRALS, 0)
    self.connect.set_option(ldap.OPT_DEBUG_LEVEL, 255)
    self.logs = Logs(self.__class__.__name__)
def cleanup(self, days, gdrive_folder_ids):
    """Delete Drive files older than *days*, sparing the listed folder ids."""
    try:
        cutoff = datetime.now() - timedelta(days=days)
        # Drive query wants a bare date; str(datetime) is "YYYY-MM-DD HH:...".
        query = ("trashed = false and modifiedDate < '" +
                 str(cutoff).split(' ')[0] + "'")
        items = self.drive.ListFile({"q": query, "maxResults": 1000}).GetList()
        for item in items:
            if item['id'] not in gdrive_folder_ids:
                Logs.Print(item['title'] + " will be deleted")
                item.Delete()
    except Exception as e:
        Logs.Print("Exception: " + str(e))
def __lock_pid(self):
    """Write our PID to the pidfile; bail out if another instance holds it."""
    self.pid = str(os.getpid())
    if os.path.isfile(self.pidfile):
        # Another instance (or a stale run) owns the lock file.
        Logs.Print("%s already exists, exiting" % self.pidfile)
        sys.exit()
    with open(self.pidfile, 'w') as handle:
        handle.write(self.pid)
def __start_work(self, quest_name, timestamp):
    """Mark the quest as started at *timestamp* and append a start_work log.

    Fix: the Logs handle was never closed when a validation assert raised
    after it was opened; the handle is now closed in a finally clause.
    """
    assert_person(quest_name in self.quests, f'quest named {quest_name} not found')
    assert_person(self.quests[quest_name]['start_timestamp'] is None, f'quest named {quest_name} is already started')
    quest = self.quests[quest_name]
    # проверить, что указанное время старта равно или позже последнего лога по этому квесту
    # (check the requested start time is not earlier than the quest's last log)
    logs_obj = Logs(self.logs_path)
    try:
        logs = logs_obj.get_logs(select=['timestamp'], where=f'quest_id="{quest["id"]}" AND action="log_work"')
        if logs:
            last_log_timestamp = logs[-1][0]
            last_log_timestamp = parse_timestamp(last_log_timestamp)
            assert_person(last_log_timestamp <= timestamp, f'the given start datetime ({timestamp})'
                          f' is before a last log of the quest ({last_log_timestamp})')
        quest['start_timestamp'] = timestamp
        logs_obj.log(
            timestamp=str(timestamp), quest_id=quest['id'], action='start_work', points={}, bookmark='')
    finally:
        logs_obj.close()
def twitter_callback(self, tweet):
    """Analyzes Trump tweets, trades stocks, and tweets about it."""
    # A fresh Logs per callback gives each thread its own httplib2 instance.
    logs = Logs(name="callback")
    self.logs.info("twitter_callback starts")
def push(self, file_path, folder_id):
    """Upload the file at *file_path* into the Drive folder *folder_id*."""
    try:
        filename = os.path.basename(file_path)
        Logs.Print('Uploading file: ' + str(filename) +
                   " on gdrive folder: " + str(folder_id))
        metadata = {
            'title': filename,
            'mimeType': 'image/jpg',
            "parents": [{"kind": "drive#fileLink", "id": folder_id}],
        }
        remote_file = self.drive.CreateFile(metadata)
        remote_file.SetContentFile(file_path)
        remote_file.Upload()
    except Exception as e:
        Logs.Print("Exception: " + str(e))
def __init__(self, my_binding, sendto_sock):
    """One-shot receiver: each instance stores a single file, then closes."""
    self.path = None
    # One file per instance: once received and closed, a new instance is
    # needed for the next transfer.
    self.file_out = None
    self.my_binding = my_binding
    self.target = sendto_sock
    self.__running = Event()
    # Main worker thread plus its wake-up event.
    self.__thread_main = Thread(target=self.__main_thread)
    self.__event_mainthread = Event()
    self.lastseq = -1        # seq of the last datagram received
    self.seq_expected = 0    # head of the receive window
    self.seq_and_info = []   # (send_seq, info) pairs
    self.recv_info = []      # accumulated received payload
    self.info_len_expected = 1000
    self.logs = Logs()
def __init__(self, logs_to_cloud):
    """Build one authenticated tweepy API per configured bot account."""
    self.logs_to_cloud = logs_to_cloud
    self.logs = Logs(name="twitter", to_cloud=logs_to_cloud)
    self.twitter = {}  # bot_account_id -> (auth handler, API client)
    for account in accounts:
        handler = OAuthHandler(TWITTER_CONSUMER_KEY, TWITTER_CONSUMER_SECRET)
        handler.set_access_token(account.twitter_access_token,
                                 account.twitter_access_token_secret)
        api = API(auth_handler=handler,
                  retry_count=API_RETRY_COUNT,
                  retry_delay=API_RETRY_DELAY_S,
                  retry_errors=API_RETRY_ERRORS,
                  wait_on_rate_limit=True,
                  wait_on_rate_limit_notify=True)
        self.twitter[account.bot_account_id] = (handler, api)
    self.twitter_listener = None