def get_paths(self, start, end) -> [Path]:
    """Return every path running from node *start* to node *end*.

    Logs and returns an empty list when no such path exists; otherwise the
    matches are returned sorted by length.
    """
    matching = [p for p in self.paths
                if p.source.id == start and p.destination.id == end]
    if not matching:
        Logger.print("Cannot get path for {} -> {}".format(str(start), str(end)))
        return []
    return self.sort_paths_by_len(matching)
def process(self):
    """Handle a login-challenge packet: refresh account metadata and reply.

    Returns the challenge response bytes on success, or None when the
    account lookup/update fails.
    """
    Logger.debug('[Login Challenge]: processing')
    self._parse_data()
    try:
        current_account = AccountManager.get_account(self.account_name)
        if current_account is None:
            raise Exception('Account \'{}\' is not found'.format(
                self.account_name))
        current_account.os = self.os
        current_account.ip_addr = '.'.join([str(i) for i in self.ip_addr])
        current_account.platform = self.platform
        current_account.timezone = self.timezone
        current_account.locale = self.locale
        AccountManager.update_account(current_account)
        session.current_account = current_account
        # TODO: define account exceptions
    except Exception as e:
        Logger.error('[Login Challenge]: {}'.format(e))
        # BUG FIX: the original returned self._get_response() from a
        # ``finally`` block, which overrode this ``return None`` (dead code)
        # and would also swallow any exception raised in the handler.
        # Error paths now really yield None.
        return None
    return self._get_response()
def get_node(self, node_id) -> Node:
    """Return the single node whose id equals *node_id*.

    Terminates the process when the id is missing or ambiguous.
    """
    node = list(filter(lambda node: node.id == node_id, self.nodes))
    if len(node) != 1:
        # BUG FIX: the original message said "link" in the *node* lookup,
        # making failures indistinguishable from get_link() failures.
        Logger.print("Cannot get node {}".format(node_id))
        # NOTE(review): exiting from a lookup helper is harsh; consider
        # raising an exception instead (kept to preserve behavior).
        exit(1)
    else:
        return node[0]
def get_link(self, link_id) -> Link:
    """Return the single link whose id equals *link_id*; exit on failure."""
    matches = [link for link in self.links if link.id == link_id]
    if len(matches) == 1:
        return matches[0]
    Logger.print("Cannot get link {}".format(link_id))
    exit(1)
def __kaptcha(self, img_content):
    """Recognize a login captcha image via the showapi OCR service.

    :param img_content: raw captcha image bytes
    :return: the decoded captcha text on success, False on any failure
    """
    Logger.info("正在自动识别登录验证码")
    img_base64 = base64.b64encode(img_content)
    # NOTE(review): hard-coded APPCODE credential checked into source;
    # should be moved to configuration/secrets management.
    header = {
        'Authorization': 'APPCODE ac83c83601fb40e2bf2436598cc75faf',
        'Content-Type': 'application/x-www-form-urlencoded; charset=UTF-8'
    }
    data = {
        'convert_to_jpg': '0',
        'img_base64': img_base64,
        'typeId': '35'
    }
    result = requests.post(
        url="https://ali-checkcode2.showapi.com/checkcode",
        headers=header,
        data=data).text
    try:
        result = json.loads(result)
        if result['showapi_res_code'] == 0:
            if result['showapi_res_body']['ret_code'] == 0:
                code = result['showapi_res_body']['Result']
                Logger.info('验证码识别成功,结果为' + str(code))
                return code
            else:
                Logger.warning('验证码识别失败')
                return False
        else:
            Logger.warning('Api调用失败,错误信息' + result['showapi_res_error'])
            return False
    # BUG FIX: json.loads raises ValueError (JSONDecodeError) and missing
    # dict keys raise KeyError; AttributeError could never occur here, so
    # malformed API responses previously escaped this handler entirely.
    except (ValueError, KeyError):
        Logger.error('验证码识别出现异常')
        return False
def job():
    """Pop one YouTube link from the redis queue and extract its data."""
    logger.LogInfo("Working...")
    youtube_link = redis.lpop(os.getenv('REDIS_YOUTUBE_VIDEO_LIST'))
    if youtube_link:
        extract_data(youtube_link.decode('ascii'))
    logger.LogInfo("Job Done")
def _generate_server_hash(self):
    """Derive the SHA1 server hash used to verify the client's auth proof.

    Consumes session.auth_seed (deleted after use — it is one-shot).
    """
    Logger.info('[Auth Session Manager]: generating server hash')
    auth_seed = session.auth_seed
    del session.auth_seed
    material = b''.join((
        self.account_name.encode('ascii'),
        bytes(4),
        self.client_seed,
        auth_seed.to_bytes(4, 'little'),
        self.session_key,
    ))
    self.server_hash = sha1(material).digest()
def __init__(self):
    """Set up proxy validation state from the PROXY configuration."""
    self.checkhttp = PROXY['HTTP']
    self.checkhttps = PROXY['HTTPS']
    self.validate_thread = PROXY['VALIDATE_THREAD']
    # current proxy pair, filled in during validation
    self.proxy = {'http': '', 'https': ''}
    # worker pool sized by the configured thread count
    self.pool = Pool(self.validate_thread)
    self.log = Logger('ProxyValidate')
    # collected results of validation (empty until checks run)
    self.validip = {}
def __init__(self, URL):
    """Store the target URL and derive the site name from its host part.

    :param URL: full URL string, e.g. 'https://example.com/page'
    :raises TypeError: when *URL* is not a string
    """
    # BUG FIX: use isinstance() instead of a type() comparison, and attach
    # the message to the exception instead of printing it separately.
    if not isinstance(URL, str):
        raise TypeError('Unexpected URL')
    self.URL = URL
    # host portion: text between '//' and the following '/'
    self.site_name = URL.split('//')[1].split('/')[0]
    self.log = Logger(self.URL)
def __init__(self):
    """Create an empty, unauthenticated session with a fresh SRP state."""
    self.current_account = None
    self.is_authenticated = False
    self.is_connection_accepted = False
    self.srp = SRP()
    self.recon_challenge = None
    self.auth_seed = None
    self.header_crypt = None
    # NOTE(review): logged at *error* level although this looks like a
    # normal event; kept as-is to preserve behavior.
    Logger.error('Session generated')
def __init__(self, apiurl, username="", password="", split="\r\n", other=None):
    """Configure the proxy-API fetcher.

    :param apiurl: endpoint returning proxy IPs as delimited text
    :param split: separator used to split the API response into IPs
    :param other: optional extra HTTP headers for the API request
    """
    self.apiurl = apiurl
    self.username = username
    self.password = password
    self.split = split
    # BUG FIX: the original default ``other={}`` is a mutable default
    # argument shared across every call; use a None sentinel instead.
    self.other = {} if other is None else other
    self.open = True
    self.valide = ProxyValidate()
    self.log = Logger('ApiGetIp')
def __init__(self, config):
    """Copy redis settings from *config*, connect, then start the server."""
    Logger.info("欢迎进入主服务器系统,即将开始进行配置验证")
    # mirror the relevant configuration fields onto this instance
    for field in ('identity', 'redis_host', 'redis_port',
                  'redis_database', 'redis_password'):
        setattr(self, field, getattr(config, field))
    self.conn = self.__conn_redis()
    Logger.info("配置验证完成,启动主服务器系统")
    self._run()
def fix_csv(bigcsv, smallcsv):
    """Rebuild a fixed final summary for *bigcsv* from the small summary CSV."""
    summary = genfromtxt(smallcsv, delimiter=';', dtype='U')
    # run id looks like "<digits>_<digits>" inside the small csv path
    run_id = re.search('/([0-9]+_[0-9]+)', smallcsv).group(1)
    demands_file = "{}.dem".format(run_id)
    Logger.create_final_summary({'net_avg': summary[1][0]},
                                demands_file,
                                summary[1][4],   # number of cores
                                summary[1][5],   # duration
                                'summary_fixed',
                                bigcsv)
def _parse_data(self):
    """Unpack the raw login-proof packet into its individual fields.

    On a malformed packet the struct error is logged and the attributes
    are left unset.
    """
    try:
        parsed_data = unpack(LoginProof.LOGIN_PROOF_FORMAT, self.packet)
        self.client_ephemeral = int.from_bytes(parsed_data[0], 'little')
        self.client_proof = parsed_data[1]
        self.checksum = parsed_data[2]
        self.unk = parsed_data[3]
    except StructError as e:
        # BUG FIX: the original formatted the StructError *class* into the
        # message instead of the caught exception instance ``e``.
        Logger.error(
            '[Login Proof]: on unpacking data(len={}), error={}'.format(
                len(self.packet), e))
def create_account(cursor: MySQLCursor, account: Account):
    """Insert *account* (name upper-cased) into the account list table.

    Database errors are logged and swallowed.
    """
    table = AccountTable.ACCOUNT_LIST.value
    query = ('INSERT INTO ' + table +
             '(name, salt, verifier) VALUES (%s, %s, %s)')
    values = (account.name.upper(), account.salt, account.verifier)
    try:
        cursor.execute(query, values)
    except (DataError, ProgrammingError, DatabaseError) as e:
        Logger.error('[Account Manager]: (create_account) error {}'.format(e))
def _parse_account_name(self, buffer: BytesIO):
    """Read a NUL-terminated ASCII account name from *buffer*."""
    Logger.info('[Auth Session Manager]: parsing account name')
    chars = bytearray()
    while True:
        byte = buffer.read(1)
        # stop at end-of-buffer or the NUL terminator
        if not byte or byte == b'\x00':
            break
        chars += byte
    return bytes(chars).decode('ascii')
def parse_config_file(self):
    """Parse the YAML config file and return the content object.

    Exits the process (status 1) when the file cannot be found.
    """
    try:
        with open(self.config_path, "r") as config_file:
            return yaml.safe_load(config_file)
    except FileNotFoundError:
        Logger.publish_log_error("Config file not found {}".format(
            traceback.format_exc()))
        sys.exit(1)
def save_data():
    """Drain the redis details queue, appending each entry to a CSV file.

    Pops JSON-encoded video details one at a time and appends them as rows
    to ./csv/YoutubeLinkDetails.csv, writing the header on first creation.
    Stops when redis.lpop returns nothing.
    """
    # BUG FIX: the original recursed once per queue entry, risking
    # RecursionError on long queues; a loop drains the queue safely.
    while True:
        logger.LogInfo("Working...")
        # pop out the last data of redis. (like stack in data structure)
        logger.LogInfo("Popping out last pushed data from redis")
        # "REDIS_YOUTUBE_VIDEO_DETAILS" comes from .env
        details = redis.lpop(os.getenv('REDIS_YOUTUBE_VIDEO_DETAILS'))
        print("details::", details)
        if not details:
            logger.LogInfo("process complete done")
            return
        # convert string to python dictionary
        details = json.loads(details.decode('ascii'))
        # BUG FIX: the original rebuilt list(details.keys()) once per key
        # (O(n^2)); a single list() call yields the same header order.
        fieldnames = list(details.keys())
        header = ', '.join(fieldnames)
        column_value = ', '.join('{}'.format(json.dumps(details[k]))
                                 for k in fieldnames)
        csv_file_name = "YoutubeLinkDetails.csv"
        file_path = "./csv/{}".format(csv_file_name)
        if os.path.exists(file_path):
            # 'a' mode appends rows to the existing file
            with open(file_path, 'a') as fp:
                fp.writelines("\n{}".format(column_value))
        else:
            # first run: write the header line, then the first row
            with open(file_path, 'w') as fp:
                fp.writelines(header)
                fp.writelines("\n{}".format(column_value))
def __init__(self):
    """Collect the wx.qq.com login endpoints and start the login flow."""
    self.logger = Logger('Login')
    # endpoint URLs used throughout the web-WeChat login handshake
    self.get_base_cookies = "https://wx.qq.com/"
    self.get_ptqr_url = "https://login.wx.qq.com/jslogin?appid=wx782c26e4c19acffb&redirect_uri=https%3A%2F%2Fwx.qq.com%2Fcgi-bin%2Fmmwebwx-bin%2Fwebwxnewloginpage&fun=new&lang=zh_CN&_=1488277801955"
    self.qrcode = "https://login.weixin.qq.com/qrcode/"
    # NOTE(review): opens code.png in text mode and never closes the
    # handle; kept to preserve behavior, but 'wb' plus a context manager
    # would suit binary image data better.
    self.code = open('code.png', 'w')
    self.check = "https://login.wx.qq.com/cgi-bin/mmwebwx-bin/login?loginicon=true&uuid={uuid}&tip=0&r=2073077620&_=1488280416522"
    self.jm = "http://jiema.wwei.cn/fileupload/index/op/jiema.html"
    self.baseinfo = "https://wx2.qq.com/cgi-bin/mmwebwx-bin/webwxinit?r=2056259748&lang=zh_CN&pass_ticket="
    self.statusnotify = "https://wx2.qq.com/cgi-bin/mmwebwx-bin/webwxstatusnotify"
    self.info = {}
    self._run()
def __get_random_account(self):
    """Pick a random 'username----password' pair from Login/account.txt.

    :return: {'username': ..., 'password': ...} dict, or False when the
             chosen line is empty / no accounts are available
    """
    # BUG FIX: the original ``open(...).readlines()`` leaked the file
    # handle; a context manager closes it deterministically.
    with open("Login/account.txt", "r") as f:
        account = [line.rstrip('\n') for line in f]
    Logger.info("加载账号清单结束,共获取" + str(len(account)) + "个账号")
    # BUG FIX: random.randint(0, -1) raised ValueError on an empty file.
    if not account:
        return False
    acc = random.choice(account)
    if acc:
        parts = str(acc).split('----')
        return {'username': parts[0], 'password': parts[1]}
    return False
class ApiGetIp(object):
    """Periodically pulls proxy IPs from an HTTP API and validates them."""

    def __init__(self, apiurl, username="", password="", split="\r\n", other=None):
        """Configure the proxy-API fetcher.

        :param apiurl: endpoint returning proxy IPs as delimited text
        :param split: separator used to split the API response into IPs
        :param other: optional extra HTTP headers for the API request
        """
        self.apiurl = apiurl
        self.username = username
        self.password = password
        self.split = split
        # BUG FIX: the original default ``other={}`` is a mutable default
        # argument shared across every call; use a None sentinel instead.
        self.other = {} if other is None else other
        self.open = True  # run() loops while this stays True
        self.valide = ProxyValidate()
        self.log = Logger('ApiGetIp')

    def run(self):
        """Fetch, split and hand off proxies for validation in a loop."""
        while self.open:
            self.log.info("正在从代理API获取代理IP")
            try:
                result = requests.get(self.apiurl, headers=self.other)
            except requests.RequestException as error:
                # BUG FIX: ``"str" + error`` raised TypeError (cannot
                # concatenate str and an exception); convert it first.
                self.log.error("API访问失败,原因为" + str(error))
                time.sleep(5)
                continue
            if result.status_code == 200:
                ip_list = result.text.split(self.split)
                self.log.info("本次从API处获取到的代理数量为" + str(len(ip_list)) + "枚,即将进入有效性验证")
                # validate in the background so fetching is not blocked
                threading.Thread(target=self.valide.check,
                                 args=(ip_list,)).start()
                time.sleep(20)
            else:
                self.log.warning("API接口似乎出现了一些问题,请检查后重试!错误码为" + str(result.status_code))
                time.sleep(5)
def _run(self):
    """Log in to OPENLAW, then hand the session cookies to the crawler."""
    Logger.info("启动主服务器成功")
    Logger.info("开始进行OPENLAW模拟登陆")
    cookies = Login()._run()
    Logger.info("模拟登陆完成,已经获取用户cookies")
    Logger.info("进入爬虫系统")
    Clawer(self, cookies)._run_master()
def update_account(cursor: MySQLCursor, account: Account):
    """Persist mutable login metadata (ip, timezone, os, ...) for *account*.

    Database errors are logged and swallowed.
    """
    table = AccountTable.ACCOUNT_LIST.value
    query = ('UPDATE ' + table +
             ' SET ip = %s, timezone = %s, os = %s, platform = %s, locale = %s' +
             ' WHERE name = %s')
    values = (account.ip_addr, account.timezone, account.os,
              account.platform, account.locale, account.name.upper())
    try:
        cursor.execute(query, values)
    except (DataError, ProgrammingError, DatabaseError) as e:
        Logger.error('[Account Manager]: (update_account) error {}'.format(e))
async def handle_connection(self, reader: StreamReader, writer: StreamWriter):
    """Serve one login connection until EOF or an empty auth response."""
    peername = writer.get_extra_info('peername')
    Logger.info(
        '[Login Server]: Accepted connection from {}'.format(peername))
    auth = AuthManager(reader, writer)
    while not reader.at_eof():
        # an empty/None response from the auth manager ends the session
        if not await auth.process():
            break
    Logger.warning('[Login Server]: closing...')
    writer.close()
def _get_response(self):
    """Pack the successful login-proof response.

    Returns the packed bytes, or None when packing fails (error logged).
    """
    try:
        return pack(
            '<2B20sQ2B',
            LoginOpCode.LOGIN_PROOF.value,
            LoginResult.SUCCESS.value,
            session.srp.server_proof,
            0x00800000,  # unk1
            0x00,        # unk2
            0x00         # unk3
        )
    except Exception as e:
        Logger.error('[Login Proof]: {}'.format(e))
def __init__(self, file_path):
    """Remember the source file path and reset all parsing state."""
    self.file_path = file_path
    self.info = {}
    self._data = {}
    self.company_pinyin = ''
    self.company_name = ''
    self.logger = Logger()
def _get_auth_response(self):
    """Build the SMSG_AUTH_RESPONSE world packet announcing AUTH_OK."""
    payload = pack(
        '<BIBIB',
        AUTH_SESSION_RESPONSE_CODES.AUTH_OK.value,
        0x00,  # BillingTimeRemaining
        0x00,  # BillingPlanFlags
        0x00,  # BillingTimeRested
        0x01   # Expansion, 0 - normal, 1 - TBC, must be set manually for each account
    )
    Logger.info('[Auth Session Manager]: sending SMSG_AUTH_RESPONSE')
    return WorldPacket(WorldOpCode.SMSG_AUTH_RESPONSE.value,
                       payload).to_send()
def get_metrics_from_cloud_watch(self):
    """Performs an API call to boto3 to get metrics for cloudwatch

    :return: None or Dict with metrics
    """
    # guard clause: only maria and aurora engines are supported
    if self.db_engine != "maria" and self.db_engine != "aurora":
        Logger.publish_log_error("DB engine not specified or not supported, Supported engines are aurora and maria")
        return None
    db_metrics_dict = self.METRICS.get(self.monitoring_type).get(self.db_engine)
    metric_data_queries = [
        {
            "Id": "{}_{}".format(self.db_name.replace("-", "").lower(),
                                 metric.lower()),
            "MetricStat": {
                "Metric": {
                    "Namespace": self.NAMESPACE,
                    "MetricName": metric,
                    "Dimensions": [
                        {"Name": "DBInstanceIdentifier",
                         "Value": self.db_name}
                    ]
                },
                "Period": self.period,
                "Stat": "Maximum"
            },
            "Label": metric,
            "ReturnData": True
        }
        for metric in db_metrics_dict.keys()
    ]
    # query a window offset into the past so not-yet-published datapoints
    # are excluded
    offset_minutes = int(self.period / 60)
    return self.cloudwatch_client.get_metric_data(
        MetricDataQueries=metric_data_queries,
        StartTime=datetime.utcnow() - timedelta(minutes=offset_minutes + 3),
        EndTime=datetime.utcnow() - timedelta(minutes=offset_minutes),
        ScanBy='TimestampDescending'
    )
def _get_response(self):
    """Assemble the realmlist response: header + realm entries + footer."""
    Logger.debug('[Realmlist]: processing')
    realm_packet = realm.get_state_packet(RealmFlags.NORMAL.value,
                                          RealmPopulation.LOW.value)
    realm_bytes = b''.join([realm_packet])
    num_realms = 1  # single hard-coded realm for now
    header = pack(Realmlist.REALMLIST_RESPONSE_HEADER_FORMAT,
                  LoginOpCode.REALMLIST.value,
                  Realmlist.MIN_RESPONSE_SIZE + len(realm_bytes),
                  0x00,
                  num_realms)
    footer = pack(Realmlist.REALMLIST_RESPONSE_FOOTER_FORMAT, 0)
    return header + realm_bytes + footer
def __init__(self, name, date, q, event=None):
    """Worker thread primed with schema handles and current index metadata."""
    Thread.__init__(self, group=None, target=None, name=name,
                    args=(), kwargs=None, daemon=False)
    self.ready = event  # presumably signalled when setup completes — TODO confirm against caller
    self.log = Logger('lib')
    self.q = q
    self.today = date
    # schema handles used for reading/writing records
    self._index = schema.get_schema('INDEX')
    self.meta_index = schema.get_schema('META_INDEX')
    self.detail = schema.get_schema('DETAIL_MODEL')
    # Collect basic data from current records in db
    con = self._index.get_con()
    self.meta = pd.read_sql_query('SELECT * FROM meta.index', con)
def main():
    """Exercise every Logger level once as a quick smoke test."""
    log = Logger()
    log.clear()
    for emit in (log.log, log.debug, log.error, log.fatal_error):
        emit("This is a test.")