def status():
    """Log the running state of every configured application process.

    Emits an info record ([name, pids]) for each process that has live
    pids and an error record for each process that does not.
    """
    for process in constants.APP_PROCESSES:
        pids = find_pid(process.get('token'))
        if pids:
            ApiLogging.info([process.get('name'), pids], True)
        else:
            ApiLogging.error([process.get('name'), pids], True)
def decode(hash_type, hash_code):
    """Submit a hash to the decoding site and handle the answer page.

    :param hash_type: hash algorithm identifier for the site's type field
    :param hash_code: hash digest string to decode
    :return: None; results are logged / passed to ``log()``
    """
    if not login():
        ApiLogging.warning('login fail')
        return
    hash_field = robot.find_by_css('#ctl00_ContentPlaceHolder1_TextBoxInput')
    if hash_field is None:
        return
    type_field = robot.find_by_css('#ctl00_ContentPlaceHolder1_InputHashType')
    # BUG FIX: the original logged the builtins `hash` and `type` instead of
    # the arguments, filled the form fields with those builtins, and had an
    # unconditional `return None` that made everything below unreachable
    # (compare with the working class-based decode elsewhere in this file).
    ApiLogging.info('type: ' + str(hash_type) + ' hashcode: ' + hash_code)
    hash_field.set_value(hash_code)
    type_field.set_value(hash_type)
    fill_captcha_if_needed()
    submit_button = robot.find_by_css("#ctl00_ContentPlaceHolder1_Button1")
    submit_button.click()
    result = robot.find_by_css('#ctl00_ContentPlaceHolder1_LabelAnswer')
    ApiLogging.info("result: %s" % result.get_text().split('\n')[0])
    if result.get_text() == 'Verify code error!':
        # wrong captcha: retry the whole submission
        decode(hash_type, hash_code)
    elif 'payment' in result.get_text():
        pr = robot.find_by_contain_text('a', 'Purchase')
        if pr:
            pr.click()
            result = robot.find_by_css('#ctl00_ContentPlaceHolder1_LabelAnswer')
            ApiLogging.info("result: %s" % result.get_text().split('\n')[0])
    elif 'Not Found' in result.get_text():
        ApiLogging.warning('Not Found')
    else:
        log(result.get_text().split('\n')[0])
def download(data):
    """
    Read a file relative to the current working directory and return it.

    :param bytearray data: contain file path
    :return: file content if exists
    :rtype: dict
    """
    params = Command.__get_data(data)
    if not params:
        return {
            'data': 'path format is wrong',
            'status': constants.STATUS_ERROR
        }
    try:
        bp = os.path.realpath('.')
        # NOTE(review): params is joined into the path unchecked -- a '../'
        # component could escape the base directory; confirm callers are
        # trusted or sanitize here.
        path = bp + '/' + params
        ApiLogging.info("download path: " + str(path))
        if not os.path.exists(path):
            return {'data': 'File Not Found', 'status': 'error'}
        # FIX: context manager closes the handle even when read() raises
        with open(path, 'r') as file:
            raw_data = file.read()
        return {'data': raw_data, 'status': 'success'}
    except Exception:
        # keep the original best-effort contract: report, never raise
        return {
            'data': 'internal exception',
            'status': constants.STATUS_ERROR
        }
def set_attribute(self, attr, value=True):
    """Deprecated no-op kept for API compatibility.

    Historically this forwarded a Webkit setting (``auto_load_images``,
    ``plugins_enabled``, ``private_browsing_enabled``,
    ``local_storage_enabled``, ...) to the backend via a SetAttribute
    command; the command is no longer issued and calling this only logs
    a deprecation message. ``value`` is accepted but ignored.
    """
    ApiLogging.info("%s method is deprecated" % self._normalize_attr(attr))
def run(self):
    """
    create new socket for communicate with client, receive header data
    contain request information, process it and send accept, then listen
    for receive body data
    """
    self.tcpSocket = QtNetwork.QTcpSocket()
    if not self.tcpSocket.setSocketDescriptor(self.socketDescriptor):
        raise Exception(self.tcpSocket.errorString())
    # block until the client's header arrives, then take everything buffered
    self.tcpSocket.waitForReadyRead()
    buffer = self.tcpSocket.readAll()
    try:
        try:
            # newer clients send a 7-field header that includes module_version
            self.size, self.priority, self.timeout, self.route, self.module_version, self.call_back, self.token = self.extract(
                buffer)
        except ValueError:
            # legacy 6-field header without module_version
            # NOTE(review): on this path self.module_version presumably comes
            # from a class-level default, otherwise the check below would
            # raise AttributeError and reject the request -- confirm
            self.size, self.priority, self.timeout, self.route, self.call_back, self.token = self.extract(buffer)
        self.call_back = base64.b64decode(self.call_back).decode()
        self.token = utils.decrypt_rsa(self.token, utils.get_rsa())
        self.route_name = self.route_to_name(self.route)
        ApiLogging.info('route is: ' + self.route_name)
        if not self.module_version:
            self.module_version = 'v_1_0'
    except Exception as e:
        # any header-parse / decode / decrypt failure rejects the request
        print("rejected", e)
        self.tcpSocket.write('{"status":"reject"}'.encode())
        self.__close()
    else:
        # header accepted: tell the client to start streaming the body
        self.size = int(self.size)
        self.tcpSocket.write('{"status":"accept"}'.encode())
        self.tcpSocket.waitForBytesWritten()
        self.tcpSocket.readyRead.connect(self.__read_data)
        self.tcpSocket.waitForDisconnected(
            30 * 60 * 1000)  # FIXME: change to better solution for big data timeout and handle incompleted data
def put(self, item):
    """Serialize *item* and publish it onto this RabbitMQ queue.

    :param item: the object to enqueue
    :return: True when the item was published successfully
    :raises SerializedError: when the item cannot be pickled
    :raises RabbitmqConnectionError: when the broker stays unreachable
        after one reconnect attempt
    """
    try:
        payload = dill.dumps(item)
    except PicklingError:
        raise SerializedError('can not serialized request')
    # try at most twice: a failed publish triggers a single reconnect
    for _ in range(2):
        try:
            # default exchange routes directly to the queue named routing_key
            self.channel.basic_publish(
                exchange='',
                routing_key=self.name,
                body=payload,
                properties=pika.BasicProperties(
                    delivery_mode=2,  # make message persistent
                ))
            ApiLogging.info('add to queue')
            return True
        except pika.exceptions.AMQPConnectionError:
            self.connection, self.channel = self.__reconnect()
    # both attempts failed
    raise RabbitmqConnectionError('can not access to MQ')
def download_by_process_id(data):
    """
    Zip a process's storage directory and return it base64 encoded.

    :param data: raw request payload, parsed by Command.__get_data into a
        dict expected to contain 'process_id'
    :return: dict with base64 zip bytes under 'data' on success, or an
        error payload
    :rtype: dict
    """
    params = Command.__get_data(data)
    base_data_path = BASE_APP_PATH + '/modules/storage'
    if not params:
        return {
            'data': 'process_id format is wrong',
            'status': constants.STATUS_ERROR
        }
    try:
        # NOTE(review): process_id is used in the path unchecked -- confirm
        # it cannot contain '../' when it comes from an untrusted caller.
        full_data_path = base_data_path + '/' + params['process_id']
        ApiLogging.info("download path: " + str(full_data_path))
        if not isdir(full_data_path):
            return {'data': 'process_id Not Found', 'status': 'error'}
        shutil.make_archive(full_data_path, 'zip', full_data_path)
        # BUG FIX: local variable was named `bytes`, shadowing the builtin
        with open(base_data_path + '/' + params['process_id'] + '.zip', "rb") as f:
            zip_bytes = f.read()
        encoded = base64.b64encode(zip_bytes)
        return {'data': encoded, 'status': 'success'}
    except Exception as e:
        print(e)
        return {
            'data': 'internal exception',
            'status': constants.STATUS_ERROR
        }
def __run(process_name, action_name):
    """Spawn the named process as a detached ``python3`` child.

    ``action_name`` is accepted for interface compatibility but is not
    used here. The child's stderr is appended to the process's configured
    log file and its resource limits are applied via __set_limit.
    """
    process = __get_process(process_name)
    if not process:
        return
    ApiLogging.info(process.get('name') + ' running successful!', True)
    script = process.get('path') + '/' + process.get('name')
    with open(process.get('log'), 'a+') as err:
        subprocess.Popen(['python3', script, process.get('token')],
                         close_fds=True, stderr=err, bufsize=1,
                         preexec_fn=__set_limit)
def check_result(self, result):
    """Classify the answer element of the decode result page.

    :param result: DOM element wrapper exposing ``get_text()``
    :return: VERIFY on captcha failure, PAYMENT when purchase is required,
        NOT_FOUND when the hash is unknown, or None when the text is an
        actual answer
    """
    # hoist: the original re-queried the DOM text in every branch
    text = result.get_text()
    if text == 'Verify code error!':
        return VERIFY
    if 'payment' in text:
        ApiLogging.info('found payment')
        return PAYMENT
    if 'Not Found' in text:
        return NOT_FOUND
    return None
def send_signal():
    """Send SIGUSR1 to the single running ``process_sync.py`` instance.

    Warns when more than one instance is alive; does nothing when none is.
    """
    pids = []
    for proc in constants.APP_PROCESSES:
        if proc.get('name') == 'process_sync.py':
            pids = find_pid(proc.get('token'))
    count = len(pids)
    if count > 1:
        ApiLogging.warning('Too many sync process running')
    elif count == 1:
        psutil.Process(pids[0]).send_signal(signal.SIGUSR1)
def run(self, file, default_size, remove_lines):
    """Trim the oldest lines from a log file once it grows past a size cap.

    :param file: path of the log file to rotate
    :param default_size: size threshold in kilobytes (multiplied by 1000)
    :param remove_lines: number of leading lines to drop when trimming
    """
    size = default_size * 1000
    if not os.path.isfile(file):
        ApiLogging.critical("the {0} is not existed".format(file))
        return
    if os.path.getsize(file) < size:
        return
    with open(file, 'r') as f:
        lines = f.readlines()
    if len(lines) < remove_lines:
        ApiLogging.critical(
            "the specific number of lines is greater than number of desired {0} file lines".format(file))
        # BUG FIX: the original fell through and rewrote the file anyway,
        # wiping it (lines[remove_lines:] is empty in this case)
        return
    with open(file, 'w') as f:
        f.writelines(lines[remove_lines:])
def __init__(self):
    """Lazily load the shared TLD list on first instantiation.

    The list is cached on the FetchResult class so the file is read once
    per process; every instance shares the same list object.
    """
    if not FetchResult.TLDS:
        # project root is four directories above this file
        path = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
        ApiLogging.info("open TLDS file for read...")
        # FIX: context manager guarantees the file is closed even if a
        # read raises (the original closed it manually after the loop)
        with open(path + '/vendor/data/tlds.txt') as file:
            for line in file:
                entry = line.strip()
                if entry:
                    FetchResult.TLDS.append(entry)
    self.internettlds = FetchResult.TLDS
def stop(process_name=None):
    """Stop one named process, or every configured process.

    :param process_name: optional process file name; when omitted (or not
        found by __get_process) every configured process is stopped
    """
    # FIX: the named and all-processes branches were copy-pasted; both now
    # share one loop over the selected process definitions (assumes
    # __get_process(name).get('name') == name, as the original logs implied)
    if process_name and __get_process(process_name) is not None:
        targets = [__get_process(process_name)]
    else:
        targets = constants.APP_PROCESSES
    for process in targets:
        pids = find_pid(process.get('token'))
        if not pids:
            ApiLogging.warning(process.get('name') + ' is not running!', True)
        else:
            for pid in pids:
                ApiLogging.info(process.get('name') + ' stopped successful!', True)
                proc = psutil.Process(pid)
                if proc.is_running():
                    proc.kill()
    try:
        __cleanup()
    except Exception:
        # best-effort cleanup: failures are deliberately ignored
        pass
def __init__(self):
    """Prepare the v1.0 migration: resolve paths and generate migrations."""
    # shared setup used by both upgrade and downgrade
    ApiLogging.info('update to version 1.0')
    # project root is three directories above this file
    base_dir = os.path.dirname(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
    migration_path = base_dir + '/migrations/v_1_0'
    model_path = base_dir + '/models'
    self.manager = DatabaseManager(Database.db, directory=migration_path)
    self.mg = Migrate()
    # base_model is abstract and has no table of its own
    self.mg.create_migration(self.manager, model_path, migration_path,
                             ignore=['base_model'])
def fill_captcha_if_needed():
    """Detect a captcha on the current page and, when present, solve it.

    Screenshots the page, crops the captcha image by its DOM position,
    feeds the crop to resolve_captcha() and types the answer into the
    captcha field. No-op when the field is absent.
    """
    captcha_field = robot.find_by_css('#ctl00_ContentPlaceHolder1_TextBoxCode')
    if captcha_field is None:
        return
    ApiLogging.info('captcha needed')
    robot.set_viewport_size(1280, 800)
    img = robot.find_by_css("#Image1")
    rect = img.get_position()
    box = (int(rect['left']), int(rect['top']), int(rect['right']), int(rect['bottom']))
    # SECURITY FIX: tempfile.mktemp is race-prone (the name can be claimed
    # by another process before use); mkstemp creates the file atomically
    fd, filename = tempfile.mkstemp('.png')
    os.close(fd)
    robot.save_as_png(filename, 1280, 800)
    image = Image.open(filename)
    os.unlink(filename)
    captcha_image = image.crop(box)
    captcha_image.save('%s.png' % unique_time, 'png')
    captcha_field.set_value(resolve_captcha('%s.png' % unique_time))
    os.remove('%s.png' % unique_time)
def __load_module(self):
    """
    load module if not loaded
    :return: module instance
    :rtype: BaseModule
    """
    # modules.<route>.<version>.module
    module_path = '.'.join(['modules', self.task_model.route,
                            self.task_model.module_version, 'module'])
    try:
        ApiLogging.info("import")
        app_module = importlib.import_module(module_path)
        return app_module.Module(self.task_model)
    except Exception as e:
        # TODO: save exception same as run method exception part
        ApiLogging.error("import exception " + str(e))
        return None
def __init__(self, parent=None):
    """Cache the TLD list at class level and bind it to this instance.

    :param parent: owning module instance
    """
    self.parent = parent  # type: BaseModule
    if not SimilarDomain.TLDS:
        ApiLogging.info("open TLDS file for read...")
        # project root is four directories above this file
        path = os.path.dirname(os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
        # FIX: context manager closes the file even when a read raises
        # (the original closed it manually after the loop)
        with open(path + '/vendor/data/tlds.txt') as file:
            for line in file:
                entry = line.strip()
                if entry:
                    SimilarDomain.TLDS.append(entry)
    self.internettlds = SimilarDomain.TLDS
    self.results = list()
def run(self):
    """Main dispatch loop: pull requests, persist them, hand them to the pool."""
    while 1:
        # sleep for decrease cpu usage
        sleep(0.01)
        QCoreApplication.processEvents()
        from_db = False
        # back-pressure: never exceed the configured concurrency limit
        if self.running_tasks.__len__() >= self.limit():
            continue
        try:
            if self.items:
                # recovered/persisted tasks take priority over the live queue
                from_db = True
                item = self.items.pop()
            else:
                item = self.queue.get()  # type: RequestObject
            if item:
                if not from_db:
                    # fresh request: persist a TaskModel row before running
                    task_model = TaskModel()
                    task_model.route = item.route
                    task_model.process_id = item.process_id
                    task_model.status = constants.STATUS_RUNNING
                    task_model.data = item.data
                    task_model.call_back = item.call_back
                    task_model.token = item.token
                    task_model.module_version = item.module_version
                    task_model.queue_name = self.queue_name
                    task_model.save()
                else:
                    # item popped from self.items is already a TaskModel
                    task_model = item
                task = Task(task_model)
                if task.instance_module:
                    task.instance_module.task_finished.connect(self.__task_finished)
                task.setAutoDelete(True)
                self.running_tasks.update({item.process_id: task})
                # check cancel or pause request before start
                self.apply_action_by_pid(task, item.process_id)
                self.pool.start(task)
            else:
                # TODO: set error alarm
                ApiLogging.error('problem running task')
        except Exception as e:
            ApiLogging.error('process exception' + str(e))
            # TODO: set error alarm
            continue
def validate_email(email, check_mx=False, verify=False, debug=False, smtp_timeout=10, sending_email=''):
    """Syntax-check an email address, optionally probing its MX servers.

    :param email: address to validate
    :param check_mx: when True, ask each MX host whether it accepts RCPT TO
    :param verify: unused (kept for interface compatibility)
    :param debug: unused (kept for interface compatibility)
    :param smtp_timeout: unused (kept for interface compatibility)
    :param sending_email: MAIL FROM address used during the SMTP probe
    :return: False on bad syntax or rejection; True when an MX host accepts
        the address; None after a syntax-only pass (check_mx=False).
        NOTE(review): callers treating the result as a bool see that None
        as falsy -- confirm whether True was intended for that path.
    """
    # FIX: raw string (the original emitted invalid-escape-sequence warnings)
    regex = r'^[_a-z0-9-]+(\.[_a-z0-9-]+)*@[a-z0-9-]+(\.[a-z0-9-]+)*(\.[a-z]{2,})$'
    # Syntax check
    if re.match(regex, email) is None:
        ApiLogging.warning('Bad Syntax')
        return False
    if check_mx:
        hostname = email[email.find('@') + 1:]
        mx_hosts = get_mx_ip(hostname)
        if mx_hosts is None:
            return False
        for key in mx_hosts:
            try:
                server = smtplib.SMTP()
                server.set_debuglevel(0)
                # SMTP Conversation
                if mx_hosts[key] is None:
                    return False
                server.connect(mx_hosts[key])
                server.helo(server.local_hostname)
                server.mail(sending_email)
                code, message = server.rcpt(email)
                server.quit()
                # 250 = recipient accepted
                return code == 250
            except Exception:
                # any network/SMTP failure counts as "not deliverable"
                return False
def _get_public_suffix_list():
    """Return a set containing all Public Suffixes.

    If the env variable PUBLIC_SUFFIX_LIST does not point to a local copy of
    the public suffix list it is downloaded into memory each time urltools
    is imported. The result is cached in the module-level PSL set.
    """
    global PSL
    if PSL:
        # already loaded once for this process
        return
    ApiLogging.info('open PLS ...')
    data_file = os.path.realpath(os.path.dirname(__file__) + '/data/tlds.txt')
    with codecs.open(data_file, 'r', 'utf-8') as f:
        raw_lines = f.readlines()
    suffixes = set()
    for raw in raw_lines:
        entry = raw.strip()
        # skip blanks and '//' comment lines
        if entry != '' and not entry.startswith('//'):
            suffixes.add(entry)
    PSL = suffixes
def put(self, item, tag, priority=None):
    """
    receive RequestObject as item and put in PGQ
    :param item: the item that should be put on queue
    :param tag: use tag as previewing inside of each item, it must be unique
    :param priority: use priority for ordering item
    :return: return True if put item on queue successfully
    :raises DatabaseError: on a duplicate tag or any database failure
    """
    try:
        record = QueueModel()
        record.name = self.name
        record.data = item
        record.tag = tag
        if priority:
            record.priority = priority
        record.save()
        ApiLogging.info('add to queue')
        return True
    except IntegrityError:
        # unique constraint on tag
        raise DatabaseError('tag is repetitive')
    except Exception:
        raise DatabaseError('can not access database')
def run(self):
    """Consumer loop: poll the queue and fetch messages while capacity allows."""
    ApiLogging.info('Start Consuming From ' + str(self.queue_name))
    while 1:
        # # check if there is new request on queue
        # NOTE(review): this calls queue_declare in a tight loop with no
        # sleep -- it will spin the CPU and hammer the broker; confirm
        # whether a blocking consume or a short sleep was intended.
        status = self.channel.queue_declare(queue=self.queue_name, durable=True)
        if int(status.method.message_count) == 0:
            continue
        else:
            ApiLogging.info(status.method.message_count)
        # check if still we're able to process request, then get new message
        # NOTE(review): the meaning of the magic value 3 for process_limit
        # is not visible from this file -- confirm before changing.
        if int(self.process_limit) == 3:
            (_, _, message) = self.__get_message()
            ApiLogging.info(message)
def decode(self, hash_type, hash_code):
    """Submit hash_code/hash_type to the decode form and return the answer.

    Returns the answer text (first line) on success, None when the hash is
    reported Not Found, and retries itself once more per captcha failure.
    Returns None implicitly when login fails or the input field is missing.
    """
    if self.login():
        hash_field = self.robot.find_by_css(
            '#ctl00_ContentPlaceHolder1_TextBoxInput')
        if hash_field is not None:
            type_field = self.robot.find_by_css(
                '#ctl00_ContentPlaceHolder1_InputHashType')
            hash_field.set_value(hash_code)
            type_field.set_value(hash_type)
            self.fill_captcha_if_needed()
            submit_button = self.robot.find_by_css(
                "#ctl00_ContentPlaceHolder1_Button1")
            submit_button.click()
            result = self.robot.find_by_css(
                '#ctl00_ContentPlaceHolder1_LabelAnswer')
            ApiLogging.info("result in hash: %s" % result.get_text())
            ApiLogging.info('type: ' + str(hash_type) + ' code: ' +
                            str(hash_code))
            chk_result = self.check_result(result)
            if chk_result == VERIFY:
                # wrong captcha: retry the whole submission
                # NOTE(review): the recursive result is discarded, so the
                # caller gets None even when the retry succeeds -- confirm
                self.decode(hash_type, hash_code)
            elif chk_result == PAYMENT:
                # answer is behind a purchase link: click it and re-read
                pr = self.robot.find_by_contain_text('a', 'Purchase')
                ApiLogging.info('click payment' + str(pr.get_text()))
                if pr:
                    pr.click()
                    result = self.robot.find_by_css(
                        '#ctl00_ContentPlaceHolder1_LabelAnswer')
                    chk_result = self.check_result(result)
                    if chk_result is None:
                        return result.get_text()
            elif chk_result == NOT_FOUND:
                return None
            else:
                # plain answer: first line only
                return result.get_text().split('\n')[0]
        else:
            ApiLogging.warning('login fail')
def send_signal(process_names):
    """Broadcast SIGUSR1 to each named single-instance process.

    :param process_names: iterable of process file names to signal; names
        with more than one live pid are skipped with a warning
    """
    try:
        for name in process_names:
            pids = []
            for process_name in constants.APP_PROCESSES:
                if process_name.get('name') == name:
                    pids = find_pid(process_name.get('token'))
            if len(pids) > 1:
                ApiLogging.warning('Too many ' + str(name) + ' process running')
            elif len(pids) == 1:
                p = psutil.Process(pids[0])
                # BUG FIX: the original log printed the pid but labelled it
                # "process name", which made the logs misleading
                ApiLogging.info('process pid: ' + str(pids[0]))
                p.send_signal(signal.SIGUSR1)
    except Exception as e:
        ApiLogging.critical('broadcast signal exception: ' + str(e))
def login():
    """Log into the site, preferring the saved cookie session.

    Falls back to submitting the login form (solving a captcha when one is
    shown) and persists the resulting cookies. Returns True when a session
    is (or becomes) authenticated, False otherwise.
    """
    if is_logeed_in():
        ApiLogging.info('cookie login')
        return True
    ApiLogging.info('captcha login')
    robot.go_to('/login.aspx')
    email_field = robot.find_by_css('#ctl00_ContentPlaceHolder1_TextBoxCmd5_E')
    password_field = robot.find_by_css('#ctl00_ContentPlaceHolder1_TextBoxCmd5_P')
    email_field.set_value(email)
    password_field.set_value(password)
    fill_captcha_if_needed()
    submit_button = robot.find_by_css("#ctl00_ContentPlaceHolder1_Button1")
    submit_button.click()
    # persist cookies so the next call can take the fast path
    robot.save_cookies_to_file(robot.get_cookies())
    if is_logeed_in():
        ApiLogging.info('logged in')
        return True
    return False
def start(process_name=None):
    """Start one named process, or every configured process.

    Aborts with critical logs when environment requirements are unmet and
    skips anything that already has a live pid.
    """
    requirements = check_requirements()
    if requirements is not True:
        for requirement in requirements:
            ApiLogging.critical(requirement, True)
        return
    if process_name and __get_process(process_name) is not None:
        # single named process
        process = __get_process(process_name)
        pids = find_pid(process.get('token'))
        if pids:
            ApiLogging.warning(str(len(pids)) + ' instance(s) of this process already running!', True)
        else:
            __run(process_name, 'start')
        return
    # no (valid) name given: start everything that is not already running
    for process in constants.APP_PROCESSES:
        if find_pid(process.get('token')):
            ApiLogging.warning(process.get('name') + ' is already running!', True)
        else:
            __run(process.get('name'), 'start')
def run(self):
    # NOTE: the string literal below sits inside the try, so it is a no-op
    # expression statement rather than the function's docstring
    try:
        """ run module and save result in database """
        ApiLogging.info("module " + str(self.task_model.route))
        self.instance_module.prepare()
        self.instance_module.run()
        self.task_model.status = constants.STATUS_SUCCESS
        if not self.instance_module.result:
            # module returned without publishing a result: record an error
            # payload. NOTE(review): task_model.status stays STATUS_SUCCESS
            # here while response_data carries STATUS_ERROR -- confirm that
            # inconsistency is intended.
            error = {'code': ResultNotSetError.get_code(), 'message': 'module not set any result before return!'}
            self.task_model.response_data = {
                'data': to_json(self.instance_module.result),
                'error': to_json(error),
                'status': constants.STATUS_ERROR,
                'token': self.task_model.token,
                'process_id': self.task_model.process_id
            }
    except Exception as e:
        LogHandler.save(sys.exc_info(), process_id=self.task_model.process_id)
        ApiLogging.error('result exception ' + str(e))
        self.task_model.status = constants.STATUS_FAILED
        old_exception = e.__str__()
        try:
            # project exceptions expose get_code(); anything else falls back
            # to code 0 with the original message (inner `e` shadows outer)
            error = {'code': e.get_code(), 'message': e.args[0]}
        except Exception as e:
            error = {'code': 0, 'message': old_exception}
        self.task_model.response_data = {
            'data': to_json(self.instance_module.result),
            'error': to_json(error),
            'status': constants.STATUS_ERROR,
            'token': self.task_model.token,
            'process_id': self.task_model.process_id
        }
    finally:
        # always persist the task and wake the dispatcher
        self.task_model.save()
        ApiLogging.info('emit finish signal')
        self.instance_module.task_finished.emit(self.task_model.process_id)
def signal_handler(self, signal, frame):
    """SIGINT handler: switch this worker to 'step2' after a short pause.

    The ``signal`` parameter shadows the ``signal`` module inside this
    method (standard handler signature: signal number + current frame).
    """
    ApiLogging.info('You pressed Ctrl+C!')
    self.name = 'step2'
    # NOTE(review): blocking 5s inside a signal handler stalls the whole
    # process -- confirm the delay is intentional
    sleep(5)
    ApiLogging.info('cont')
        # (continuation of ProcessLog.run -- the method's `def` line is
        # outside this chunk; indentation reconstructed, TODO confirm)
        # threshold in bytes: default_size is expressed in kilobytes
        size = default_size * 1000
        if os.path.isfile(file):
            if os.path.getsize(file) >= size:
                with open(file, 'r') as f:
                    lines = f.readlines()
                if len(lines) < remove_lines:
                    ApiLogging.critical(
                        "the specific number of lines is greater than number of desired {0} file lines".format(file))
                # NOTE(review): this rewrite still runs when the check above
                # fired, leaving the file empty -- confirm that is intended
                with open(file, 'w') as f:
                    f.writelines(lines[remove_lines:])
        else:
            ApiLogging.critical("the {0} is not existed".format(file))


if __name__ == '__main__':
    # token gate: the last CLI argument must match the magic token
    if sys.argv.pop(-1) != "0X105070":
        try:
            ApiLogging.info(type(sys.argv.pop(-1)))
        except IndexError:
            ApiLogging.critical('missing token parameter')
            sys.exit(0)
        ApiLogging.critical('wrong token parameter')
        sys.exit(0)
    # app = QCoreApplication(sys.argv)
    # rotate every log file every 30 seconds, forever
    while True:
        time.sleep(30)
        for file in os.listdir(BASE_APP_PATH + '/logs'):
            p = ProcessLog()
            p.run(BASE_APP_PATH + '/logs/' + file, default_size=DEFAULT_SIZE_LOG, remove_lines=REMOVE_LINES_LOG)
    # sys.exit(app.exec_())
    # (continuation of a `with open(...) as f:` block that starts outside
    # this chunk -- indentation reconstructed, TODO confirm)
    # persist the current debug flag
    f.write('debug_mode = ' + str(debug_mode))

# make sure the log directory exists before any process starts
if not os.path.exists(BASE_APP_PATH + '/logs'):
    os.mkdir(BASE_APP_PATH + '/logs')
args = vars(arg.parse_args())
action = args.get('action')
p = args.get('process_name', None)
set_mode(debug_mode)
# dispatch on the requested CLI action
if action == 'status':
    status()
elif action == 'start':
    if p:
        start(p)
    else:
        start()
elif action == 'stop':
    if p:
        stop(p)
    else:
        stop()
elif action == 'restart':
    stop()
    # NOTE(review): the message says one second but the sleep is 3 -- confirm
    ApiLogging.critical('Start all process after one second...')
    sleep(3)
    start()
else:
    # print('action is not found!')
    pass