class Fetcher:
    """Fetch a single item over HTTP and dispatch the outcome to registered
    handler objects.

    Subclasses are expected to implement ``_fetch()``.  Handlers are plain
    objects exposing a ``process()`` method; result handlers run after a
    successful fetch, exception handlers after a failed one.
    """

    def __init__(self, item, debug_level=0, soc_timeout=10, log=None,
                 name="<unknown>"):
        self._name = name
        self.log = LogAdapter(log=log)
        self._client = HttpClient(log, debug_level=debug_level,
                                  req_timeout=soc_timeout)
        # NOTE: removed the hard-coded ``self._client.timeout = 11`` that
        # silently overrode the ``soc_timeout`` argument just passed above.
        self._html = ""
        self.result_handlers = []
        # BUG FIX: was ``self.exception_handler`` (singular) while
        # add_handler() appends to ``self.exception_handlers`` -> AttributeError.
        self.exception_handlers = []
        self.result = []

    def Fetch(self):
        """Run ``_fetch()``; on success invoke the result handlers, on
        failure log the exception and invoke the exception handlers."""
        try:
            self._fetch()
        except Exception as e:
            self.log.exception(e)
            self.handle_exception()
        else:
            # BUG FIX: the original called handle_exception() on the success
            # path (and twice on failure) and never ran the result handlers.
            self.handle_result()

    def add_handler(self, handler=None, exception_handler=None):
        """Register a result handler and/or an exception handler."""
        if exception_handler:
            self.exception_handlers.append(exception_handler)
        if handler:
            self.result_handlers.append(handler)

    def handle_result(self):
        """Invoke ``process()`` on every registered result handler."""
        for handler in self.result_handlers:
            handler.process()

    def handle_exception(self):
        """Invoke ``process()`` on every registered exception handler."""
        for handler in self.exception_handlers:
            handler.process()
class _db_conn_helper:
    """Thin helper pairing a DB connection with a live cursor, plus a
    best-effort recovery routine for connections that died mid-transaction.

    NOTE(review): relies on private members of the connection object
    (``_transaction``, ``_reset``, ``_ping_check``) -- presumably a
    project-local connection wrapper; confirm against its implementation.
    """

    def __init__(self, conn=None, log=None):
        self.log = LogAdapter(log)
        self._conn = conn
        # Cursor is created eagerly; _handle_exception() re-creates it
        # after a forced reconnect.
        self._cursor = self._conn.cursor()

    def _handle_exception(self, e):
        """this error means that db conn or db internal err happened
           in the process of a transaction which can not be reconnect,
           try to reopen it manually
        """
        self.log.exception(e)
        if self._conn._transaction:
            self.log.error("db conn is dead,1111111111111.")
            # Force-reset the underlying connection and grab a fresh cursor.
            self._conn._reset(True)
            self._cursor = self._conn.cursor()
            # Verify the reconnect actually worked; we can only log on failure.
            if not self._conn._ping_check():
                self.log.error("db conn is dead,reconnect failed.")
class FetcherBase:
    """Base class for item trackers.

    Fetches remote item info (subclasses implement ``_fetch()``), compares
    it against what is already stored, and persists new entries through a
    ``track_db.TrackDB`` accessor.
    """

    def __init__(self, item_id, track_type, log=None):
        self.log = LogAdapter(log)
        self.item_id = item_id
        self.track_type = track_type
        self.name = "FetcherBase"
        self.proxy_dict = ""
        self.track_dict = {}
        self.http = HttpClient()
        self.http.req_timeout = 30
        self.conn = None
        self.db = None
        self.debug_level = 0
        # Becomes True once track_dict has been primed from the DB.
        self.initialised = False

    def SetProxy(self, proxy_dict):
        """Attach an HTTP proxy; a None/empty mapping is ignored."""
        self.proxy_dict = proxy_dict
        if proxy_dict is not None and len(proxy_dict) > 0:
            self.http.AddProxy(self.proxy_dict)

    def SetDBConn(self, conn):
        """Bind a DB connection and build the TrackDB accessor on top of it."""
        self.conn = conn
        self.db = track_db.TrackDB(db_conn=self.conn, log=self.log)

    def Fetch(self):
        """Run the subclass ``_fetch()``; funnel any failure to _error_handle."""
        try:
            self._fetch()
        except Exception as e:
            self._error_handle(e)

    def _chk_new_items(self, item_list):
        """Compare ``item_list`` against known items; store anything new.

        Returns True when the list was processed, False when it was empty
        or an error occurred.
        """
        try:
            counter = len(item_list)
            if counter == 0:
                self.log.debug("[%s] Item info chk over with result is 0.",
                               self.item_id)
                return False
            # Got item info: lazily prime the known-item cache from the DB.
            if not self.initialised:
                self.track_dict = self.db.item_get_top_n(self.item_id, counter,
                                                         self.track_type)
                self.initialised = True
            for item_info in item_list:
                if item_info not in self.track_dict:
                    self.track_dict[item_info] = item_info  # ??? (kept from original)
                    result = self._new_data(item_info)
                    if result == -1:
                        self.log.warn("[%s] Item info already in db, track_time=%s",
                                      self.name, str(item_info.track_time))
                    elif result == 2:
                        self.log.info("[%s] Tracking ended for track_time=%s",
                                      self.name, str(item_info.track_time))
                        # break
            self.log.debug("[%s] Item info chk over.", self.item_id)
            return True
        except Exception as e:
            # BUG FIX: was _error_handle(str(e)), which threw away the
            # exception object and broke the isinstance check downstream.
            self._error_handle(e)
            return False

    def _new_data(self, item):
        """Persist a newly seen item.

        result_
            1   new data stored
            2   tracking ended
            -1  already in db
        """
        self.log.info("[%s] New track info coming:[type=%s|delivered=%d] ITEM=%s,%s,%s,%s",
                      self.name, self.track_type, item.is_ended, item.name,
                      item.description, item.location, item.track_time)
        if self.db:
            return self.db.sp_insert_new_item(self.track_type, item.is_ended,
                                              item.name, item.track_time,
                                              item.description, item.location)
        else:
            raise Exception("db did not initialised")

    def _error_handle(self, msg):
        """Log an error (``msg`` may be a string or an exception instance)
        and mark the item's status in the DB when a DB is available."""
        if not msg:
            return
        self.log.error("[%s] error happend:%s", self.item_id, msg)
        # BUG FIX: was issubclass(msg, BaseException), which raises TypeError
        # when given an instance or a string; isinstance is the intended check.
        if isinstance(msg, BaseException):
            self.log.exception(msg)
        if self.db:
            return self.db.sp_update_item_status(self.track_type, self.item_id)
        self.log.error("[%s] error happend: db access error at meantime...",
                       self.item_id)

    def _dump_error(self, item_id, fetch_url, text="", e=None):
        """Save an exception page under ./ex-pages/ for post-mortem debugging."""
        SAVEPATH = r'ex-pages'
        from time import localtime, time
        import codecs, os
        if text is None or text == "":
            return
        t = localtime(time())
        # BUG FIX: zero-padded fields -- the old "%d%d%d..." produced
        # ambiguous, collision-prone timestamps.
        t_str = "%04d%02d%02d%02d%02d%02d" % (t.tm_year, t.tm_mon, t.tm_mday,
                                              t.tm_hour, t.tm_min, t.tm_sec)
        # BUG FIX: was self.SAVEPATH (no such attribute) -> AttributeError.
        filename = r'./%s/%s_%s.html' % (SAVEPATH, item_id, t_str)
        vavava.util.assure_path("./%s" % SAVEPATH)
        file_full_name = os.path.abspath(filename)
        # Context manager guarantees the file is closed even on write errors.
        with codecs.open(filename, "w", 'utf-8') as f:
            f.writelines('<!--' + fetch_url + '--!>' + os.linesep)
            f.write(text)
        self.log.error(r"page saved at %s", file_full_name)
class Fetcher(object):
    """Pipeline-style fetcher.

    ``self.filters`` holds tagged tuples that are run in order over
    ``self.datas``:

        (1, charset, url, url, ...)              -> download   (filter_get)
        (2, reg_str)                             -> extract    (filter_process)
        (3, key_idx, ...)                        -> wrap rows  (filter_result)
        (4, conn, table, cols, fmt, types)       -> persist    (filter_result_db)
    """

    def __init__(self, log=None):
        self.filters = []
        self.log = LogAdapter(log)
        self.result_data_type = None
        self.datas = []

    def execute(self):
        """Dispatch each configured filter tuple to its implementation."""
        # 'step' instead of 'filter' -- don't shadow the builtin.
        for step in self.filters:
            if step[0] == 1:
                self.filter_get(step[1], step[2:])
            elif step[0] == 2:
                self.filter_process(step[1])
            elif step[0] == 3:
                self.filter_result(step[1:])
            elif step[0] == 4:
                # BUG FIX: was filter_result_db(step[1], step[2]) -- the
                # method takes five arguments; forward the whole tail.
                self.filter_result_db(*step[1:])

    def filter_get(self, charset="utf8", urls=()):
        """Download every URL, decode with ``charset``; results -> self.datas."""
        # () instead of the original mutable default [].
        if len(urls) == 0:
            self.log.warn("no income resource")
        # Loop-invariant import/client hoisted out of the per-URL loop.
        from vavava.httpclient import HttpClient
        htmls = []
        for url in urls:
            try:
                client = HttpClient(log=None, debug_level=0, req_timeout=30)
                data = client.Get(url)
                if data:
                    htmls.append(data.decode(charset))
                else:
                    self.log.debug(url)
            except Exception as e:
                # BUG FIX: was self.log.LOG.exception(url, e) -- LogAdapter
                # has no .LOG attribute and exception() takes the exception.
                self.log.exception(e)
        self.datas = htmls

    def filter_process(self, reg_str=""):
        """Run the project ``reg_helper`` regex over every data item and
        collect all matches into self.datas."""
        result = []
        for data in self.datas:
            try:
                for match in reg_helper(data, reg_str):
                    result.append(match)
            except Exception as e:
                self.log.exception(e)
        self.datas = result

    def filter_result(self, keys=()):
        """Wrap each row of self.datas in a sortable/hashable record whose
        identity is the concatenation of the values at the ``keys`` indexes."""

        class result_data:
            def __init__(self, values=None):
                # None sentinel instead of the original mutable default [].
                self.values = values if values is not None else []

            def _key(self):
                key = ""
                for i in keys:
                    key += self.values[i]
                return key

            def __lt__(self, other):
                return self._key() < other._key()

            def __hash__(self):
                # BUG FIX: was hasattr(self._key()) -> TypeError;
                # hash() was clearly intended.
                return hash(self._key())

        self.datas = [result_data(row) for row in self.datas]

    def filter_result_db(self, conn, table, cols, values_format, types):
        """Insert every row of self.datas into ``table``.

        WARNING(review): SQL is assembled by string interpolation; table/cols
        must come from trusted config, and the values should really use a
        parameterized query (DB-API ``execute(sql, params)``).
        """
        if not (conn and table and cols and values_format):
            return
        sql = """ insert into %s(%s) values(%s) """
        sql1 = sql % (table, cols, values_format)
        cursor = conn.cursor()
        # BUG FIX: was iterating self.results (undefined; the class stores
        # rows in self.datas) and applied '%' once with a flat list of ALL
        # rows' values; insert one row per statement with a tuple instead.
        for row in self.datas:
            values = tuple(self.data(types[i], row[i]) for i in range(len(types)))
            cursor.execute(sql1 % values)
        conn.commit()

    def data(self, t, data):
        """Coerce a raw string according to the declared column type."""
        if t == "string":
            return data
        elif t == "int":
            return int(data)
        elif t == "datetime":
            import time
            return time.strptime(data, "%d/%m/%y %H:%M")
class Work(OBase):
    """Base class for user work items; ``self.do()`` must be overridden.

    For priority work you need to overwrite __lt__ and __hash__.
    """

    def __init__(self, period=None, group="", begin_time=None, end_time=None,
                 log=None, name="<?work>", *args, **kwargs):
        OBase.__init__(self, name=name)
        self.log = LogAdapter(log)
        self._group_id = group
        self._done = False
        self._redoable = False
        self._begin_time = begin_time
        self._end_time = end_time
        self._period = period
        self._db_conn = None
        self._args = args
        self._kwargs = kwargs
        # Specify id for serial works, in order to run those works
        # in the same thread.
        self._serial_id_ = None
        # BUG FIX: parent work queue was never initialised, so
        # add_work_to_parent() could die with AttributeError instead of the
        # intended Exception.  Set externally by the owning queue.
        self._wq_parent = None

    def init(self, period=None, group="", begin_time=None, end_time=None,
             log=None, name="<?work>", *args, **kwargs):
        """Re-initialise an existing instance (mirrors __init__ for reuse)."""
        self.log = LogAdapter(log)
        self._group_id = group
        self._begin_time = begin_time
        self._end_time = end_time
        self._period = period
        self._args = args
        self._kwargs = kwargs
        self.setName(name)

    def do(self, worker=None):
        """Override with the actual work; default is a no-op."""
        pass

    def _do(self, worker=None):
        """Internal runner: mark the work done, execute do(), and log
        (while swallowing) any failure so the worker thread survives."""
        try:
            self._done = True
            self.do(worker)
        except Exception as e:
            self.log.error("delete work:group=%s,name=%s,%s",
                           self._group_id, self._name, e)
            self.log.exception(e)
        except:
            # NOTE(review): deliberately swallows even BaseException
            # (kept from the original) -- confirm this is intended.
            pass

    def add_work_to_parent(self, work):
        """Queue ``work`` on the parent queue; raise if no parent is bound."""
        if getattr(self, "_wq_parent", None):
            self._wq_parent.QueueWork(work)
        else:
            raise Exception("GeneratorWork has no parent handle")

    def get_instance(self, xml):
        pass

    def __lt__(self, other):
        # Earlier begin_time sorts first; a timed work sorts after untimed.
        if self._begin_time and other._begin_time:
            return self._begin_time < other._begin_time
        elif self._begin_time:
            return False
        elif other._begin_time:
            return True
        # BUG FIX: the original fell off the end and returned None here;
        # rich comparisons must return a bool.
        return False

    def __hash__(self):
        return hash(self._begin_time)