def get(self, filep='', urlpath='', json=True):
    """GET ``self.path`` (plus optional query string) over the open connection.

    Args:
        filep: if non-empty, write the response body to this file path and
            return the path instead of the body.
        urlpath: query string appended to ``self.path`` after ``'?'``; when
            empty, ``self.path_param_url`` is used as the request path.
        json: when True (and no ``filep``), decode the body via
            ``self._rt_solve``; otherwise return the raw body.

    Returns:
        The file path, the decoded payload, the raw body, or None when the
        server answers with a non-200 status.

    Raises:
        RuntimeError: when no connection has been established.
    """
    if not self.con:
        raise RuntimeError('NO connection!')
    purl = self.path + '?' + urlpath if urlpath else self.path_param_url
    loger.info('get data from: %s' % purl)
    self.con.request('GET', purl)
    resp = self.con.getresponse()
    if resp.status != 200:
        # non-200 silently yields None -- callers must cope
        return None
    if filep:
        fo = None
        try:
            fo = open(filep, 'w+b')
            fo.write(resp.read())
        except Exception:
            # BUG FIX: was a bare ``except:`` (also caught KeyboardInterrupt/
            # SystemExit); keep the best-effort fallback to the helper.
            self._url_file_save(purl, filep)
        finally:
            if fo:
                fo.close()
        loger.info('file save done')
        return filep
    # perhaps get a file some...
    rt = resp.read()
    return self._rt_solve(rt) if json else rt
def via_cache(cls, cpath, force=False):
    """Look up *cpath* in the class-level LRU cache.

    Also rotates the active cacher (A/B) when ``switch_time`` has elapsed,
    scheduling a deferred clear of the retired one.

    Args:
        cpath: cache key; normalized through ``cls._xpath`` before lookup.
        force: currently unused; kept for interface compatibility.

    Returns:
        The cached value, or None on a miss or when waiting for the
        writer lock (``C_LOCK``) times out.
    """
    # check for switch cache
    dt = time.time() - cls.last_time
    if dt > cls.switch_time:
        loger.info("time to switch cacher")
        cls.cacheA_Mark = not cls.cacheA_Mark
        cls.cache = cls.cacherA if cls.cacheA_Mark else cls.cacherB
        cls.cache_array = []
        cls.last_time = time.time()
        cls.stimer = Timer(cls.clear_time, cls.cclear)
        cls.stimer.start()
    # real job
    cpath = cls._xpath(cpath)
    if cpath not in cls.cache:
        return None
    loger.info("%s in cache!" % cpath)
    if cls.C_LOCK is True:
        loger.debug("CHECK: wait for C_LOCK!")
        t = cls.lock_wcount
        while t > 0:
            time.sleep(cls.lock_wait)
            if cls.C_LOCK is False:
                break
            t -= 1
        else:
            # BUG FIX: previously the error + ``return None`` ran even after
            # a successful ``break`` (lock released), discarding a valid
            # cache hit; ``while/else`` fires only on genuine timeout.
            loger.error("CHECK: wait for C_CLOCK over time!")
            return None
    # LRU bookkeeping: move the hit to the front of the recency list
    cls.cache_array.remove(cpath)
    cls.cache_array.insert(0, cpath)
    return cls.cache[cpath]
def __enter__(self):
    """Context entry: open an HTTP or HTTPS connection to ``self.baseurl``.

    The class-level ``server_schema`` selects the transport; anything other
    than ``'https'`` falls back to plain HTTP. Returns self.
    """
    loger.info('enter with con by baseurl: %s' % self.baseurl)
    if self.__class__.server_schema != 'https':
        self.con = httplib.HTTPConnection(self.baseurl, timeout=5)
    else:
        # ``Context`` is a module-level TLS context configured elsewhere
        self.con = httplib.HTTPSConnection(self.baseurl, timeout=5,
                                           context=Context)
    return self
def get(self, token, filep='', json=True):
    """GET the URL derived from *token* over the open connection.

    Args:
        token: opaque value handed to ``self._url_maker`` to build
            ``self.url`` before the request is issued.
        filep: if non-empty, write the response body to this file path and
            return the path instead of the body.
        json: when True (and no ``filep``), decode the body via
            ``self._rt_solve``; otherwise return the raw body.

    Returns:
        The file path, the decoded payload, the raw body, or None when the
        server answers with a non-200 status.

    Raises:
        RuntimeError: when no connection has been established.
    """
    if not self.con:
        raise RuntimeError('NO connection!')
    self._url_maker(token)
    self.con.request('GET', self.url)
    resp = self.con.getresponse()
    if resp.status != 200:
        # non-200 silently yields None -- callers must cope
        return None
    if filep:
        fo = None
        try:
            fo = open(filep, 'w+b')
            fo.write(resp.read())
        except Exception:
            # BUG FIX: was a bare ``except:`` (also caught KeyboardInterrupt/
            # SystemExit); keep the best-effort fallback to the helper.
            self._url_file_save(self.url, filep)
        finally:
            if fo:
                fo.close()
        loger.info('file save done')
        return filep
    # perhaps get a file some...
    rt = resp.read()
    return self._rt_solve(rt) if json else rt
def col_dep_tearchers(self, dpid, level, page=1, page_limit=100, takeall=False):
    """Collect the teachers of one department by walking the paged user API.

    Args:
        dpid: department id sent as ``departid``.
        level: department level passed through to the request.
        page: initial ``page`` value in the request dict (overwritten each
            iteration by the loop counter).
        page_limit: page size; a short page signals the last one.
        takeall: when True walk up to 99 pages, otherwise fetch one page.

    Returns:
        List of teacher records accumulated across the fetched pages.
    """
    reqd = {
        'usertype': 2,
        'departid': dpid,
        'level': level,
        'page': page,
        'pageSize': page_limit
    }
    max_page = 100 if takeall else 2
    teachers = []
    total = 0
    for pageno in range(1, max_page):
        reqd['page'] = pageno
        realurl = self.handle_msger(reqs=reqd)
        with capp_smsg('get_users') as qer:
            wx_rtdata = qer.get(urlpath=realurl)
        if wx_rtdata['code'] != 0:
            # backend refused the page -- stop with whatever we collected
            break
        batch = wx_rtdata['data']['dataList']
        teachers.extend(batch)
        total += len(batch)
        if len(batch) < page_limit:
            # short page => last page
            loger.info("users with total: %s" % total)
            break
        # small pause between pages to avoid hammering the API
        time.sleep(0.2)
    return teachers
def get(self, filep='', urlpath='', json=True, force=False, viacache=True):
    """GET via ``requests`` with read-through on the class-level cache.

    Args:
        filep: if non-empty, write the response body to this file path and
            return the path instead of the body.
        urlpath: query string appended to ``self.path`` after ``'?'``.
        json: when True (and no ``filep``), return the parsed JSON payload;
            otherwise return the raw body bytes.
        force: when True, bypass the cache lookup.
        viacache: when True, store successful JSON payloads
            (``code == 0`` with a ``data`` key) back into the cache.

    Returns:
        The file path, the parsed JSON dict, or the raw body bytes.
    """
    baseurl = self.server_schema + "://" + self.baseurl
    # NOTE(review): when urlpath is empty the schema/host prefix is NOT
    # prepended (purl is bare ``self.path_param_url``) -- confirm that
    # ``path_param_url`` is already an absolute URL.
    purl = baseurl + (self.path + '?' + urlpath) if urlpath else self.path_param_url
    if not force:
        rt = self.__class__.via_cache(purl)
        if rt is not None:
            return self._rt_solve(rt) if json else rt
    r = requests.get(purl)
    if filep:
        fo = None
        try:
            fo = open(filep, 'w+b')
            # BUG FIX: previously wrote ``resp.read()`` where ``resp`` was
            # undefined (NameError); use the requests response body.
            fo.write(r.content)
        except Exception:
            # BUG FIX: was a bare ``except:``; keep best-effort fallback
            self._url_file_save(purl, filep)
        finally:
            if fo:
                fo.close()
        loger.info('file save done')
        return filep
    # perhaps get a file some...
    rt = r.content
    if not json:
        return rt
    rt_json = r.json()
    if viacache and rt_json['code'] == 0 and 'data' in rt_json:
        self.__class__.store_cache(purl, rt)
    return rt_json
def get(self, filep='', urlpath='', json=True):
    """GET ``self.path`` (plus optional query string) via ``requests``.

    Args:
        filep: if non-empty, write the response body to this file path and
            return the path instead of the body.
        urlpath: query string appended to ``self.path`` after ``'?'``; when
            empty, ``self.path_param_url`` is used as the URL.
        json: when True (and no ``filep``), decode the body via
            ``self._rt_solve``; otherwise return the raw body bytes.

    Returns:
        The file path, the decoded payload, or the raw body bytes.
    """
    purl = self.path + '?' + urlpath if urlpath else self.path_param_url
    loger.info('get data from: %s' % purl)
    r = requests.get(purl)
    content = r.content
    if filep:
        try:
            # ``with`` guarantees the handle closes (replaces the old
            # manual try/finally close dance)
            with open(filep, 'w+b') as fo:
                fo.write(content)
        except Exception:
            # BUG FIX: was a bare ``except:`` (also caught KeyboardInterrupt/
            # SystemExit); keep the best-effort fallback to the helper.
            self._url_file_save(purl, filep)
        loger.info('file save done')
        return filep
    # perhaps get a file some...
    return self._rt_solve(content) if json else content
def cclear(cls):
    """Timer callback: wipe whichever cacher is currently idle.

    While cacher A is active, cacher B is cleared, and vice versa.
    """
    # the active cacher keeps serving; only its idle twin is cleared
    idle = cls.cacherB if cls.cacheA_Mark else cls.cacherA
    idle_name = 'cacherB' if cls.cacheA_Mark else 'cacherA'
    loger.info("600s and clear on cache: %s with len: %d" % (idle_name, len(idle)))
    idle.clear()
def store_cache(cls, cpath, data):
    """Insert *data* under the normalized *cpath*, evicting when full.

    When the cache reaches ``cache_limit`` (and no eviction is already in
    flight), the oldest entries are kicked out under the ``C_LOCK`` guard.

    Args:
        cpath: cache key; normalized through ``cls._xpath``.
        data: payload to store.

    Returns:
        True (always).
    """
    cpath = cls._xpath(cpath)
    loger.info("%s store into cache!" % cpath)
    if cls.cpos >= cls.cache_limit and cls.C_LOCK is False:
        loger.info("cache ups to limit! do short clear.")
        # BUG FIX: ``round()`` returns a float (Python 2), and
        # ``xrange()`` requires an int -- coerce explicitly.
        _times = int(min(cls.cache_kicks, round(len(cls.cache) * cls.cache_kickr)))
        cls.C_LOCK = True
        for _ in xrange(_times):
            # evict from the cold end of the recency list
            xpath = cls.cache_array.pop()
            try:
                cls.cache.pop(xpath)
            except KeyError:
                loger.warn("pop cache error with key: %s" % xpath)
                continue
        cls.cpos = len(cls.cache) - 1
        cls.C_LOCK = False
    cls.cpos += 1
    loger.debug("cache size: %s!" % cls.cpos)
    cls.cache_array.append(cpath)
    cls.cache[cpath] = data
    return True
def get(self, filep='', urlpath='', json=True, force=False, viacache=True):
    """GET over the open connection with read-through on the class cache.

    Args:
        filep: if non-empty, write the response body to this file path and
            return the path instead of the body.
        urlpath: query string appended to ``self.path`` after ``'?'``; when
            empty, ``self.path_param_url`` is used as the request path.
        json: when True (and no ``filep``), decode the body via
            ``self._rt_solve``; otherwise return the raw body.
        force: when True, bypass the cache lookup.
        viacache: when True, store payloads with ``success`` truthy back
            into the cache.

    Returns:
        The file path, the decoded payload, the raw body, or None when the
        server answers with a non-200 status.

    Raises:
        RuntimeError: when no connection has been established.
    """
    purl = self.path + '?' + urlpath if urlpath else self.path_param_url
    # (removed leftover ``print(purl)`` debug statement; loger covers it)
    loger.info('get data from: %s' % purl)
    if not force:
        rt = self.__class__.via_cache(purl)
        if rt is not None:
            return self._rt_solve(rt) if json else rt
    if not self.con:
        raise RuntimeError('NO connection!')
    self.con.request('GET', purl)
    resp = self.con.getresponse()
    if resp.status != 200:
        # non-200 silently yields None -- callers must cope
        return None
    if filep:
        fo = None
        try:
            fo = open(filep, 'w+b')
            fo.write(resp.read())
        except Exception:
            # BUG FIX: was a bare ``except:``; keep best-effort fallback
            self._url_file_save(purl, filep)
        finally:
            if fo:
                fo.close()
        loger.info('file save done')
        return filep
    # perhaps get a file some...
    rt = resp.read()
    if not json:
        return rt
    rt_json = self._rt_solve(rt)
    if viacache and rt_json['success']:
        self.__class__.store_cache(purl, rt)
    return rt_json
def __exit__(self, exc_type, exc_val, exc_tb):
    """Context exit: log completion only.

    Returns None implicitly, so any in-flight exception propagates.
    NOTE(review): no connection teardown happens here -- confirm whether
    any resource opened by the paired ``__enter__`` needs closing.
    """
    loger.info("done ap_msger")
def __enter__(self):
    """Context entry: log the target base URL and return self.

    No connection is opened here; this variant only announces the session.
    """
    loger.info('enter with con by baseurl: %s' % self.baseurl)
    return self