def get_url(url, username, parent, request_id, action, isSub, type, isdir):
    """
    Query RCMS for the user's channel information and build the url
    document for one refresh/preload request.

    :param url: the url this task operates on
    :param username: requesting user
    :param parent: parent account name
    :param request_id: _id of the enclosing request document
    :param action: action type of the request
    :param isSub: when True, validate through the portal API instead
    :param type: task type (shadows the builtin; kept for compatibility)
    :param isdir: whether the url was submitted as a directory
    :return: dict ready to be stored as a url document
    """
    doc_id = ObjectId()
    # A directory url must end with '/'; otherwise demote it to a plain url.
    if isdir and not url.endswith('/'):
        logger.info('get url url is not dir: url %s isdir %s' % (url, isdir))
        isdir = False
    if isSub:
        isValid, is_multilayer, channel_code, ignore_case = \
            rcmsapi.isValidUrlByPortal(username, parent, url)
    else:
        isValid, is_multilayer, channel_code, ignore_case = \
            rcmsapi.isValidUrl(parent, url)
    # Priority is only meaningful for urls that matched a channel.
    high_priority = is_refresh_high_priority(channel_code) if isValid else False
    return {
        "_id": doc_id,
        "r_id": request_id,
        "url": url,
        "ignore_case": ignore_case,
        "status": 'PROGRESS' if isValid else 'INVALID',
        "isdir": isdir,
        "username": username,
        "parent": parent,
        "created_time": datetime.now(),
        "action": action,
        "is_multilayer": is_multilayer,
        "channel_code": channel_code,
        'type': type,
        'high_priority': high_priority,
        'channel_name': get_channelname(url),
    }
def test_isValidUrl_cached(self):
    """isValidUrl falls through the (empty) cache and resolves the channel
    rendered from RCMS."""
    # Cache miss: hget returns None, forcing a render_channels_rcms call.
    when(rcmsapi.redisutil.channels_cache).hget(
        self.rediscache.CHANNELS_PREFIX % self.username,
        'http://www.chinacache.com').thenReturn(None)
    cc = [{
        "code": "7180",
        "customerName": "chinacache",
        "channelState": "COMMERCIAL",
        "multilayer": "false",
        "is_valid": "true",
        "name": "http://www.chinacache.com"
    }]
    when(rcmsapi.redisutil.OutSysToRedis()).render_channels_rcms(
        self.username).thenReturn(cc)
    when(rcmsapi.redisutil).getExtensiveDomainName().thenReturn(
        'http://www.chinacache.com')
    # BUG FIX: assertTrue(a, b) treats b as the failure *message* and passes
    # for any truthy a, so the original assertion could never fail.
    # isValidUrl returns a 4-tuple (isValid, is_multilayer, channel_code,
    # ignore_case) — compare the leading fields explicitly.
    # TODO(review): confirm expected channel_code — original said '0005' but
    # the mocked RCMS channel is '7180'.
    result = rcmsapi.isValidUrl('chinacache',
                                'http://www.chinacache.com/test.jpg')
    self.assertEqual((True, False, '0005'), result[:3])
def get_error_url(username, urls):
    """
    Partition *urls* into error categories and return a readable report.

    BUG FIX: the original used ``urls.pop(i)`` inside comprehensions that
    were simultaneously enumerating ``urls``.  Mutating a list while
    iterating it skips the element that follows every removal and pops the
    wrong index once the list has shifted, so urls were mis-classified or
    silently missed.  This version classifies each url exactly once without
    mutating the list during iteration.

    Checks keep the original precedence: bad format first, then HTTP status
    >= 404, then channel validity.  Matched urls are removed from *urls*
    in place, preserving the original's caller-visible mutation.

    :param username: user whose channels are consulted for validity
    :param urls: list of url strings; mutated in place (error urls removed)
    :return: report string; empty when every url passed all checks
    """
    bad_format = []
    failed_404 = []
    not_valid = []
    remaining = []
    for url in urls:
        if len(url) > 255 or not url.startswith('http://'):
            bad_format.append('\n' + url)
        elif getHttpStatus(url) >= 404:
            failed_404.append('\n' + url)
        elif not rcmsapi.isValidUrl(username, url)[0]:
            not_valid.append('\n' + url)
        else:
            remaining.append(url)
    urls[:] = remaining  # keep the original's in-place removal semantics
    errorList = []
    if bad_format:
        errorList.append('\nfollowing urls ignored because of bad format:\n')
        errorList.append('\n'.join(bad_format))
    if not_valid:
        errorList.append('\nfollowing urls ignored because of domain range:\n')
        errorList.append('\n'.join(not_valid))
    if failed_404:
        errorList.append(
            '\nfollowing urls ignored because of status code >= 404:\n')
        errorList.append('\n'.join(failed_404))
    return ''.join(errorList)
def test_isValidUrl_False(self):
    """isValidUrl rejects a url whose domain matches none of the user's
    channels."""
    # Cache miss for the known channel (the duplicate hget stub in the
    # original was redundant and has been removed).
    when(rcmsapi.redisutil.channels_cache).hget(
        self.rediscache.CHANNELS_PREFIX % self.username,
        'http://www.chinacache.com').thenReturn(None)
    when(rcmsapi.redisutil.channels_cache).expire().thenReturn(None)
    cc = [{
        "code": "7180",
        "customerName": "chinacache",
        "channelState": "COMMERCIAL",
        "multilayer": "false",
        "is_valid": "true",
        "name": "http://www.chinacache.com"
    }]
    when(rcmsapi.redisutil.OutSysToRedis()).render_channels_rcms(
        self.username).thenReturn(cc)
    when(rcmsapi.redisutil).getExtensiveDomainName().thenReturn(
        'http://www.chinacache.com')
    # BUG FIX: assertTrue(a, b) treats b as the failure *message* and passes
    # for any truthy a, so the original assertion was vacuous.  Compare the
    # leading (isValid, is_multilayer, channel_code) fields explicitly.
    result = rcmsapi.isValidUrl('chinacache', 'http://www.wrong.com/test.jpg')
    self.assertEqual((False, False, 0), result[:3])
def preload_timing_action():
    """
    Handle submission of the timed ("big") preload form.

    Reads the form fields, validates every url against the user's channels,
    collects the preload devices that match those channels, and forwards the
    task to the big-preload service.  When no requested device matches, the
    timing form is re-rendered with a device error instead.

    Fixes over the original:
      * ``strftime("%Y-%m-%d %H:%m")`` used %m (month) where the minute
        (%M) was clearly intended;
      * ``traceback.format_exc(e)`` — format_exc takes a *limit*, not an
        exception; call it with no argument inside the handler;
      * Py2-only ``except Exception, e`` replaced with ``except Exception``
        (the bound exception was unused);
      * the device-matching loop, duplicated in both branches, is hoisted
        into the shared ``_match_devices`` helper.

    :return: redirect to /preload_query on success (or after a failed send),
             or the re-rendered timing form when no device matched.
    """
    def _match_devices(channel_code, device_list, all_data_devices):
        # Move every requested device that serves this channel from
        # device_list into all_data_devices (each device is used once).
        pre_devs = init_preload_devs(channel_code)
        if pre_devs:
            for pre_line in pre_devs:
                if pre_line['name'] in device_list:
                    all_data_devices.append(pre_line)
                    device_list.remove(pre_line['name'])

    username = request.form.get('username')
    parent_name = request.form.get('parent_name')
    preload_timing = request.form.get('preload_timing')
    preload_url = request.form.get('preload_url')
    preload_devices = request.form.get('preload_device')
    preload_validation = request.form.get('preload_validation')
    logger.debug("username:::%s" % username)
    logger.debug("parent_name:::%s" % parent_name)
    logger.debug("preload_timing:::%s" % preload_timing)
    logger.debug("preload_url::::%s" % preload_url)
    logger.debug("preload_devices:::%s" % preload_devices)
    logger.debug("preload_validation:::%s" % preload_validation)
    # Normalize form values to utf-8 byte strings (Py2 Flask gives unicode).
    utf_username = username.encode("utf-8")
    utf_parent_name = parent_name.encode("utf-8")
    utf_preload_timing = preload_timing.encode("utf-8")
    utf_preload_url = preload_url.encode("utf-8")
    utf_preload_devices = preload_devices.encode("utf-8")
    utf_preload_validation = preload_validation.encode("utf-8")
    url_list = utf_preload_url.split('\r\n')
    device_list = utf_preload_devices.split('\r\n')
    logger.debug("url_list::::%s" % url_list)
    logger.debug("device_list::::%s" % device_list)
    all_data_devices = []
    for url_line in url_list:
        # Sub-accounts (parent_name set) must validate through the portal.
        if utf_parent_name == '':
            isValid, is_multilayer, channel_code, ignore_case = \
                rcmsapi.isValidUrl(utf_username, url_line)
        else:
            isValid, is_multilayer, channel_code, ignore_case = \
                rcmsapi.isValidUrlByPortal(utf_username, utf_parent_name,
                                           url_line)
        if isValid:
            _match_devices(channel_code, device_list, all_data_devices)
    logger.debug("all_data_devices::::%s" % all_data_devices)
    # Number the non-empty urls 1..n for the downstream task payload.
    all_data_urls = [{'url': url_line, 'id': idx}
                     for idx, url_line in
                     enumerate((u for u in url_list if u != ''), 1)]
    logger.debug("all_data_urls::::%s" % all_data_urls)
    send_type = config.get("big_preload_address", "preload_address")
    if not all_data_devices:
        # No requested device matched any channel: re-render the form with
        # an error rather than submitting an empty preload task.
        # BUG FIX: the original formatted the minute with %m (month).
        timing = datetime.strftime(datetime.now(), "%Y-%m-%d %H:%M")
        args = {'timing': timing, 'device_error': 'device_error'}
        return render_template('preload_big_timing.html', args=args)
    big_preload_url = "http://" + send_type + "/internal/preloadDevice"
    logger.debug("big_preload_url:::::%s" % big_preload_url)
    # The form supplies minutes only; the service expects seconds too.
    utf_preload_timing = utf_preload_timing + ":00"
    params = urllib.urlencode({
        "username": utf_username,
        "compressed": False,
        "tasks": json.dumps(all_data_urls),
        "nest_track_level": 0,
        "startTime": utf_preload_timing,
        "validationType": utf_preload_validation,
        "speed": "",
        "devices": json.dumps(all_data_devices)
    })
    logger.debug("params:::%s" % params)
    try:
        urllib.urlopen(big_preload_url, params)
    except Exception:
        # Best-effort send: log the failure but still redirect the user.
        logger.debug("200G:error--%s" % traceback.format_exc())
    return redirect('/preload_query')