示例#1
def delete_fc_rid(urls, devices):
    """

    Args:
        urls: [{"username": xxx, "id": xxxx}]
        devices: [{'code': 200, 'type': 'FC'}]

    Returns:

    """
    try:
        if urls:
            judge_fc_result = judge_device_fc(devices)
            # all FC devices reported success
            if judge_fc_result:

                for url in urls:
                    RESULT_REFRESH = redisfactory.getDB(5)
                    url_id = url.get('id')
                    url_id_fc_r = "URLID_" + str(url_id) + '_FC_R'
                    url_id_fc = "URLID_" + str(url_id) + '_FC'
                    rid_id_fc = RESULT_REFRESH.get(url_id_fc_r)
                    # delete url_id_fc_r
                    RESULT_REFRESH.delete(url_id_fc_r)
                    # delete rid_id_fc  value url_id_fc
                    RESULT_REFRESH.srem(rid_id_fc, url_id_fc)
                    rid_id_fc_smembers = RESULT_REFRESH.smembers(rid_id_fc)
                    if not rid_id_fc_smembers:
                        # _F   finish time
                        RESULT_REFRESH.set(rid_id_fc + '_F', time.time())
                        RESULT_REFRESH.expire(rid_id_fc + '_F', expire_time)

    except Exception:
        logger.error("delete_fc_rid error:%s" % traceback.format_exc())
        logger.info('delete_fc_rid urls:%s, devices:%s' % (urls, devices))
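For reference, a minimal call sketch with inputs shaped as the docstring describes; the id, username and hosts below are invented, and judge_device_fc is assumed to return True only when every FC device reported success:

# illustrative inputs only; all field values are hypothetical
urls = [{"username": "demo_user", "id": "58a1b2c3d4e5f60718293a4b"}]
devices = [{"code": 200, "type": "FC", "host": "10.0.0.1"},
           {"code": 200, "type": "HPCC", "host": "10.0.0.2"}]
delete_fc_rid(urls, devices)  # clears URLID_<id>_FC_R and shrinks the RID_<rid>_FC set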
示例#2
def delete_fc_urlid_host(url_id_fc, host):
    """
    remove host from the url_id_fc set
    Args:
        url_id_fc: key in redis db 5
        host: the ip of the device

    Returns:

    """
    try:
        if url_id_fc and host:
            RESULT_REFRESH = redisfactory.getDB(5)
            # delete the value of set
            RESULT_REFRESH.srem(url_id_fc, host)
            hosts_members = RESULT_REFRESH.smembers(url_id_fc)
            if not hosts_members:
                url_id_fc_r = url_id_fc + "_R"
                rid_id_fc = RESULT_REFRESH.get(url_id_fc_r)
                # delete rid_id_fc  value url_id_fc
                RESULT_REFRESH.srem(rid_id_fc, url_id_fc)
                rid_id_fc_smembers = RESULT_REFRESH.smembers(rid_id_fc)
                if not rid_id_fc_smembers:
                    # _F   finish time
                    RESULT_REFRESH.set(rid_id_fc + '_F', time.time())
                    RESULT_REFRESH.expire(rid_id_fc + '_F', expire_time)
    except Exception:
        logger.error('delete_fc_urlid_host error:%s' % traceback.format_exc())
        logger.info('delete_fc_urlid_host url_id_fc:%s, host:%s' %
                    (url_id_fc, host))
示例#3
def delete_urlid_host(url_id, host, type_t='HPCC'):
    """
    delete host from the URLID_<url_id>_<type_t> set
    Args:
        url_id: the _id of the url
        host: the ip of the device
        type_t: the type of device

    Returns:

    """
    try:
        if url_id and host:
            RESULT_REFRESH = redisfactory.getDB(5)
            url_id_type = 'URLID_' + str(url_id) + "_" + type_t
            # delete the value of set
            RESULT_REFRESH.srem(url_id_type, host)
            hosts_members = RESULT_REFRESH.smembers(url_id_type)
            if not hosts_members:
                url_id_type_r = url_id_type + "_R"
                rid_id_type = RESULT_REFRESH.get(url_id_type_r)
                # remove url_id_type from the set stored at rid_id_type
                RESULT_REFRESH.srem(rid_id_type, url_id_type)
                rid_id_fc_smembers = RESULT_REFRESH.smembers(rid_id_type)
                if not rid_id_fc_smembers:
                    # _F   finish time
                    RESULT_REFRESH.set(rid_id_type + '_F', time.time())
                    RESULT_REFRESH.expire(rid_id_type + '_F', expire_time)
    except Exception:
        logger.error('delete_urlid_host error:%s' % traceback.format_exc())
示例#4
def add_rid_url_info_into_redis(rid, urls):
    """
    rid_id_FC:{URLID_id1_FC, URLID_id2_FC}
    rid_id_HPCC:{URLID_id1_HPCC, URLID_id2_HPCC}

    Args:
        rid: the _id of request
        urls: the urls

    Returns:
    """
    try:
        if urls:
            RESULT_REFRESH = redisfactory.getDB(5)
            RESULT_REFRESH_PIPE = RESULT_REFRESH.pipeline()
            key_fc = "RID_" + str(rid) + "_FC"
            key_hpcc = "RID_" + str(rid) + "_HPCC"
            for url in urls:
                value_fc = "URLID_" + str(url.get("_id")) + "_FC"
                value_fc_r = value_fc + "_R"
                value_hpcc = "URLID_" + str(url.get('_id')) + "_HPCC"
                value_hpcc_r = value_hpcc + "_R"
                # original start
                RESULT_REFRESH_PIPE.sadd(key_fc, value_fc)

                RESULT_REFRESH_PIPE.sadd(key_hpcc, value_hpcc)
                # reverse mapping: value -> key

                RESULT_REFRESH_PIPE.set(value_fc_r, key_fc)
                RESULT_REFRESH_PIPE.set(value_hpcc_r, key_hpcc)

                # expire the keys
                RESULT_REFRESH_PIPE.expire(key_fc, expire_time)
                RESULT_REFRESH_PIPE.expire(key_hpcc, expire_time)
                RESULT_REFRESH_PIPE.expire(value_fc_r, expire_time)
                RESULT_REFRESH_PIPE.expire(value_hpcc_r, expire_time)
                # original end

                # logger.debug("rubin_test   delete pipe")
                # RESULT_REFRESH.sadd(key_fc, value_fc)
                #
                # RESULT_REFRESH.sadd(key_hpcc, value_hpcc)
                # # value  key  reverse
                #
                # RESULT_REFRESH.set(value_fc_r, key_fc)
                # RESULT_REFRESH.set(value_hpcc_r, key_hpcc)
                #
                # # exipire key
                # RESULT_REFRESH.expire(key_fc, expire_time)
                # RESULT_REFRESH.expire(key_hpcc, expire_time)
                # RESULT_REFRESH.expire(value_fc_r, expire_time)
                # RESULT_REFRESH.expire(value_hpcc_r, expire_time)

            RESULT_REFRESH_PIPE.execute()
    except Exception:
        logger.error('add_rid_url_info_into_redis error:%s' %
                     traceback.format_exc())
        logger.error('add_rid_url_info_into_redis rid:%s, urls:%s' %
                     (rid, urls))
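To make the key schema in the docstring concrete, here is a sketch of what redis db 5 holds after a call with two urls; the rid and url ids are invented and redisfactory.getDB(5) is assumed to return a StrictRedis-style client:

RESULT_REFRESH = redisfactory.getDB(5)
add_rid_url_info_into_redis('rid1', [{'_id': 'u1'}, {'_id': 'u2'}])  # hypothetical ids
print(RESULT_REFRESH.smembers('RID_rid1_FC'))  # -> {'URLID_u1_FC', 'URLID_u2_FC'}
print(RESULT_REFRESH.get('URLID_u1_FC_R'))     # -> 'RID_rid1_FC' (reverse lookup)
# delete_fc_urlid_host('URLID_u1_FC', host) later srems that member out of RID_rid1_FC,
# and once the set is empty it records the finish time under RID_rid1_FC_F.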
示例#5
def cp_rewrite_toDB():
    r = redisfactory.getDB(15)
    logger.debug("cp_rewrite_toDB begining...")
    rewrites = []
    for key in r.keys():
        d = {"CHANNEL": key, "REWRITE": r.get(key)}
        rewrites.append(d)
    print rewrites
    database.db_session().rewrite.insert(rewrites)
    logger.debug("cp_rewrite_toDB end.")
示例#6
def get_search_result_by_rid_webluker(rid, username):
    '''
    Query the generic refresh result by rid
    '''
    results = []
    result_cache = redisfactory.getDB(3)

    for rid in rid.split(','):
        result = result_cache.get(username + rid)
        if not result:
            refresh_request = db_s1.task_forward.find_one({'task_id': rid})

            if refresh_request:
                created_time = refresh_request.get('created_time')
                finish_time = refresh_request.get('finish_time')
                # compare finish_time with the current time
                finish_time_timestamps = time.mktime(finish_time.timetuple())
                time_now = datetime.datetime.now()
                time_now_timestamps = time.mktime(time_now.timetuple())
                flag = finish_time_timestamps < time_now_timestamps
                if finish_time_timestamps > time_now_timestamps:
                    finish_time = time_now
                totalTime = (finish_time - created_time).seconds

                # totalTime = (refresh_request.get('finish_time', datetime.datetime.now())
                #          - refresh_request.get('created_time')).seconds
                urlStatus = get_urlStatus_webluker(refresh_request.get('urls'),
                                                   success=flag)

                urlStatus2 = get_urlStatus_webluker(
                    refresh_request.get('dirs'), success=flag)
                urlStatus.extend(urlStatus2)
                # successCount = 0 if flag else (len(refresh_request.get('urls')) + len(refresh_request))
                result = {
                    'r_id': rid,
                    'status': 'UNKNOWN' if not flag else 'SUCCESS',
                    'createdTime': str(refresh_request.get('created_time')),
                    'finishedTime': str(finish_time) if flag else None,
                    'successRate': 1 if flag else 0,
                    'totalTime': totalTime,
                    'username': username,
                    'urlStatus': urlStatus
                }
                if result.get('status') == 'SUCCESS':
                    result_cache.set(username + rid, json.dumps(result))
                    result_cache.expire(username + rid, 300)
            else:
                result = {'r_id': rid, 'msg': '%s not found.' % rid}
        else:
            result = json.loads(result)
        results.append(result)
    return results
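For orientation, the per-rid dict appended to results looks roughly like this once the forwarded task has finished; every value below is illustrative:

# {
#     'r_id': '59f1e2d3c4b5a69788776655',
#     'status': 'SUCCESS',
#     'createdTime': '2017-10-26 10:00:00',
#     'finishedTime': '2017-10-26 10:03:12',
#     'successRate': 1,
#     'totalTime': 192,
#     'username': 'demo_user',
#     'urlStatus': [...]
# }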
示例#7
def sadd_hpcc_urlid(urls, devices, type_t='HPCC'):
    """
    add the HPCC url ids into redis
    Args:
        urls: [{'id': xxx}, ...]
        devices: [{'host': xxx, 'type': 'HPCC', 'code': 200}, ...]
        type_t: the device type used as the redis key suffix

    Returns:

    """
    try:
        if urls and devices:
            RESULT_REFRESH = redisfactory.getDB(5)
            for url in urls:
                id = url.get('id')
                urlid_id_type_t = 'URLID_' + str(id) + "_" + type_t
                for dev in devices:
                    host = dev.get('host')
                    type_dev = dev.get('type')
                    code = dev.get('code')
                    if type_dev == 'HPCC' and (code != 204):

                        RESULT_REFRESH.sadd(urlid_id_type_t, host)
                # set expire time
                RESULT_REFRESH.expire(urlid_id_type_t, expire_time)
                smember_type_t = RESULT_REFRESH.smembers(urlid_id_type_t)
                if not smember_type_t:
                    url_id_type_t_r = urlid_id_type_t + "_R"
                    rid_id_type_t = RESULT_REFRESH.get(url_id_type_t_r)
                    smember_rid_type_t = RESULT_REFRESH.smembers(rid_id_type_t)
                    if not smember_rid_type_t:
                        # _F   finish time
                        RESULT_REFRESH.set(rid_id_type_t + '_F', time.time())
                        RESULT_REFRESH.expire(rid_id_type_t + '_F',
                                              expire_time)
    except Exception:
        logger.error('sadd_hpcc_urlid error:%s' % traceback.format_exc())
        logger.info('sadd_hpcc_urlid urls:%s, devices:%s' % (urls, devices))
示例#8
import os
import logging
import core.redisfactory as redisfactory
#from pymongo import ReplicaSetConnection, Connection
from pymongo import MongoClient
from core.database import query_db_session

LOG_FILENAME = '/Application/bermuda/logs/update_configure.log'
logging.basicConfig(
    filename=LOG_FILENAME,
    format='%(asctime)s - %(name)s - %(levelname)s - %(process)d - Line:%(lineno)d - %(message)s',
    level=logging.INFO)

logger = logging.getLogger('update_configure')
logger.setLevel(logging.DEBUG)
PRELOAD_DEVS = redisfactory.getDB(5)
REWRITE_CHANNEL = redisfactory.getDB(15)
batch_size = 500

#db = Connection("172.16.31.222", 27017)['bermuda']#
# db = Connection('mongodb://*****:*****@%s:27017/bermuda' %('101.251.97.145'))['bermuda']
# db = Connection("101.251.97.201", 27017)['bermuda']
# db = ReplicaSetConnection('%s:27017,%s:27017,%s:27017' % ('172.16.12.136', '172.16.12.135', '172.16.12.134'), replicaSet ='bermuda_db')['bermuda']

# active_db = Connection("223.202.52.134", 27017)['bermuda']
# active_db = ReplicaSetConnection('%s:27017,%s:27017,%s:27017' % ('223.202.52.134', '223.202.52.135', '223.202.52.136'), replicaSet ='bermuda_db')['bermuda']
#SH
#s_uri = 'mongodb://*****:*****@%s:27017,%s:27017,%s:27017/bermuda?replicaSet=bermuda_db' % ('172.16.31.222','172.16.32.254','172.16.32.3')
#BJ
s_uri = 'mongodb://*****:*****@%s:27017,%s:27017,%s:27017/bermuda?replicaSet=bermuda_db' % (
    '172.16.12.136', '172.16.12.135', '172.16.12.134')
示例#9
formatter = logging.Formatter(
    "%(asctime)s - %(name)s - %(levelname)s - %(process)d - Line:%(lineno)d - %(message)s")
fh = logging.FileHandler(LOG_FILENAME)
fh.setFormatter(formatter)

logger = logging.getLogger('region_devs_retry')
logger.addHandler(fh)
logger.setLevel(logging.DEBUG)

# db = query_db_session()
# db = MongoClient("mongodb://*****:*****@172.16.21.205/bermuda", 27017)['bermuda']
# db = MongoClient("mongodb://*****:*****@172.16.21.205:27017/bermuda, 27017)['bermuda']
# db = MongoClient("mongodb://*****:*****@223.202.52.135/bermuda", 27017)['bermuda']
db = database.query_db_session()
preload_cache = redisfactory.getDB(1)
PRELOAD_DEVS = redisfactory.getDB(5)
MONITOR_USER = ["cztv"]
config = initConfig()


def get_result_by_id(url_id):
    try:
        result = preload_cache.get(url_id)
        if result:
            return json.loads(result)
        else:
            return db.preload_result.find_one({"_id": ObjectId(url_id)})
    except Exception:
        return {}
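A minimal usage sketch for the cache-aside lookup above; the id is hypothetical and must be the string form of a preload_result ObjectId:

info = get_result_by_id('58a1b2c3d4e5f60718293a4b')
if info:
    print(info.get('status'))  # served from redis db 1 when cached, otherwise read from Mongo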
示例#10
from core import redisfactory, authentication, database
from core.query_result import get_search_result_by_rid, get_urlStatus, get_search_result_by_rid_autodesk_new
from .query_utils import query_all_result
from . import adapters
from core.config import config
import traceback

# logger = logging.getLogger('receiver')
# logger.setLevel(logging.DEBUG)
logger = log_utils.get_receiver_Logger()
query = Blueprint(
    'query',
    __name__,
)

result_cache = redisfactory.getDB(3)
query_db_session = database.query_db_session()


@query.route("/content/refresh/<rid>", methods=['GET'])
def search(rid):
    username = request.args.get('username', '')
    password = request.args.get('password', '')
    ticket = authentication.verify(username, password, request.remote_addr)
    # try:
    #     user_list = eval(config.get('task_forward', 'usernames'))
    # except Exception, e:
    #     logger.debug('splitter_new submit error:%s' % traceback.format_exc())
    #     user_list = []
    logger.debug(
        "/content/refresh/  search rid:%s, username:%s, remote_addr:%s" %
        (rid, username, request.remote_addr))
示例#11
import urllib.request
import urllib.error
import urllib.parse
import base64
from . import database
import traceback
import hashlib
from . import sendEmail
from .models import NTESE_DOMAIN, NTEST_PORT, PASSWORD_NTESE, USERNAME_NTESE, STATUS_UNPROCESSED
from util import log_utils
import time
from core.async_device_result import async_devices
from core import redisfactory
CALLBACK_CACHE = redisfactory.getDB(14)
prefix_callback_email_username = "******"

# logger = logging.getLogger('url_refresh')
# logger.setLevel(logging.DEBUG)

logger = log_utils.get_celery_Logger()

query_db_session = database.query_db_session()


def verify(urls, db, status='FINISHED', devs={}):
    url_status = {}
    for url in urls:
        try:
            try:
示例#12
def get_search_result_by_rid_autodesk_new(rid, username):
    '''
    Query the generic refresh result by rid
    '''
    results = []
    result_cache = redisfactory.getDB(3)
    refresh_result = redisfactory.getDB(5)
    for rid in rid.split(','):
        result = result_cache.get('autodesk' + username + rid)
        if not result:
            refresh_request = query_db_session.request.find_one({
                'username': username,
                '_id': ObjectId(rid)
            })

            try:
                type_t = config.get('query_type', username)
            except Exception:
                logger.error(
                    'get_search_result_by_rid_autodesk_new get query_type error:%s'
                    % traceback.format_exc())
                logger.info("query_type username:%s" % username)
                type_t = "ALL"
            finishedTime = None
            if type_t == 'FC':
                finishedTime_timestamps = refresh_result.get('RID_%s_FC_F' %
                                                             rid)
                if finishedTime_timestamps:
                    # convert the timestamp to a datetime
                    finishedTime = datetime.datetime.fromtimestamp(
                        float(finishedTime_timestamps))
            elif type_t == "HPCC":
                finishedTime_timestamps = refresh_result.get('RID_%s_HPCC_F' %
                                                             rid)
                if finishedTime_timestamps:
                    # convert the timestamp to a datetime
                    finishedTime = datetime.datetime.fromtimestamp(
                        float(finishedTime_timestamps))
            else:
                finishedTime_timestamps_fc = refresh_result.get('RID_%s_FC_F' %
                                                                rid)
                finishedTime_timestamps_hpcc = refresh_result.get(
                    'RID_%s_HPCC_F' % rid)
                if finishedTime_timestamps_fc and finishedTime_timestamps_hpcc:
                    if float(finishedTime_timestamps_fc) > float(
                            finishedTime_timestamps_hpcc):
                        finishedTime = datetime.datetime.fromtimestamp(
                            float(finishedTime_timestamps_fc))
                    else:
                        finishedTime = datetime.datetime.fromtimestamp(
                            float(finishedTime_timestamps_hpcc))

            if refresh_request:

                # finishedTime = str(refresh_request.get('finish_time_autodesk')) if refresh_request.get(
                #         'finish_time_autodesk') else None
                logger.debug(
                    'test1 get_search_result_by_rid_autodesk refresh_request:%s'
                    % refresh_request)
                # if finishedTime:
                #     remain_time = 0
                # else:
                #     time_now_timestamp = time.time()
                #     executed_end_time_timestamp = refresh_request.get('remain_time_failed_timestamp')
                #     remain_time = executed_end_time_timestamp - time_now_timestamp
                #     # if remain_time bigger 30s, go to normal progress
                #     if remain_time > 0:
                #         pass
                #     else:
                #         remain_time = 0

                logger.debug(
                    'test3 get_search_result_by_rid_autodesk refresh_request:%s'
                    % refresh_request)
                finishedTime = str(finishedTime) if finishedTime else None
                urlStatus = [
                    get_urlStatus_autodesk(u)
                    for u in query_db_session.url.find({"r_id": ObjectId(rid)})
                ]
                successCount = len(
                    [u for u in urlStatus if u.get('code') == 200])
                time_now_timestamp = time.time()
                # refresh_request['remain_time_failed_timestamp'] = time.mktime(refresh_request.get('created_time').timetuple()) + float(100)
                # judge_status_urls = status_urls([u for u in query_db_session.url_autodesk.find({'r_id': ObjectId(rid)})])

                if finishedTime:
                    status = 'SUCCESS'
                    totalTime = float(finishedTime_timestamps) - time.mktime(
                        refresh_request.get('created_time').timetuple())
                    remain_time = 0
                    # remain_time = float(finishedTime_timestamps) - time_now_timestamp
                    # remain_time = refresh_request.get('remain_time_return_timestamp') - time_now_timestamp

                elif not finishedTime:
                    try:
                        if refresh_request.get('remain_time_failed_timestamp'
                                               ) < time_now_timestamp:
                            status = 'SUCCESS'
                            remain_time = 0
                            finishedTime = datetime.datetime.fromtimestamp(
                                refresh_request.get(
                                    'remain_time_failed_timestamp')).strftime(
                                        '%Y-%m-%d %H:%M:%S')
                            totalTime = (
                                datetime.datetime.fromtimestamp(
                                    refresh_request.get(
                                        'remain_time_failed_timestamp')) -
                                refresh_request.get('created_time')).seconds
                            logger.debug(
                                'not judge_status_urls finishedTime:%s' %
                                finishedTime)
                        else:
                            status = 'UNKNOWN'
                            remain_time = refresh_request.get(
                                'remain_time_failed_timestamp'
                            ) - time_now_timestamp
                            finishedTime = None
                            totalTime = (
                                datetime.datetime.now() -
                                refresh_request.get('created_time')).seconds
                            logger.debug(
                                'not judge_status_urls >, remain_time:%s, finishedTime:%s'
                                % (remain_time, finishedTime))
                    except Exception:
                        logger.debug(
                            'get_search_result_by_rid_autodesk not judge_status_urls error:%s'
                            % traceback.format_exc())

                # elif successCount == len(urlStatus):
                #     status = 'SUCCESS'
                # else:
                #     status = 'FAILED'
                result = {
                    'r_id': rid,
                    'status': status,
                    'createdTime': str(refresh_request.get('created_time')),
                    'finishedTime': finishedTime,
                    'successRate': 1 if successCount == len(urlStatus) else float(successCount) / len(urlStatus),
                    'totalTime': totalTime,
                    'username': username,
                    'urlStatus': urlStatus,
                    'remain_time': remain_time
                }
                if result.get('status') == 'SUCCESS' and remain_time == 0:
                    result_cache.set('autodesk' + username + rid,
                                     json.dumps(result))
                    result_cache.expire('autodesk' + username + rid, 300)
            else:
                result = {'r_id': rid, 'msg': '%s not found.' % rid}
示例#13
def get_search_result_by_rid(rid, username):
    '''
    Query the generic refresh result by rid
    '''
    results = []
    result_cache = redisfactory.getDB(3)

    for rid in rid.split(','):
        result = result_cache.get(username + rid)
        if not result:
            refresh_request = query_db_session.request.find_one({
                'username': username,
                '_id': ObjectId(rid)
            })
            logger.debug("get_search_result_by_rid refresh_request:%s" %
                         refresh_request)
            if refresh_request and refresh_request.get('task_id'):
                result = get_search_result_by_rid_webluker_one(
                    refresh_request.get('task_id'), username, str(rid))
            else:
                if refresh_request:
                    totalTime = (refresh_request.get('finish_time',
                                                     datetime.datetime.now()) -
                                 refresh_request.get('created_time')).seconds
                    urlStatus = [
                        get_urlStatus(u) for u in query_db_session.url.find(
                            {"r_id": ObjectId(rid)})
                    ]
                    successCount = len(
                        [u for u in urlStatus if u.get('code') == 200])
                    result = {
                        'code': 200,
                        'r_id': rid,
                        'status': 'UNKNOWN' if refresh_request.get('status') == 'PROGRESS' else 'SUCCESS',
                        'createdTime': str(refresh_request.get('created_time')),
                        'finishedTime': str(refresh_request.get('finish_time')) if refresh_request.get('finish_time') else None,
                        'successRate': 1 if successCount == len(urlStatus) else float(successCount) / len(urlStatus),
                        'totalTime': totalTime,
                        'username': username,
                        'urlStatus': urlStatus
                    }
                    if result.get('status') == 'SUCCESS':
                        result_cache.set(username + rid, json.dumps(result))
                        result_cache.expire(username + rid, 300)
                else:
                    result = {
                        'code': 404,
                        'r_id': rid,
                        'msg': '%s not found.' % rid
                    }
        else:
            result = json.loads(result)
        results.append(result)
    return results
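A call sketch showing how the comma-separated rid string is handled; both ids below are invented 24-character hex strings so that ObjectId() accepts them:

results = get_search_result_by_rid('58a1b2c3d4e5f60718293a4b,58a1b2c3d4e5f60718293a4c', 'demo_user')
for r in results:
    print('%s %s' % (r.get('r_id'), r.get('status')))  # 'SUCCESS' once every url reports code 200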
示例#14
Goal: for preload tasks in the system whose status is still PROGRESS more than two hours after creation, change the status to FAILED
@modify time  2016/6/21
'''

import datetime
from core.preload_worker import set_finished
from util import log_utils
import simplejson as json
from core import redisfactory, database
import os
from core.models import PRELOAD_STATUS_NOT_REPORT

# MongoDB connection handle
db = database.s1_db_session()
# Redis connection handle
PRELOAD_CACHE = redisfactory.getDB(1)
logger = log_utils.get_pcelery_Logger()


def get_preload_url_info():
    """
    Find preload_url records whose status is PROGRESS and whose created_time is more than two hours old,
    and update them so their status becomes FAILED.
    """
    # later than yesterday's start time and earlier than two hours before now
    now = datetime.datetime.now()
    yesterday = datetime.datetime.now() - datetime.timedelta(days=1)
    yesterday_begin = datetime.datetime.combine(yesterday, datetime.time())
    # print yesterday_begin
    #    result = db.preload_url.find({"status": "PROGRESS", "created_time": {"$lt": (datetime.datetime.now() - datetime.timedelta(hours=2)),
    #                                                                         "$gt": yesterday_begin}})
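The function body above is cut off after the commented-out query; a minimal sketch of how the lookup and the PROGRESS-to-FAILED update described in the docstring could continue (field names follow the commented query; updating the status directly is an assumption, the real module may go through set_finished instead):

    two_hours_ago = now - datetime.timedelta(hours=2)
    stale = db.preload_url.find({"status": "PROGRESS",
                                 "created_time": {"$lt": two_hours_ago, "$gt": yesterday_begin}})
    for url in stale:
        db.preload_url.update({"_id": url["_id"]},
                              {"$set": {"status": "FAILED"}})
        logger.info("preload url %s marked FAILED after two hours in PROGRESS" % url["_id"])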
示例#15
@license:
@contact: [email protected]
@site: 
@software: PyCharm
@file: preload_channel_script.py
@time: 16-11-8 11:18 AM
"""
import xlrd
import logging
import traceback
from core import redisfactory, rcmsapi
from core.database import query_db_session, db_session
from datetime import datetime
import sys

PRELOAD_DEVS = redisfactory.getDB(5)
db = db_session()
# LOG_FILENAME = '/Application/bermuda/logs/autodesk_postal.log'
LOG_FILENAME = '/home/rubin/logs/preload_channel_script.log'

# logging.basicConfig(filename=LOG_FILENAME,
#                     format='%(asctime)s - %(name)s - %(levelname)s - %(process)d - Line:%(lineno)d - %(message)s',
#                     level=logging.INFO)
formatter = logging.Formatter(
    "%(asctime)s - %(name)s - %(levelname)s - %(process)d - Line:%(lineno)d - %(message)s"
)
fh = logging.FileHandler(LOG_FILENAME)
fh.setFormatter(formatter)

logger = logging.getLogger('preload_channel_script')
logger.addHandler(fh)
示例#16
import uuid, asyncore, traceback, datetime, time

from .asyncpostal_r import HttpClient_r
import logging
import core.redisfactory as redisfactory
from util.tools import get_active_devices
from core import database

# connect 82 server
# con83 = MongoClient('mongodb://*****:*****@223.202.52.82:27018/bermuda')
# db = con83.bermuda

db = database.query_db_session()

redis_name_id = redisfactory.getDB(7)

LOG_FILENAME = '/Application/bermuda3/logs/postal_r.log'
# LOG_FILENAME = '/home/rubin/logs/rubin_postal.log'

# logging.basicConfig(filename=LOG_FILENAME,
#                     format='%(asctime)s - %(name)s - %(levelname)s - %(process)d - Line:%(lineno)d - %(message)s',
#                     level=logging.INFO)
formatter = logging.Formatter(
    "%(asctime)s - %(name)s - %(levelname)s - %(process)d - Line:%(lineno)d - %(message)s"
)
fh = logging.FileHandler(LOG_FILENAME)
fh.setFormatter(formatter)

logger = logging.getLogger('postal_r')
logger.addHandler(fh)
示例#17
Created on 2012-3-1

@author: wenwen
'''
from core import redisfactory
import urllib.request, urllib.parse, urllib.error
import simplejson as json
from werkzeug.exceptions import Forbidden, Unauthorized, HTTPException
from core.models import URL_OVERLOAD_PER_HOUR, DIR_OVERLOAD_PER_HOUR, PRELOAD_URL_OVERLOAD_PER_HOUR
from util import log_utils
from .database import query_db_session
import traceback

db = query_db_session()

user_cache = redisfactory.getDB(2)  # temporary data stored in redis

CHECKIN_URL = "https://portal.chinacache.com/public-api/checkin.action?%s"  # authenticate via the portal
CACHE_TIMEOUT = 1800

logger = log_utils.get_receiver_Logger()


def addCache(user_key, ticket):
    user_cache.set(user_key, ticket)
    user_cache.expire(user_key, CACHE_TIMEOUT)


def verify(username, password, remote_addr):
    '''
        User authentication
示例#18
# import receiver
from util.tools import JSONEncoder, load_task, delete_urlid_host, get_mongo_str
from core.config import config
from celery.task import task
# STATUS_RESOLVE_FAILED = 500
from core.models import STATUS_RETRY_SUCCESS, STATUS_RESOLVE_FAILED
import datetime
import traceback
from core import redisfactory
from core.database import db_session, query_db_session, s1_db_session
from util import log_utils



db = db_session()
q_db = query_db_session()
db_s1 = s1_db_session()
# link detection in redis
dev_detect = redisfactory.getDB(7)
logger = log_utils.get_receiver_Logger()
expire_time = 3600


def assemble_command_info(rid, host):
    """
    according to the request rid and host info, assemble the command information
    :param rid: the _id of the request
    :param host: device id
    :return:
    """
    # RL = RedisLock()
    # has_lock, value = RL.lock(id_host)
    # if has_lock:
示例#19
# '172.16.12.136', '172.16.12.135', '172.16.12.134')
# uri = 'mongodb://*****:*****@%s:27017/bermuda' % ('223.202.52.82')
# con = MongoClient(uri)['bermuda']
# REDIS_CLIENT = redis.StrictRedis(host='%s' % '172.16.21.205', port=6379, db=9)
LOG_FILENAME = '/Application/bermuda3/logs/count_device.log'
logging.basicConfig(filename=LOG_FILENAME,
                    format='%(asctime)s - %(name)s - %(levelname)s - %(process)d - Line:%(lineno)d - %(message)s',
                    level=logging.INFO)

logger = logging.getLogger('count_device')
logger.setLevel(logging.DEBUG)

# db = database.db_session()
q_db = database.query_db_session()

DEVICE_TYPE = redisfactory.getDB(12)

#RCMS_API = 'https://rcmsapi.chinacache.com/device/name/{0}/apps'
RCMS_API = 'https://cms3-apir.chinacache.com/device/name/{0}/apps'
#RCMS_DEVICES_LAYER = 'https://rcmsapi.chinacache.com/upperlayer/devices'
RCMS_DEVICES_LAYER = 'https://cms3-apir.chinacache.com/upperlayer/devices'
#RCMS_DEVICES = "https://rcmsapi.chinacache.com/devices"
RCMS_DEVICES = "https://cms3-apir.chinacache.com/devices"
# RCMS_DEVICES = "http://rcmsapi-private.chinacache.net:36000/devices"
#  get node info url
#RCMS_DEVICES_NODEINFO = "https://rcmsapi.chinacache.com/nodes"
RCMS_DEVICES_NODEINFO = "https://cms3-apir.chinacache.com/nodes"
RMS_NODE_INFO = "https://ris.chinacache.com/v3/resource/node/businessnodes"

# def get_redis_data(date_str):
#     try:
示例#20
from flask import Blueprint, request, make_response, jsonify, Response
import simplejson as json
from core.generate_id import ObjectId
from util import log_utils
from util import rsa_tools, cert_tools, tools
from core import redisfactory, authentication, database, queue
from core import cert_trans_worker, cert_query_worker, transfer_cert_worker
from core.config import config
from util.link_portal_cms import check_cert_cms
from util.link_portal_cms import cert_portal_delete, cert_cms_delete

RECEV_HOST = socket.gethostname()
logger = log_utils.get_cert_Logger()
s1_db = database.s1_db_session()
db = database.db_session()
CERT_CACHE = redisfactory.getDB(10)
HPCC_SAVE_DIR = config.get('cert_trans', 'cache_dir')

certificate_transport = Blueprint(
    'certificate_transport',
    __name__,
)


@certificate_transport.route("/internal/cert/trans", methods=['GET', 'POST'])
def cert_trans():
    '''
    Receive the refresh request dispatched from the portal
    '''
    try:
        cert_data = request.data
示例#21
import simplejson as json
from celery.task import task
from core import rcmsapi, database, command_factory, redisfactory, sendEmail
from cache import api_hope, api_rcms
from core import cert_query_postal
from Crypto.Hash import SHA256
from pymongo.errors import DuplicateKeyError
from pymongo import ReturnDocument
from util import log_utils, rsa_tools, tools
from core.update import db_update
from core.link_detection_all import link_detection_cert, get_failed_cert_devs
from config import config
import logging

WORKER_HOST = socket.gethostname()
CERT_QUERY_CACHE = redisfactory.getDB(4)
s1_db = database.s1_db_session()
db = database.db_session()
logger = log_utils.get_cert_query_worker_Logger()
api_obj = api_hope.ApiHOPE()


@task(ignore_result=True, default_retry_delay=10, max_retries=3)
def dispatch(tasks):
    try:
        logger.debug("dispatch cert_query trans  begin %s" % len(tasks))
        logger.debug("dispatch cert_query trans  begin task_ids %s" %
                     [i['_id'] for i in tasks])
        dev_id, devs, devs_dict = init_cert_dev(tasks[0])
        logger.debug("devs is %s" % devs)
        logger.debug("cert_devs dev_id %s, devs len %s" % (dev_id, len(devs)))
示例#22
from asyncpostal import HttpClient
import asyncore
from bson.objectid import ObjectId
import core.redisfactory as redisfactory
from util import log_utils
import time
import datetime
import zlib
from copy import deepcopy
import uuid
from xml.dom.minidom import parseString
from core import database

db_db = database.db_session()

subcenter_factory = redisfactory.getDB(7)
EXPIRETIME = 600
EXPIRETIME_BEST = 10
logger = log_utils.get_cert_query_worker_Logger()

FIRST_HOST = 3


class SubBase(object):
    def __init__(self, dict_sub, **kwargs):

        self.failed_dev_list = dict_sub.get('failed_dev_list')
        self.branch_center_list = dict_sub.get('branch_center_list')
        self.subcenter_port = dict_sub.get('subcenter_port')
        self.subcenter_task_mq = dict_sub.get('subcenter_task_mq')
        self.subcenter_result_mq = dict_sub.get('subcenter_result_mq')
示例#23
import socket
from core import redisfactory, rcmsapi, database, queue
from werkzeug.exceptions import InternalServerError
import random
from util.change_url import encode_balank
from util.tools import add_rid_url_info_into_redis
from core.config import config

# logger = logging.getLogger('receiver')
# logger.setLevel(logging.DEBUG)

# logger = log_utils.get_receiver_Logger()
logger = log_utils.get_celery_Logger()

queue_name = 'request_queue'
REWRITE_CACHE = redisfactory.getDB(15)
COUNTER_CACHE = redisfactory.getDB(4)
REGEXCONFIG = redisfactory.getDB(8)
RECEIVER_HOST = socket.gethostname()


def get_refreshurl(username, url):
    '''

    Assemble the URL dict and set layer_type according to the username

    Parameters:

        username :  the user

        url :  the URL info
示例#24
# -*- coding: utf-8 -*-
import logging, datetime, simplejson, time, traceback, urllib2, hashlib
from core.database import query_db_session, db_session
from core import redisfactory
from core.config import config
import simplejson as json
# from core.generate_id import ObjectId
from bson.objectid import ObjectId
from core.models import URL_OVERLOAD_PER_HOUR, DIR_OVERLOAD_PER_HOUR, PRELOAD_URL_OVERLOAD_PER_HOUR
import uuid

db = db_session()
CACHERECORD = redisfactory.getMDB(7)
H_PRIORITY_CACHE = redisfactory.getDB(15)

PHYSICAL_URL = redisfactory.getDB(9)
PHYSICAL_URL_key = "physical_url"

LOG_FILENAME = '/Application/bermuda/logs/bermuda_tools.log'
# LOG_FILENAME = '/home/rubin/logs/bermuda_tools.log'
formatter = logging.Formatter(
    "%(asctime)s - %(name)s - %(levelname)s - %(process)d - Line:%(lineno)d - %(message)s"
)
fh = logging.FileHandler(LOG_FILENAME)
fh.setFormatter(formatter)

logger = logging.getLogger('monitor_region_devs')
logger.addHandler(fh)
logger.setLevel(logging.DEBUG)

expire_time = 5 * 24 * 60 * 60
示例#25
import socket
from core import redisfactory, rcmsapi, database, queue
from werkzeug.exceptions import InternalServerError
from core.config import config
from util.tools import add_rid_url_info_into_redis, judge_contain_chinese
import urllib.request, urllib.parse, urllib.error
import urllib.request, urllib.error, urllib.parse

# logger = logging.getLogger('receiver')
# logger.setLevel(logging.DEBUG)

# logger = log_utils.get_receiver_Logger()
logger = log_utils.get_celery_Logger()

queue_name = 'request_queue'
REWRITE_CACHE = redisfactory.getDB(15)
COUNTER_CACHE = redisfactory.getDB(4)
REGEXCONFIG = redisfactory.getDB(8)
CALLBACK_CACHE = redisfactory.getDB(14)
prefix_callback_email_username = "******"

DOMAIN_IGNORE = redisfactory.getDB(9)
DOMAIN_KEY = 'domain_ignore'

DIRANDURL = redisfactory.getDB(9)
DIRANDURL_KEY = "dir_and_url"

RECEIVER_HOST = socket.gethostname()
# try:
#     user_list_special = eval(config.get('user_special', 'users'))
# except Exception, e:
示例#26
def get_search_result_by_rid_webluker_one(rid, username, rid_r):
    """

    Args:
        rid:

    Returns:

    """
    result_cache = redisfactory.getDB(3)
    refresh_request = db_s1.task_forward.find_one({'task_id': rid})

    if refresh_request:
        created_time = refresh_request.get('created_time')
        finish_time = refresh_request.get('finish_time')
        # compare finish_time with the current time
        finish_time_timestamps = time.mktime(finish_time.timetuple())
        time_now = datetime.datetime.now()
        time_now_timestamps = time.mktime(time_now.timetuple())
        flag = finish_time_timestamps < time_now_timestamps
        if finish_time_timestamps > time_now_timestamps:
            finish_time = time_now
        totalTime = (finish_time - created_time).seconds

        # totalTime = (refresh_request.get('finish_time', datetime.datetime.now())
        #          - refresh_request.get('created_time')).seconds
        webluer_result = refresh_get_data_from_webluker(rid)
        logger.debug(
            "get_search_result_by_rid_webluker_one webluer_result:%s" %
            webluer_result)
        urlStatus = get_urlStatus_webluker(webluer_result,
                                           refresh_request.get('urls'),
                                           success=flag)

        urlStatus2 = get_urlStatus_webluker(webluer_result,
                                            refresh_request.get('dirs'),
                                            success=flag)
        urlStatus.extend(urlStatus2)
        success_count = len(
            [url for url in urlStatus if url.get("code") == 200])

        # successCount = 0 if flag else (len(refresh_request.get('urls')) + len(refresh_request))
        success_rate = 0 if not urlStatus else float(success_count) / len(urlStatus)
        result = {
            'code': 200,
            'r_id': rid_r,
            'status': 'UNKNOWN' if
            (success_count != len(urlStatus)) else 'SUCCESS',
            'createdTime': str(refresh_request.get('created_time')),
            'finishedTime': str(finish_time) if flag else None,
            'successRate': success_rate,
            'totalTime': totalTime,
            'username': username,
            'urlStatus': urlStatus
        }
        if result.get('status') == 'SUCCESS':
            result_cache.set(username + rid_r, json.dumps(result))
            result_cache.expire(username + rid_r, 300)
    else:
        result = {'code': 404, 'r_id': rid_r, 'msg': '%s not found.' % rid_r}
    return result
示例#27
@time: 17-8-7 4:53 PM
"""
import simplejson as json
import traceback
from bson import ObjectId
import math
from util import log_utils
from core.database import query_db_session, db_session
from core import redisfactory

logger = log_utils.get_admin_Logger()

db = db_session()
q_db = query_db_session()

REWRITE_CACHE = redisfactory.getDB(15)


def physical_del_channel_qu(username, channel_original):
    """
    according to username and channel_original, find the related physical_del_channel records
    :param username: the name of the user
    :param channel_original: the name of the channel/url
    :return: the list of collection physical_del_channel like [{'_id':xxx, 'username':xxxx, 'channel_list':XXXx},\
          {'_id':xxx, 'username':xxxx, 'channel_list':XXXx}]
    """
    logger.debug(
        "physical_del_channel_query  username:%s, physical_del_channel:%s" %
        (username, channel_original))
    list_physical_del_channel_temp = []
    result_list = []
示例#28
def get_search_result_by_rid_autodesk(rid, username):
    '''
    Query the generic refresh result by rid
    '''
    results = []
    result_cache = redisfactory.getDB(3)

    for rid in rid.split(','):
        result = result_cache.get('autodesk' + username + rid)
        if not result:
            refresh_request = query_db_session.request.find_one({
                'username': username,
                '_id': ObjectId(rid)
            })
            if refresh_request:

                finishedTime = str(
                    refresh_request.get('finish_time_autodesk')
                ) if refresh_request.get('finish_time_autodesk') else None
                logger.debug(
                    'test1 get_search_result_by_rid_autodesk refresh_request:%s'
                    % refresh_request)
                if finishedTime:
                    remain_time = 0
                    # totalTime = (refresh_request.get('finish_time_autodesk', datetime.datetime.now())
                    #          - refresh_request.get('created_time')).seconds
                    # urlStatus = [get_urlStatus_autodesk(u)
                    #          for u in query_db_session.url_autodesk.find({"r_id": ObjectId(rid)})]
                    # successCount = len([u for u in urlStatus if u.get('code') == 200])
                else:
                    time_now_timestamp = time.time()
                    executed_end_time_timestamp = refresh_request.get(
                        'executed_end_time_timestamp')
                    remain_time = executed_end_time_timestamp - time_now_timestamp
                    # if remain_time bigger 30s, go to normal progress
                    if remain_time > 10:
                        pass
                        # totalTime = (refresh_request.get('finish_time_autodesk', datetime.datetime.now())
                        #      - refresh_request.get('created_time')).seconds
                        # urlStatus = [get_urlStatus_autodesk(u)
                        #      for u in query_db_session.url_autodesk.find({"r_id": ObjectId(rid)})]
                        # successCount = len([u for u in urlStatus if u.get('code') == 200])
                    else:
                        logger.debug('query db and insert data rid:%s' % rid)
                        # auto trigger change status of url_autodesk
                        update_url_request_autodesk(rid)
                        remain_time = 0
                        refresh_request = query_db_session.request.find_one({
                            'username': username,
                            '_id': ObjectId(rid)
                        })
                        logger.debug(
                            'test2 get_search_result_by_rid_autodesk refresh_request:%s'
                            % refresh_request)
                logger.debug(
                    'test3 get_search_result_by_rid_autodesk refresh_request:%s'
                    % refresh_request)

                finishedTime = str(
                    refresh_request.get('finish_time_autodesk')
                ) if refresh_request.get('finish_time_autodesk') else None
                totalTime = (refresh_request.get('finish_time_autodesk',
                                                 datetime.datetime.now()) -
                             refresh_request.get('created_time')).seconds
                urlStatus = [
                    get_urlStatus_autodesk(u) for u in
                    query_db_session.url_autodesk.find({"r_id": ObjectId(rid)})
                ]
                successCount = len(
                    [u for u in urlStatus if u.get('code') == 200])
                time_now_timestamp = time.time()
                judge_status_urls = status_urls([
                    u for u in query_db_session.url_autodesk.find(
                        {'r_id': ObjectId(rid)})
                ])

                if not finishedTime:
                    status = 'UNKNOWN'
                    remain_time = refresh_request.get(
                        'remain_time_return_timestamp') - time_now_timestamp
                elif not judge_status_urls:
                    try:
                        if refresh_request.get('remain_time_failed_timestamp'
                                               ) < time_now_timestamp:
                            status = 'SUCCESS'
                            remain_time = 0
                            finishedTime = datetime.datetime.fromtimestamp(
                                refresh_request.get(
                                    'remain_time_failed_timestamp')).strftime(
                                        '%Y-%m-%d %H:%M:%S')
                            totalTime = (
                                datetime.datetime.fromtimestamp(
                                    refresh_request.get(
                                        'remain_time_failed_timestamp')) -
                                refresh_request.get('created_time')).seconds
                            logger.debug(
                                'not judge_status_urls finishedTime:%s' %
                                finishedTime)
                        else:
                            status = 'UNKNOWN'
                            remain_time = refresh_request.get(
                                'remain_time_failed_timestamp'
                            ) - time_now_timestamp
                            # finishedTime = datetime.datetime.strptime(datetime.datetime.fromtimestamp(
                            #         refresh_request.get('remain_time_failed_timestamp')), '%Y-%m-%d %H:%M:%S')
                            finishedTime = None
                            totalTime = (
                                datetime.datetime.now() -
                                refresh_request.get('created_time')).seconds
                            logger.debug(
                                'not judge_status_urls >, remain_time:%s, finishedTime:%s'
                                % (remain_time, finishedTime))
                    except Exception:
                        logger.debug(
                            'get_search_result_by_rid_autodesk not judge_status_urls error:%s'
                            % traceback.format_exc())

                elif successCount == len(urlStatus):
                    status = 'SUCCESS'
                else:
                    status = 'FAILED'
                result = {
                    'r_id': rid,
                    'status': status,
                    'createdTime': str(refresh_request.get('created_time')),
                    'finishedTime': finishedTime,
                    'successRate': 1 if successCount == len(urlStatus) else float(successCount) / len(urlStatus),
                    'totalTime': totalTime,
                    'username': username,
                    'urlStatus': urlStatus,
                    'remain_time': remain_time
                }
                if result.get('status') == 'SUCCESS' and remain_time == 0:
                    result_cache.set('autodesk' + username + rid,
                                     json.dumps(result))
                    result_cache.expire('autodesk' + username + rid, 300)
            else:
                result = {'r_id': rid, 'msg': '%s not found.' % rid}
        else:
            result = json.loads(result)
        results.append(result)
示例#29
import time
import datetime
import json
from core.database import s1_db_session
from core import redisfactory

CERT_PULL_CACHE = redisfactory.getDB(1)
db = s1_db_session()


def run():
    db.cert_update_pull.ensure_index('update_time', unique=True)
    transfer_cert = []
    add_certs = []
    t = int(time.strftime('%Y%m%d'))  # t=20171212
    # compute yesterday with date arithmetic so month and year boundaries are handled correctly
    yesterday = datetime.date.today() - datetime.timedelta(days=1)
    g = yesterday.strftime('%Y%m%d')
    l = str(t)
    data = db.cert_detail.find({
        'created_time': {
            "$gte": datetime.datetime(int(g[0:4]), int(g[4:6]), int(g[6:8])),
            "$lte": datetime.datetime(int(l[0:4]), int(l[4:6]), int(l[6:8]))
        }
    })
    for d in data:
        co = {
            'cert': d.get('cert', ''),
            'p_key': d.get('p_key', ''),
            's_name': d.get('save_name'),
            'task_id': d.get('save_name', ''),
            'seed': d.get('seed', ''),
            'op_type': d.get('op_type', ''),
示例#30
from bson import ObjectId
import logging
import urllib
import math
import re
import pymongo
import simplejson as json
from models import load_url
from util import log_utils
from core import redisfactory
from core.database import db_session, query_db_session, s1_db_session
from sys import exit

logger = log_utils.get_admin_Logger()
# logger = logging.getLogger("preload_models")
# logger.setLevel(logging.DEBUG)

preload_cache = redisfactory.getDB(1)
USER_CACHE = redisfactory.getDB(2)
PRELOAD_DEVS = redisfactory.getDB(5)
CACHE_TIMEOUT = 86400
db = db_session()
q_db = query_db_session()
s1_db = s1_db_session()


def get_channels(args):
    per_page = 30
    curpage = args.pop("curpage")
    if not args.get("username"):
        args.pop("username")
    if not args.get("channel_name"):
        args.pop("channel_name")