def __init__(self):
     self.__timer = 0
     self.__paraSHA1Path = './parameter/SHA1'
     self.__paraFilePath = './parameter/parameter.json'
     self.__tokenLogpath = './log/'
     self.__logfilepath = 'TOKENOPERSTOR_LOG.log'
     # check
     self.__newParaChecker = ParaHashcheck(f_path=self.__paraSHA1Path,
                                           p_path=self.__paraFilePath)
     self.__checkResulfFlag = self.__newParaChecker.checkparameter()
     # parameter
     self.__paraLoder = ParaLoder(self.__paraFilePath,
                                  self.__checkResulfFlag,
                                  'Wechat_interface', 'BerkeleyDB')
     self.__parameter = self.__paraLoder.loadParameter()
     # log handlers
     self.__baseTokenLogger = NewLogger(m_path=self.__tokenLogpath,
                                        f_path=self.__logfilepath)
     self.__tokenLogger = self.__baseTokenLogger.setting()
     # create data dict
     self.__data = {
         'corpid': self.__parameter['Wechat_interface']['corpid'],
         'secret': self.__parameter['Wechat_interface']['secret']
     }
     self.__url = self.__parameter['Wechat_interface']['gettokenurl']
     # bsddb
     self.__dbHandle = BerkeleyDB(
         b_path=self.__parameter['BerkeleyDB']['bdb_path'],
         b_file=self.__parameter['BerkeleyDB']['Token_DB'])
Example #2
 def __init__(self, p_path, chk_result, *p_item):
     self.__checkResult = chk_result
     self.__parameterPath = p_path
     self.__PHlogpath = './log/'
     self.__logfilepath = 'JSON2PARA_LOG.log'
     self.__baselog = NewLogger(m_path=self.__PHlogpath,
                                f_path=self.__logfilepath)
     self.__hashlog = self.__baselog.setting()
     self.__parameterIteam = p_item
     self.resultDict = {}
    def __init__(self, f_path, p_path):
        self.__PHlogpath = './log/'
        self.__logfilepath = 'HASHCHECK_LOG.log'
        self.__baselog = NewLogger(m_path=self.__PHlogpath,
                                   f_path=self.__logfilepath)
        self.__hashlog = self.__baselog.setting()

        self.__hash_text = hashlib.sha1(
            open(file=p_path, mode='r').read().encode('utf-8')).hexdigest()
        self.__hashlog.info('calculated hash: %s' % self.__hash_text)
        self.__file = open(file=f_path, mode='r')
        self.__sha1code = json.loads(self.__file.read())
        self.__hashlog.info('hash from file: %s' %
                            self.__sha1code['para_hashcode'])
Example #4
class ParaLoder(object):
    def __init__(self, p_path, chk_result, *p_item):
        self.__checkResult = chk_result
        self.__parameterPath = p_path
        self.__PHlogpath = './log/'
        self.__logfilepath = 'JSON2PARA_LOG.log'
        self.__baselog = NewLogger(m_path=self.__PHlogpath,
                                   f_path=self.__logfilepath)
        self.__hashlog = self.__baselog.setting()
        self.__parameterIteam = p_item
        self.resultDict = {}

    def loadParameter(self):
        if self.__checkResult == 1:
            parameter_dict = open(file=self.__parameterPath, mode='r')
            para_dict = json.loads(parameter_dict.read())
            for key, iteam in enumerate(self.__parameterIteam):
                self.__hashlog.info('load parameter iteam %s' % iteam)
                self.resultDict.update({iteam: para_dict[iteam]})

            # self.resultDict.update(DB_readonly=para_dict['DB_readonly'])
            # self.resultDict.update(DB_wrightonly=para_dict['DB_wrightonly'])
            # self.resultDict.update(MQ=para_dict['MQ'])
            # self.resultDict.update(Wechat_interface=para_dict['Wechat_interface'])
            # self.resultDict.update(BerkeleyDB=para_dict['BerkeleyDB'])
            self.resultDict.update(result=1)
            self.__hashlog.info('Hash check succeeded!')
            return self.resultDict
        else:
            self.resultDict.update(result=0)
            self.__hashlog.info(
                'parameter check failed, please check parameter file: %s' %
                self.__parameterPath)
            return self.resultDict
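
A minimal usage sketch for ParaLoder, assuming ParaHashcheck (shown further below) is importable and that parameter.json contains top-level sections named like the requested items (for example 'MQ' and 'BerkeleyDB'):

checker = ParaHashcheck(f_path='./parameter/SHA1',
                        p_path='./parameter/parameter.json')
loader = ParaLoder('./parameter/parameter.json',
                   checker.checkparameter(),
                   'MQ', 'BerkeleyDB')
params = loader.loadParameter()
if params['result'] == 1:
    # each requested item comes back as the matching dict from parameter.json
    print(params['MQ']['ip'])
else:
    print('parameter load failed, see ./log/JSON2PARA_LOG.log')
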
 def __init__(self):
     # initial
     self.__paraSHA1Path = './parameter/SHA1'
     self.__filePath = './parameter/SHA1'
     self.__paraFilePath = './parameter/parameter.json'
     self.__smwLogpath = './log/'
     self.__smwLogFilePath = 'SMW_LOG.log'
     # check
     self.__newParaChecker = ParaHashcheck(f_path=self.__paraSHA1Path,
                                           p_path=self.__paraFilePath)
     self.__checkResulfFlag = self.__newParaChecker.checkparameter()
     # parameter
     self.__smwParaLoder = ParaLoder(self.__paraFilePath,
                                     self.__checkResulfFlag, 'MQ',
                                     'BerkeleyDB')
     self.__smwParameter = self.__smwParaLoder.loadParameter()
     # log handlers
     self.__baseSMWLogger = NewLogger(m_path=self.__smwLogpath,
                                      f_path=self.__smwLogFilePath)
     self.__smwLogger = self.__baseSMWLogger.setting()
     # create data dict
     self.__headers = {
         'Content-Type': 'application/json',
     }
     self.__service_url = 'http://www.umisu.com/api/wechat-v1/message/send'
     # initial datamodel
     self.__smwDataReplace = DataReplace()
     # bsddb
     self.__dbHandle = BerkeleyDB(
         b_path=self.__smwParameter['BerkeleyDB']['bdb_path'],
         b_file=self.__smwParameter['BerkeleyDB']['SMW_IN_FAIL'])
     # initial MQ parameter
     self.__mq_ip = self.__smwParameter['MQ']['ip']
     self.__mq_user = self.__smwParameter['MQ']['user']
     self.__mq_password = self.__smwParameter['MQ']['passwd']
     self.__mq_exchange = self.__smwParameter['MQ']['exchange']
     self.__mq_routing_key = self.__smwParameter['MQ']['routingkey']
     self.__mq_queue = self.__smwParameter['MQ']['queue']
class ParaHashcheck(object):
    def __init__(self, f_path, p_path):
        self.__PHlogpath = './log/'
        self.__logfilepath = 'HASHCHECK_LOG.log'
        self.__baselog = NewLogger(m_path=self.__PHlogpath,
                                   f_path=self.__logfilepath)
        self.__hashlog = self.__baselog.setting()

        self.__hash_text = hashlib.sha1(
            open(file=p_path, mode='r').read().encode('utf-8')).hexdigest()
        self.__hashlog.info('calculated hash: %s' % self.__hash_text)
        self.__file = open(file=f_path, mode='r')
        self.__sha1code = json.loads(self.__file.read())
        self.__hashlog.info('hash from file: %s' %
                            self.__sha1code['para_hashcode'])

    def checkparameter(self):
        if self.__sha1code['para_hashcode'] == self.__hash_text:
            self.__hashlog.info('Hash check was successful')
            return 1
        else:
            self.__hashlog.info('Hash check failed!')
            return 0
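
ParaHashcheck reads f_path as a small JSON file whose para_hashcode field holds the SHA-1 of the parameter file. The generation step is not shown in the source; a hedged sketch of how such a file could be produced so that checkparameter() returns 1 (the helper name write_sha1_file is hypothetical):

import hashlib
import json

def write_sha1_file(p_path='./parameter/parameter.json',
                    f_path='./parameter/SHA1'):
    # hash the parameter file the same way ParaHashcheck does
    with open(p_path, mode='r') as pf:
        digest = hashlib.sha1(pf.read().encode('utf-8')).hexdigest()
    # write the JSON structure that ParaHashcheck expects to read back
    with open(f_path, mode='w') as sf:
        json.dump({'para_hashcode': digest}, sf)
    return digest
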
os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu_id

args.cuda = not args.no_cuda and torch.cuda.is_available()
np.random.seed(args.seed)
torch.manual_seed(args.seed)
# torch.multiprocessing.set_sharing_strategy('file_system')

if args.cuda:
    torch.cuda.manual_seed_all(args.seed)
    cudnn.benchmark = True

# create logger
# Define a SummaryWriter instance for visualization
writer = SummaryWriter(logdir=args.check_path, filename_suffix='_first')

sys.stdout = NewLogger(osp.join(args.check_path, 'log.%s.txt' % time.strftime("%Y.%m.%d", time.localtime())))

kwargs = {'num_workers': args.nj, 'pin_memory': False} if args.cuda else {}
if not os.path.exists(args.check_path):
    os.makedirs(args.check_path)

opt_kwargs = {'lr': args.lr, 'lr_decay': args.lr_decay, 'weight_decay': args.weight_decay, 'dampening': args.dampening,
              'momentum': args.momentum}

l2_dist = nn.CosineSimilarity(dim=1, eps=1e-12) if args.cos_sim else nn.PairwiseDistance(p=2)

if args.acoustic_feature == 'fbank':
    transform = transforms.Compose([
        totensor()
    ])
    transform_T = transforms.Compose([
os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu_id

args.cuda = not args.no_cuda and torch.cuda.is_available()
np.random.seed(args.seed)
torch.manual_seed(args.seed)
# torch.multiprocessing.set_sharing_strategy('file_system')

if args.cuda:
    torch.cuda.manual_seed_all(args.seed)
    cudnn.benchmark = True

# create logger
# Define a SummaryWriter instance for visualization
writer = SummaryWriter(logdir=args.check_path, filename_suffix='_first')

sys.stdout = NewLogger(osp.join(args.check_path, 'log.txt'))

kwargs = {'num_workers': args.nj, 'pin_memory': False} if args.cuda else {}
if not os.path.exists(args.check_path):
    os.makedirs(args.check_path)

opt_kwargs = {
    'lr': args.lr,
    'lr_decay': args.lr_decay,
    'weight_decay': args.weight_decay,
    'dampening': args.dampening,
    'momentum': args.momentum
}

l2_dist = nn.CosineSimilarity(
    dim=1, eps=1e-12) if args.cos_sim else nn.PairwiseDistance(p=2)
# Restrict CUDA_VISIBLE_DEVICES in order to prevent any memory allocation on unused GPUs
os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu_id

args.cuda = not args.no_cuda and torch.cuda.is_available()
np.random.seed(args.seed)
torch.manual_seed(args.seed)
torch.multiprocessing.set_sharing_strategy('file_system')

if args.cuda:
    torch.cuda.manual_seed_all(args.seed)
    cudnn.benchmark = True

# create logger
# Define a SummaryWriter instance for visualization
kwargs = {'num_workers': args.nj, 'pin_memory': False} if args.cuda else {}
sys.stdout = NewLogger(os.path.join(os.path.dirname(args.resume), 'test.log'))


l2_dist = nn.CosineSimilarity(dim=1, eps=1e-6) if args.cos_sim else PairwiseDistance(2)

if args.input_length == 'var':
    transform = transforms.Compose([
        # concateinputfromMFB(num_frames=c.NUM_FRAMES_SPECT, remove_vad=False),
        varLengthFeat(remove_vad=args.remove_vad),
        # ConcateOrgInput(remove_vad=args.remove_vad),
    ])
    transform_T = transforms.Compose([
        # concateinputfromMFB(num_frames=c.NUM_FRAMES_SPECT, input_per_file=args.test_input_per_file, remove_vad=False),
        varLengthFeat(remove_vad=args.remove_vad),
    ])

args.cuda = not args.no_cuda and torch.cuda.is_available()
np.random.seed(args.seed)
torch.manual_seed(args.seed)
# torch.multiprocessing.set_sharing_strategy('file_system')

if args.cuda:
    torch.cuda.manual_seed_all(args.seed)
    cudnn.benchmark = True

# create logger
# Define a SummaryWriter instance for visualization
if not os.path.exists(args.xvector_dir):
    os.makedirs(args.xvector_dir)
sys.stdout = NewLogger(
    os.path.join(args.xvector_dir,
                 'log.%s.txt' % time.strftime("%Y.%m.%d", time.localtime())))

kwargs = {'num_workers': args.nj, 'pin_memory': False} if args.cuda else {}
extract_kwargs = {
    'num_workers': args.nj,
    'pin_memory': False
} if args.cuda else {}
opt_kwargs = {
    'lr': args.lr,
    'lr_decay': args.lr_decay,
    'weight_decay': args.weight_decay,
    'dampening': args.dampening,
    'momentum': args.momentum
}
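
The training-script preambles above redirect sys.stdout to a NewLogger so that console output also lands in a log file. The project's NewLogger implementation is not shown here; a minimal tee-style sketch of the idea, under the assumption that only write() and flush() are needed (the class name StdoutTee is hypothetical):

import sys

class StdoutTee(object):
    def __init__(self, log_path):
        self.terminal = sys.stdout
        self.log = open(log_path, 'a')

    def write(self, message):
        # echo to the console and append to the log file
        self.terminal.write(message)
        self.log.write(message)

    def flush(self):
        self.terminal.flush()
        self.log.flush()

# usage: sys.stdout = StdoutTee('./log/train.log')
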
class SendMessagetoWechat(object):
    def __init__(self):
        # initial
        self.__paraSHA1Path = './parameter/SHA1'
        self.__filePath = './parameter/SHA1'
        self.__paraFilePath = './parameter/parameter.json'
        self.__smwLogpath = './log/'
        self.__smwLogFilePath = 'SMW_LOG.log'
        # check
        self.__newParaChecker = ParaHashcheck(f_path=self.__paraSHA1Path,
                                              p_path=self.__paraFilePath)
        self.__checkResulfFlag = self.__newParaChecker.checkparameter()
        # parameter
        self.__smwParaLoder = ParaLoder(self.__paraFilePath,
                                        self.__checkResulfFlag, 'MQ',
                                        'BerkeleyDB')
        self.__smwParameter = self.__smwParaLoder.loadParameter()
        # log handlers
        self.__baseSMWLogger = NewLogger(m_path=self.__smwLogpath,
                                         f_path=self.__smwLogFilePath)
        self.__smwLogger = self.__baseSMWLogger.setting()
        # create data dict
        self.__headers = {
            'Content-Type': 'application/json',
        }
        self.__service_url = 'http://www.umisu.com/api/wechat-v1/message/send'
        # initial datamodel
        self.__smwDataReplace = DataReplace()
        # bsddb
        self.__dbHandle = BerkeleyDB(
            b_path=self.__smwParameter['BerkeleyDB']['bdb_path'],
            b_file=self.__smwParameter['BerkeleyDB']['SMW_IN_FAIL'])
        # initial MQ parameter
        self.__mq_ip = self.__smwParameter['MQ']['ip']
        self.__mq_user = self.__smwParameter['MQ']['user']
        self.__mq_password = self.__smwParameter['MQ']['passwd']
        self.__mq_exchange = self.__smwParameter['MQ']['exchange']
        self.__mq_routing_key = self.__smwParameter['MQ']['routingkey']
        self.__mq_queue = self.__smwParameter['MQ']['queue']

    def SendMessage(self):
        # get token
        mainToken = TokenOperator()
        token_resp = mainToken.getToken()
        # connect to mq
        rtflag, connect, parameter = createMQengine(self.__mq_ip,
                                                    self.__mq_user,
                                                    self.__mq_password)
        self.__smwLogger.info('createChannel')
        channel = connect.channel()
        self.__smwLogger.info('declareQueue')
        queue_name = channel.queue_declare(queue=self.__mq_queue, durable=True)
        self.__smwLogger.info('bindQueue')
        channel.queue_bind(exchange=self.__mq_exchange,
                           queue=self.__mq_queue,
                           routing_key=self.__mq_routing_key)

        def cb(ch, method, properties, body):
            # extract the access_token value
            return_status = ''
            # access_token = token_resp['AccessToken'] #20190326
            access_token = token_resp
            self.__smwLogger.warning('access token : %s' % access_token)
            rcv_msg = json.loads(body)
            # check_rest = checkToken(token_resp) #20190326 去除
            fail_id = []
            succ_id = []
            for enu, rcv_list in enumerate(rcv_msg):
                broken_timer = 0
                # print(enu,rcv_list,'out')
                while broken_timer <= 2:
                    # count each attempt: send at most three times, counting failures
                    msg_body = dataModel(rcv_list[3], rcv_list[4])
                    # use access_token here so a token refreshed in the retry branch actually takes effect
                    params = (('access_token', access_token), )
                    data = '{"msgtype":"text","agentid":1,"text":{"content":"%s"},"key":"SALPRC_FB_GRP"}' % (
                        msg_body)
                    data = data.encode('utf-8').decode('latin1')
                    response = requests.post(self.__service_url,
                                             headers=self.__headers,
                                             params=params,
                                             data=data)
                    response_text = json.loads(response.text)

                    if response_text['errcode'] == 0:
                        # reset the counter once the message is confirmed sent
                        broken_timer = 999
                        return_status = time.strftime(
                            "%Y-%m-%d %H:%M:%S",
                            time.localtime()) + ' ' + 'success!'
                        self.__smwLogger.info('message sent successfully!')
                        # print(return_status)
                        succ_id.append(rcv_list[0])
                    else:
                        # record the failure after a failed send
                        broken_timer += 1
                        return_status = time.strftime(
                            "%Y-%m-%d %H:%M:%S", time.localtime()
                        ) + ' ' + 'failed to send message, please check the interface return message: ' + response_text[
                            'errmsg']
                        self.__smwLogger.warning(
                            'failed to send message, please check the interface return message: %s'
                            % response_text['errmsg'])
                        if broken_timer == 2:
                            '''
                            Record the id of the data row that failed to send
                            '''
                            return_status = time.strftime(
                                "%Y-%m-%d %H:%M:%S", time.localtime()
                            ) + ' ' + 'sending failed 3 times, the system will record the failed event'
                            self.__smwLogger.warning(
                                'sending failed 3 times, the system will record the failed event'
                            )
                            # print(return_status)
                            fail_id.append(rcv_list[0])
                            # record the failed rcv_list[0] in BerkeleyDB here, for later status reporting
                        else:
                            return_status = time.strftime(
                                "%Y-%m-%d %H:%M:%S", time.localtime()
                            ) + ' ' + 'something went wrong, the system will try again'
                            self.__smwLogger.warning(
                                'something went wrong, the system will try again')
                            # print(return_status)
                            access_token = mainToken.getToken(source='SERVER')

            ch.basic_ack(delivery_tag=method.delivery_tag)

        channel.basic_qos(prefetch_count=1)
        # note: this is the pre-1.0 pika basic_consume() signature (callback argument first)
        channel.basic_consume(cb, queue=self.__mq_queue, no_ack=False)
        print('waiting for messages; press Ctrl+C to exit')
        # start get data
        channel.start_consuming()
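
A hedged usage sketch: SendMessage() blocks in channel.start_consuming() until interrupted, so it is typically run as a long-lived worker (this assumes the RabbitMQ broker, ./parameter/parameter.json and ./parameter/SHA1 are all in place):

if __name__ == '__main__':
    worker = SendMessagetoWechat()
    try:
        worker.SendMessage()  # blocks while consuming from the queue
    except KeyboardInterrupt:
        print('consumer stopped')
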
class TokenOperator(object):
    def __init__(self):
        self.__timer = 0
        self.__paraSHA1Path = './parameter/SHA1'
        self.__paraFilePath = './parameter/parameter.json'
        self.__tokenLogpath = './log/'
        self.__logfilepath = 'TOKENOPERSTOR_LOG.log'
        # check
        self.__newParaChecker = ParaHashcheck(f_path=self.__paraSHA1Path,
                                              p_path=self.__paraFilePath)
        self.__checkResulfFlag = self.__newParaChecker.checkparameter()
        # parameter
        self.__paraLoder = ParaLoder(self.__paraFilePath,
                                     self.__checkResulfFlag,
                                     'Wechat_interface', 'BerkeleyDB')
        self.__parameter = self.__paraLoder.loadParameter()
        # log handlers
        self.__baseTokenLogger = NewLogger(m_path=self.__tokenLogpath,
                                           f_path=self.__logfilepath)
        self.__tokenLogger = self.__baseTokenLogger.setting()
        # create data dict
        self.__data = {
            'corpid': self.__parameter['Wechat_interface']['corpid'],
            'secret': self.__parameter['Wechat_interface']['secret']
        }
        self.__url = self.__parameter['Wechat_interface']['gettokenurl']
        # bsddb
        self.__dbHandle = BerkeleyDB(
            b_path=self.__parameter['BerkeleyDB']['bdb_path'],
            b_file=self.__parameter['BerkeleyDB']['Token_DB'])

    def getTokenFromServer(self):
        while self.__timer <= 2:
            response = requests.post(self.__url, data=self.__data)
            inner_resp = json.loads(response.text)
            if inner_resp['errcode'] == 0:
                self.__dbHandle.insertdata(key='TOKEN',
                                           value=inner_resp['AccessToken'])
                self.__timer = 999
                self.__tokenLogger.info('Got token successfully!')
                return inner_resp['AccessToken']
            else:
                self.__tokenLogger.warning(
                    'Get token failed, the system will try again later... (%d/3)' %
                    (self.__timer + 1))
                time.sleep(pow(self.__timer + 1, 3))
                self.__timer += 1
                if self.__timer == 2:
                    self.__tokenLogger.warning('Get token failed 3 times')
                    inner_resp = 0
                    return inner_resp

    def getTokenFromDB(self):
        # the key must match the 'TOKEN' key used in insertdata()
        innerdata = self.__dbHandle.readpairdate(key='TOKEN')
        if innerdata:
            return innerdata.decode(encoding='utf-8')
        else:
            return 0

    def getToken(self, source='DB'):
        if source == 'DB':
            innerData = self.getTokenFromDB()
            if innerData == 0:
                innerData = self.getTokenFromServer()
            return innerData
        elif source == 'SERVER':
            innerData = self.getTokenFromServer()
            return innerData
        else:
            self.__tokenLogger.warning('getToken function parameter error')
            innerData = self.getTokenFromDB()
            return innerData

    def checkToken(self):
        pass
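
A short usage sketch of the token fallback: getToken() reads the cached token from BerkeleyDB first and only calls the WeChat endpoint when the cached value is missing (assuming the parameter and SHA1 files are present):

operator = TokenOperator()
token = operator.getToken()  # DB first, falls back to the server automatically
if not token:
    print('could not obtain an access token')
else:
    print('access token:', token)
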
if __name__ == "__main__":
    np.random.seed(args.seed)
    torch.manual_seed(args.seed)
    random.seed(args.seed)
    nj = args.nj
    data_dir = args.data_dir
    out_dir = os.path.join(args.out_dir, args.out_set)

    sys.stdout = NewLogger(
        os.path.join(out_dir, 'log', 'egs.%s.conf' % time.strftime("%Y.%m.%d", time.localtime())))

    print('\nCurrent time is \33[91m{}\33[0m.'.format(str(time.asctime())))
    opts = vars(args)
    keys = list(opts.keys())
    keys.sort()

    options = []
    for k in keys:
        options.append("\'%s\': \'%s\'" % (str(k), str(opts[k])))

    print('Preparing egs options: \n{ %s }' % (', '.join(options)))

    if not os.path.exists(out_dir):
        os.makedirs(out_dir)

paraSHA1Path = './parameter/SHA1'
filePath = './parameter/SHA1'
paraFilePath = './parameter/parameter.json'
getDataLogpath = './log/'
getDataLogFilePath = 'GETDATA_LOG.log'
# check
newParaChecker = ParaHashcheck(f_path=paraSHA1Path, p_path=paraFilePath)
checkResulfFlag = newParaChecker.checkparameter()
# parameter
getDataParaLoder = ParaLoder(paraFilePath, checkResulfFlag, 'MQ',
                             'DB_readonly', 'DB_wrightonly')
getDataParameter = getDataParaLoder.loadParameter()
# log handlers
basegetDataLogger = NewLogger(m_path=getDataLogpath, f_path=getDataLogFilePath)
getDataLogger = basegetDataLogger.setting()
# initial MQ parameter
mq_ip = getDataParameter['MQ']['ip']
mq_user = getDataParameter['MQ']['user']
mq_password = getDataParameter['MQ']['passwd']
mq_exchange = getDataParameter['MQ']['exchange']
mq_routing_key = getDataParameter['MQ']['routingkey']
mq_queue = getDataParameter['MQ']['queue']

# DB parameter
# 20190131: the DB instance had an error; temporarily use the production environment in place of readonly
# read only instance
db_ip = getDataParameter['DB_readonly']['ip']
db_port = int(getDataParameter['DB_readonly']['port'])
db_sid = getDataParameter['DB_readonly']['sid']