def Except(name, dates, e):
    """Log an unexpected exception, remove this job's lock file and exit(1).

    @param name:  job name used to build the lock file name, str
    @param dates: date string used to build the lock file name, str
    @param e:     the exception (or error message) to report
    """
    lock_file = join(lpath, name + '_' + dates + '.lock')
    logger.error(u'程序执行过程中发生异常, 错误信息如下\n%s', e)
    try:
        os.remove(lock_file)
        logger.info(u'程序正在退出. 删除锁文件 %s', lock_file)
    except OSError:
        # Was a bare `except:` (would also swallow SystemExit /
        # KeyboardInterrupt); os.remove only raises OSError here.
        logger.info(u'程序正在退出. 锁文件 %s不存在无需删除', lock_file)
    sys.exit(1)
def Start(name, dates):
    """Acquire the run lock for this job instance, or exit if one exists.

    Creates an empty <name>_<dates>.lock file under lpath. If the file is
    already present another instance is assumed to be running and this
    process exits with status 1.

    @param name:  job name used to build the lock file name, str
    @param dates: date string used to build the lock file name, str
    """
    lock_file = join(lpath, name + '_' + dates + '.lock')
    if not os.path.isfile(lock_file):
        # NOTE(review): check-then-create is racy (TOCTOU); two instances
        # started at the same instant could both pass the isfile() test.
        # Kept as-is to preserve behavior; os.open with O_CREAT|O_EXCL
        # would make acquisition atomic.
        with open(lock_file, 'w'):
            pass  # touch: an empty file is enough to mark us as running
        logger.info(u'程序开始执行')
    else:
        logger.error(u'已存在正在运行的其它实例, 该实例主动退出...')
        logger.error(u'如需人工干预请删除锁文件后重新执行. 删除命令: rm -f %s', lock_file)
        sys.exit(1)
def QueryExe(hql, name, dates):
    """Run one Hive query over thrift and return the fetched rows.

    Opens a buffered thrift connection to the Hive server at ips:10001,
    executes `hql`, fetches all rows and closes the transport. On a thrift
    error the lock file is removed and the process exits with status 1.

    @param hql:   the HiveQL statement to execute, str
    @param name:  job name used to build the lock file name, str
    @param dates: date string used to build the lock file name, str
    @return: result of client.fetchAll() (list of result lines)
    """
    lock_file = join(lpath, name + '_' + dates + '.lock')
    try:
        transport = TSocket.TSocket(ips, 10001)
        transport = TTransport.TBufferedTransport(transport)
        protocol = TBinaryProtocol.TBinaryProtocol(transport)
        client = ThriftHive.Client(protocol)
        transport.open()
        logger.info('Query sql is:\n%s', hql)
        client.execute(hql)
        query = client.fetchAll()
        logger.info('Query sql result is:\n%s', query)
        transport.close()
        return query
    # `except X, e` is Python-2-only syntax; `as` works on 2.6+ and 3.x.
    except Thrift.TException as tx:
        logger.error(u'程序执行过程中发生异常, 错误信息如下\n%s', tx.message)
        os.remove(lock_file)
        logger.error(u'程序正在退出. 删除锁文件 %s', lock_file)
        sys.exit(1)
def HiveExe(hql, name, dates):
    """Execute a sequence of Hive statements over thrift (no result fetch).

    Opens a buffered thrift connection to the Hive server at ips:10001 and
    runs each statement in `hql` in order. On a thrift error the lock file
    is removed and the process exits with status 1.

    @param hql:   iterable of HiveQL statements to execute
    @param name:  job name used to build the lock file name, str
    @param dates: date string used to build the lock file name, str
    """
    lock_file = join(lpath, name + '_' + dates + '.lock')
    try:
        transport = TSocket.TSocket(ips, 10001)
        transport = TTransport.TBufferedTransport(transport)
        protocol = TBinaryProtocol.TBinaryProtocol(transport)
        client = ThriftHive.Client(protocol)
        transport.open()
        for sql in hql:
            logger.info('Executive sql is:\n%s', sql)
            client.execute(sql)
            logger.info('Successful implementation of this Sql')
        transport.close()
    # `except X, e` is Python-2-only syntax; `as` works on 2.6+ and 3.x.
    except Thrift.TException as tx:
        logger.error(u'程序执行过程中发生异常, 错误信息如下\n%s', tx.message)
        os.remove(lock_file)
        logger.error(u'程序正在退出. 删除锁文件 %s', lock_file)
        sys.exit(1)
def splitFile(srcFile, linesPerFile):
    '''
    Purpose: 把hive导出来的IP文件切割成多个小文件批量执行, 减轻服务器压力的同时提高应用执行效率
    (Split a Hive-exported IP file into several smaller chunk files so they
    can be processed in batches.)

    @param srcFile: 被分隔的文件 (name of the file to split, resolved under
                    src_path), str类型;
    @param linesPerFile: 分隔后每个小文件包含的行数 (lines per chunk), int类型;
                         ex: 300记录/文件
    @return: list of chunk file basenames (<srcFile>__1, <srcFile>__2, ...)
             in creation order
    '''
    desFile = []
    line_cnt = 1
    file_cnt = 1
    s_file = os.path.join(src_path, srcFile)
    tfile = s_file + '__' + str(file_cnt)
    fd = open(s_file, 'r')
    fdx = open(tfile, 'w')
    for line in fd:
        if line_cnt > linesPerFile:
            # Current chunk is full: seal it and start the next one.
            fdx.close()
            desFile.append(basename(tfile))
            file_cnt += 1
            tfile = s_file + '__' + str(file_cnt)
            fdx = open(tfile, 'w')
            line_cnt = 1
        # Only keep lines that start with a digit (valid IP records);
        # this logic was duplicated verbatim in both branches before.
        if len(line) > 0 and line[0].isdigit():
            logger.debug('File_cnt %d %d ====== %s', file_cnt, line_cnt, line.rstrip('\n'))
            fdx.write(line)
        else:
            logger.error('skip %s', line.rstrip('\n'))
        # NOTE(review): skipped lines still count toward the chunk size
        # (same as the original), so chunks may hold fewer than
        # linesPerFile valid records.
        line_cnt += 1
    fd.close()
    fdx.close()
    desFile.append(basename(tfile))
    return desFile
def run(srcFile):
    '''
    Purpose: 执行函数 (main worker) -- look up every IP in srcFile through
    the TaoBao REST API and write the selected JSON fields, \\001-separated,
    to a timestamped output file under dst_path.

    @param srcFile: 源文件 (source file name, resolved under src_path),
                    one IP per line
    '''
    s_file = os.path.join(src_path, srcFile)
    stime = time.strftime('%Y%m%d%H%M%S')
    t_file = os.path.join(dst_path, srcFile + '_' + stime)
    logger.info(t_file)
    fd_src = open(s_file, 'r')
    fd_dst = open(t_file, 'w')
    rest = TaoBaoRest()
    for ip in fd_src:
        # Python 2 str.translate(None, deletechars): strip CR/LF.
        ip = ip.translate(None, '\r\n')
        # BUG fix: `data` was unbound when rest.runRest() raised on the
        # very first attempt, so the logger.error('%s <------> %s', ...)
        # call below crashed with NameError. Pre-initialize per iteration.
        data = None
        parser = None
        flag = False
        # NOTE(review): retries forever until one request succeeds
        # (reconnecting each time); kept as-is to preserve behavior,
        # but a bounded retry count may be safer.
        while not flag:
            try:
                data = rest.runRest(ip)
                logger.info('%s ====== %s', ip, data)
                parser = json.loads(data)
                flag = True
            except Exception as e:
                flag = False
                logger.error('Occur exception:\n%s', e)
                logger.error('%s <------> %s', ip, data)
                rest.reConn()
        Ret = parser['code']
        if 0 == Ret:
            data = parser['data']
            for k in keys:
                fd_dst.write(data[k].encode('UTF-8') + '\001')
            fd_dst.write('\n')
        else:
            logger.error('Ret = %d , discard ip: %s', Ret, ip)
        # rest.reConn()
    fd_src.close()
    fd_dst.close()
    del rest