async def handleTimer(timerName, groupId):
    dataDict = IOUtils.deserializeObjFromPkl(
        os.path.join(os.getcwd(), 'cn', 'acmsmu', 'FG', 'data', groupId, 'var.pkl'))
    flag = dataDict['flag']
    # Generate today's summary for this group and push it to the group chat.
    clu = DailyConclusion.DailyConlusion(groupId)
    report = clu.generateReport()
    #print(timerName + '的每日总结为\n' + report)
    await bot.send_group_msg(group_id=int(groupId), message=report)
    # Flip the A/B flag: new messages go to the other file, and the file that
    # was just summarised is deleted.
    if flag:
        dataDict['flag'] = False
        dataDict['file'] = 'chatB.txt'
        IOUtils.serializeObj2Pkl(
            dataDict,
            os.path.join(os.getcwd(), 'cn', 'acmsmu', 'FG', 'data', groupId, 'var.pkl'))
        IOUtils.deleteFile(
            os.path.join(os.getcwd(), 'cn', 'acmsmu', 'FG', 'data', groupId, 'chatA.txt'))
    else:
        dataDict['flag'] = True
        dataDict['file'] = 'chatA.txt'
        IOUtils.serializeObj2Pkl(
            dataDict,
            os.path.join(os.getcwd(), 'cn', 'acmsmu', 'FG', 'data', groupId, 'var.pkl'))
        IOUtils.deleteFile(
            os.path.join(os.getcwd(), 'cn', 'acmsmu', 'FG', 'data', groupId, 'chatB.txt'))
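# A minimal, dependency-free sketch of the A/B rotation performed above: the bot
# always appends to the "active" chat file, and when the daily report fires the
# flag flips so new messages go to the other file while the summarised one is
# removed. The helper names load_state / save_state / rotate_chat_files are
# illustrative only, not part of the project.
import os
import pickle


def load_state(path):
    with open(path, 'rb') as f:
        return pickle.load(f)


def save_state(path, state):
    with open(path, 'wb') as f:
        pickle.dump(state, f)


def rotate_chat_files(group_dir):
    state_path = os.path.join(group_dir, 'var.pkl')
    state = load_state(state_path)
    old_file = 'chatA.txt' if state['flag'] else 'chatB.txt'
    # Flip the flag and point new writes at the other file.
    state['flag'] = not state['flag']
    state['file'] = 'chatB.txt' if old_file == 'chatA.txt' else 'chatA.txt'
    save_state(state_path, state)
    # Drop the log that was just summarised.
    old_path = os.path.join(group_dir, old_file)
    if os.path.exists(old_path):
        os.remove(old_path)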
def initMetaData(self):
    print('计算MD5...')
    MD5 = IOUtils.getMD5(self.path)
    print(MD5)
    fileSize = os.path.getsize(self.path)
    fileName = os.path.basename(self.path)
    # Files smaller than 100 MB are not split into blocks.
    if fileSize < ConversionUtils.megabytes2Bytes(100):
        self.blockNum = 0
    else:
        self.blockNum = IOUtils.getPartionBlockNum(self.path, self.blockSize)
    metadata = MetaData(fileSize, fileName, MD5, self.blockNum)
    self.metadataPath = os.path.dirname(self.path) + os.sep + 'METADATA'
    IOUtils.serializeObj2Pkl(metadata, self.metadataPath)
    self.fileList.append(self.metadataPath)
    print('元数据初始化完毕')
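# The IOUtils helpers used above (getMD5, getPartionBlockNum) are not shown in
# this listing. A plausible sketch of what they compute -- a streaming MD5 and a
# ceiling division for the block count -- purely illustrative, the real helpers
# may differ:
import hashlib
import math
import os


def get_md5(path, chunk_size=1024 * 1024):
    md5 = hashlib.md5()
    with open(path, 'rb') as f:
        for chunk in iter(lambda: f.read(chunk_size), b''):
            md5.update(chunk)
    return md5.hexdigest()


def get_block_num(path, block_size_mb):
    # Number of fixed-size blocks needed to cover the whole file.
    return math.ceil(os.path.getsize(path) / (block_size_mb * 1024 * 1024))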
def __init__(self, path, port=9000, bufferSize=1, blockSize=100):
    self.fileList = []
    self.path = path
    self.port = port
    self.bufferSize = bufferSize  # buffer size in MB
    self.blockSize = blockSize    # block size in MB
    if os.path.isdir(path):
        print('该路径是目录,将该目录中的所有文件(包含子文件夹)打包为dirpack.zip')
        self.path = IOUtils.packageDir(path)
        # The temporary zip is registered for cleanup after the transfer.
        self.fileList.append(self.path)
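# Hedged usage sketch: this constructor belongs to the sender-side class (its
# actual name is not shown in this listing; "Sender" below is a placeholder).
# Buffer and block sizes are given in MB, and a directory argument is zipped to
# dirpack.zip before transfer.
#
#     sender = Sender('/path/to/payload', port=9000, bufferSize=1, blockSize=100)
#     sender.serverTransferFileProcess()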
def receiveFileProcess(self):
    print('STEP1---接收元数据')
    self.getMetaData()
    blockNum = self.metadata.blockNum
    print('STEP2---开始传输数据')
    if blockNum == 0:
        print('单文件直接传输,目标文件小于100M 传输中...')
        NetUtils.receiveSigFile(self.path + os.sep + self.metadata.fileName,
                                self.serverIP,
                                bufferSize=self.bufferSize,
                                verbose=False)
    else:
        tPool = []
        for i in tqdm(range(blockNum), ascii=True):
            # Throttle: wait until a receiver slot is free.
            while self.threadNum >= self.threadNumTop:
                pass
            partPath = self.path + os.sep + 'PART' + str(i)
            t = Thread(target=self.receiveFileSubProcess, args=(partPath, 9000 + i))
            self.fileList.append(partPath)
            t.daemon = True
            t.start()
            tPool.append(t)
            self.threadNum += 1
        # Wait for every block-receiving thread to finish.
        for t in tPool:
            t.join()
    print('STEP3---开始合并文件')
    IOUtils.combineFile(self.path, self.path + os.sep + self.metadata.fileName,
                        self.metadata.blockNum)
    IOUtils.deleteFiles(self.fileList)
    md5 = IOUtils.getMD5(self.path + os.sep + self.metadata.fileName)
    print('STEP4---验证文件中...')
    print(md5)
    if md5 == self.metadata.MD5:
        print('完成!')
    else:
        print('失败')
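# receiveFileSubProcess is referenced above but not shown. A hedged sketch of what
# such a worker might do: connect to the sender, stream one PART file to disk, then
# free its slot. The socket framing here is an assumption, not the project's
# actual protocol, and updating self.threadNum without a lock mirrors the
# dispatcher loop above rather than best practice.
import socket


def receiveFileSubProcess(self, partPath, port, bufferSize=1024 * 1024):
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
        sock.connect((self.serverIP, port))
        with open(partPath, 'wb') as f:
            while True:
                data = sock.recv(bufferSize)
                if not data:
                    break
                f.write(data)
    # Free one slot so the dispatcher loop can start another thread.
    self.threadNum -= 1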
async def handleGroupMsg(session):
    groupInfo = configuration['groupInfo']
    for each in groupInfo:
        if each['groupId'] == str(session['group_id']):
            # Load this group's var.pkl
            dataDict = IOUtils.deserializeObjFromPkl(
                os.path.join(os.getcwd(), 'cn', 'acmsmu', 'FG', 'data',
                             each['groupId'], 'var.pkl'))
            # Read the flag
            flag = dataDict['flag']
            # The flag decides which file receives this chat record
            msg = time.strftime('%Y-%m-%d %H:%M:%S', time.localtime()) + ' ' + \
                  str(session['user_id']) + '\n' + session['raw_message'] + '\n'
            if flag:
                with open(os.path.join(os.getcwd(), 'cn', 'acmsmu', 'FG', 'data',
                                       each['groupId'], 'chatA.txt'),
                          'a', encoding='utf-8') as fileA:
                    fileA.write(msg)
            else:
                with open(os.path.join(os.getcwd(), 'cn', 'acmsmu', 'FG', 'data',
                                       each['groupId'], 'chatB.txt'),
                          'a', encoding='utf-8') as fileB:
                    fileB.write(msg)
            break
def __init__(self, groupId):
    self.__configuration = JsonUtils.json2Dict(
        os.path.join(os.getcwd(), 'cn', 'acmsmu', 'FG', 'data', 'config.json'))
    self.__groupId = groupId
    dataDict = IOUtils.deserializeObjFromPkl(
        os.path.join(os.getcwd(), 'cn', 'acmsmu', 'FG', 'data',
                     self.__groupId, 'var.pkl'))
    # Which chat file to read
    self.__useFile = dataDict['file']
    # The end time is simply the moment this code runs
    self.__endTime = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
    self.__beginTime = None
    self.__chatlog = self.__cleaning()
    print(self.__chatlog)
    for each in self.__configuration['template']:
        if each['groupId'] == groupId:
            self.__template = each['content']
            break
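# The chat files consumed here are written by handleGroupMsg in the format
# "<YYYY-mm-dd HH:MM:SS> <user_id>\n<raw_message>\n". A hedged sketch of how a
# cleaning step might read such a file and recover the begin time; the real
# __cleaning implementation is not shown and may differ (for example, multi-line
# raw messages would break the strict header/message alternation assumed below):
def parse_chatlog(path):
    entries = []
    with open(path, encoding='utf-8') as f:
        lines = [line.rstrip('\n') for line in f]
    # Lines alternate: "date time user_id" header, then the raw message.
    for header, message in zip(lines[0::2], lines[1::2]):
        date, clock, userId = header.split(' ', 2)
        entries.append({'time': date + ' ' + clock, 'user': userId, 'msg': message})
    beginTime = entries[0]['time'] if entries else None
    return entries, beginTime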
def serverTransferFileProcess(self):
    print('服务器IP为:')
    sip = NetUtils.getLocalIPAddr()
    print(sip)
    print('STEP1---初始化元数据')
    self.initMetaData()
    print('STEP2---测试端口连通性')
    self.scanPort()
    print('STEP3---发送元数据')
    NetUtils.transferSigFile(self.metadataPath)
    print('元数据已发送')
    print('STEP4---开始传输数据')
    if self.blockNum == 0:
        print('单文件<100M 传输中...请稍等...')
        NetUtils.transferSigFile(self.path,
                                 bufferSize=self.bufferSize,
                                 verbose=True,
                                 port=self.port)
    else:
        print('文件共分为' + str(self.blockNum) + '块,开始分割文件')
        blockBytes = ConversionUtils.megabytes2Bytes(self.blockSize)
        toPath = os.path.dirname(self.path) + os.sep + 'MEtemp'
        self.fileList.append(toPath)
        if IOUtils.isDir(toPath):
            print('MEtemp目录已存在,删除之')
            IOUtils.deleteFile(toPath)
        try:
            os.mkdir(toPath)
        except FileNotFoundError:
            print('错误!无法创建目录!')
            sys.exit(-1)
        # Thread pool
        tpool = []
        with open(self.path, 'rb') as orgFile:
            for i in tqdm(range(self.blockNum), ascii=True):
                totalBufferSize = 0
                # Write one block of at most blockBytes, bufferSize MB at a time.
                with open(toPath + os.sep + 'PART' + str(i), 'wb') as toFile:
                    while totalBufferSize < blockBytes:
                        data = orgFile.read(
                            ConversionUtils.megabytes2Bytes(self.bufferSize))
                        if not data:
                            break
                        toFile.write(data)
                        totalBufferSize += ConversionUtils.megabytes2Bytes(self.bufferSize)
                self.fileList.append(toPath + os.sep + 'PART' + str(i))
                # Each block is sent on its own port by a daemon thread.
                t = Thread(target=NetUtils.transferSigFile,
                           args=(toPath + os.sep + 'PART' + str(i),
                                 self.port + i, self.bufferSize, False))
                t.daemon = True
                t.start()
                tpool.append(t)
        # Wait for every sender thread to finish.
        for eachThread in tqdm(tpool, ascii=True):
            eachThread.join()
    print('清理临时文件...')
    IOUtils.deleteFiles(self.fileList)
    print('完成!')
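# NetUtils.transferSigFile is used throughout but not shown in this listing. One
# plausible shape, assuming a simple "listen, accept one client, stream the file,
# close" protocol with the parameter order used in the calls above; the real
# utility may well differ:
import socket


def transferSigFile(path, port=9000, bufferSize=1, verbose=True):
    chunk = bufferSize * 1024 * 1024  # bufferSize is in MB
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as server:
        server.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        server.bind(('0.0.0.0', port))
        server.listen(1)
        conn, addr = server.accept()
        if verbose:
            print('sending', path, 'to', addr)
        with conn, open(path, 'rb') as f:
            while True:
                data = f.read(chunk)
                if not data:
                    break
                conn.sendall(data)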
'''
@desc: Main entry of the intranet tunnelling (port-mapping) server
@author: Martin Huang
@time: created on 2019/6/21 17:31
@changelog:
'''
import json
from Utils.IOUtils import IOUtils
from ExternalMain import *
# PyCharm:
# from src.main.Utils.IOUtils import *
# from src.main.ExternalMain import *
import multiprocessing

if __name__ == '__main__':
    config = IOUtils.getConfigJson('config-s.json')
    # Same idea as the client: one process per mapped application.
    for eachApp in config.keys():
        print(eachApp)
        appConfig = config.get(eachApp)
        p = multiprocessing.Process(target=ExternalMain,
                                    args=(int(appConfig.get('toPort')),
                                          int(appConfig.get('commonPort')),
                                          int(appConfig.get('remotePort'))))
        p.start()
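# config-s.json is not included in this listing. Based on the keys read above,
# each top-level entry presumably names one mapped application with its three
# ports. An illustrative (not authoritative) example, written out with the
# standard json module:
import json

sample = {
    'ssh': {'toPort': '22', 'commonPort': '10022', 'remotePort': '20022'},
    'web': {'toPort': '8080', 'commonPort': '10080', 'remotePort': '20080'},
}

with open('config-s.json', 'w', encoding='utf-8') as f:
    json.dump(sample, f, indent=4)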
def getMetaData(self):
    metaDataPath = self.path + os.sep + 'METADATA'
    self.fileList.append(metaDataPath)
    NetUtils.receiveSigFile(metaDataPath, self.serverIP)
    print('元数据接收完毕')
    self.metadata = IOUtils.deserializeObjFromPkl(metaDataPath)
import nonebot
import time
import os

from cn.acmsmu.FG import Timer
from Utils.JsonUtils import JsonUtils
from Utils.IOUtils import IOUtils

configuration = JsonUtils.json2Dict(
    os.path.join(os.getcwd(), 'cn', 'acmsmu', 'FG', 'data', 'config.json'))
groupInfo = configuration['groupInfo']
for each in groupInfo:
    fpath = os.path.join(os.getcwd(), 'cn', 'acmsmu', 'FG', 'data', each['groupId'])
    try:
        dataDict = dict()
        dataDict['flag'] = True
        dataDict['file'] = 'chatA.txt'
        IOUtils.mkdir(fpath)
        IOUtils.serializeObj2Pkl(dataDict, os.path.join(fpath, 'var.pkl'))
    except FileExistsError:
        continue
bot = nonebot.get_bot()
print('初始化完成')


@bot.on_message('group')
async def handleGroupMsg(session):
    groupInfo = configuration['groupInfo']
    for each in groupInfo:
        if each['groupId'] == str(session['group_id']):
            # Load this group's var.pkl
            dataDict = IOUtils.deserializeObjFromPkl(
                os.path.join(os.getcwd(), 'cn', 'acmsmu', 'FG', 'data',
                             each['groupId'], 'var.pkl'))
            # Read the flag
            flag = dataDict['flag']