def mapLargeProcess(ags):
    """Run the large-GDB conversion remotely over SSH from a worker process.

    ags layout (positional):
      0 sourceFilePath, 1 outputGDBPath, 2 serverName, 3 templatePath,
      4 x, 5 y, 6 sourceFileFields, 7 fileEncode, 8 convertSr,
      9 serverInfo, 10 pythonPath, 11 toolsPyPath, 12 logOutputPath

    Returns:
        The status returned by ``SSHControl.ssh_exec_cmd`` for the remote
        conversion command.

    Raises:
        Re-raises any exception after logging the full traceback.
    """
    # One logger (and log file) per worker PID so parallel processes
    # do not interleave their output.
    mapLog = logging.getLogger(str(os.getpid()))
    logOutputPath = ags[12]
    if len(mapLog.handlers) == 0:
        mapLog = Logger(logname=os.path.join(
            logOutputPath, "largeGDB_process_%s.log" % os.getpid()),
            loglevel=3,
            callfile=str(os.getpid())).get_logger()
    try:
        sourceFilePath = ags[0]
        outputGDBPath = ags[1]
        serverName = ags[2]
        templatePath = ags[3]
        x = ags[4]
        y = ags[5]
        sourceFileFields = ags[6]
        fileEncode = ags[7]
        convertSr = ags[8]
        serverInfo = ags[9]
        pythonPath = ags[10]
        toolsPyPath = ags[11]
        mapLog.info('process pid(%s) running...' % str(os.getpid()))
        sshConn = SSHControl(None, None, mapLog, serverInfo=serverInfo)
        # BUG FIX: close the SSH connection even if ssh_exec_cmd raises;
        # the original leaked the connection on failure.
        try:
            status = sshConn.ssh_exec_cmd(
                '%s %s %s %s %s %s %s %s %s %s %s %s' %
                (pythonPath, toolsPyPath, sourceFilePath, outputGDBPath,
                 serverName, templatePath, x, y, sourceFileFields,
                 str(os.getpid()), fileEncode, convertSr))
        finally:
            sshConn.ssh_close()
        # BUG FIX: the original logged 'proces pid(%s) complate ' with no
        # argument supplied for %s (so the literal placeholder was printed);
        # supply the pid and fix the typos.
        mapLog.info('process pid(%s) complete' % str(os.getpid()))
        return status
    except Exception:
        mapLog.warning(traceback.format_exc())
        raise
def mapProcess(ags):
    # Convert one source file into a point layer inside an output GDB,
    # running as a multiprocessing worker.
    #
    # ags layout (positional):
    #   0 fileName          - name of the layer/file to create
    #   1 sourcePath        - path of the source data file
    #   2 x, 3 y            - names/indexes of the coordinate fields
    #   4 sourceFileFields  - field list of the source file
    #   5 gdbPathList       - shared list of available GDB paths (used as a pool)
    #   6 extentList        - shared list collecting [name, relPath, extent]
    #
    # NOTE(review): gdbPathList.pop()/append() is used as a checkout/return
    # pool across worker processes — presumably a multiprocessing.Manager
    # list; verify against the caller.
    fileName = ags[0]
    sourcePath = ags[1]
    x = ags[2]
    y = ags[3]
    sourceFileFields = ags[4]
    gdbPathList = ags[5]
    extentList = ags[6]
    # Check a GDB path out of the shared pool; it is returned (append)
    # on both the success and the failure path below.
    outputPath = gdbPathList.pop()
    mapLog = logging.getLogger(__file__)
    if len(mapLog.handlers) == 0:
        # NOTE(review): logOutputPath is read from module scope here — it is
        # not passed in ags; confirm it is defined before workers start.
        mapLog = Logger(logname=os.path.join(
            logOutputPath, "largeGDB_process_%s.log" % os.getpid()),
            loglevel=3,
            callfile=__file__).get_logger()  # thanks to an anonymous commenter for the correction
    try:
        mapLog.info('PID(%s) convert GDB:%s' % (os.getpid(), fileName))
        layerPath = os.path.join(outputPath, fileName)
        # Clone the template layer ("tempLayer") into the new layer path.
        ac.copyLayers(os.path.join(outputPath, "tempLayer"), layerPath)
        layerFields = ac.getLayerFields(layerPath)
        # Insert the source records into the GDB layer; returns the extent.
        extent = ac.insertRow_Point(layerPath, sourcePath, x, y,
                                    sourceFileFields, layerFields, mapLog)
        extentList.append([
            fileName,
            os.path.split(outputPath)[1] + os.path.sep + fileName,
            extent
        ])
        # os.remove(sourcePath)  # delete the source file (disabled)
        gdbPathList.append(outputPath)  # return the GDB to the pool
        mapLog.info('PID(%s) convert GDB:%s Done' % (os.getpid(), fileName))
        if len(extent) > 0:
            mapLog.info('PID(%s) GDB extent:%s' %
                        (os.getpid(), "%f,%f,%f,%f" %
                         (extent[0], extent[1], extent[2], extent[3])))
    except:
        gdbPathList.append(outputPath)  # return the GDB even on failure
        mapLog.warning(traceback.format_exc())
        raise
def encodeProcess(ags):
    """Geocode one source file using a pool of threads (worker process).

    ags layout (positional):
      0 fileIndex     - identifier for the file being processed
      1 filePath      - path of the source data file
      2 fields        - field list of the source file
      3 addrField     - address column header name
      4 fileEncode    - source file text encoding
      5 threadNum     - number of geocoding threads to spawn
      6 geocodeOutput - geocode result output directory
      7 geocodeUrl    - geocoding service URL
      8 logOutputPath - log directory

    Writes ``<name>_success`` and ``<name>_error`` files under
    ``geocodeOutput/geocodeResult``. Returns "" on failure, None on success
    (matching the original contract).
    """
    fileIndex = str(ags[0])
    filePath = ags[1]
    fields = ags[2]
    addrField = ags[3]
    fileEncode = ags[4]
    threadNum = ags[5]
    geocodeOutput = ags[6]
    geocodeUrl = ags[7]
    logOutputPath = ags[8]
    # One logger (and log file) per worker PID.
    logId = 'geocode_%s' % str(os.getpid())
    geocodeLog = logging.getLogger(logId)
    if len(geocodeLog.handlers) == 0:
        geocodeLog = Logger(logname=os.path.join(
            logOutputPath, "geocode_process_%s.log" % os.getpid()),
            loglevel=1,
            callfile=logId).get_logger()
    encodeResultPath = os.path.join(geocodeOutput, "geocodeResult")
    if not os.path.exists(encodeResultPath):
        os.mkdir(encodeResultPath)
    try:
        setdefaulttimeout(5)  # socket timeout (s) for geocode HTTP requests
        try:
            encodeSuccess = []
            encodeError = []
            threads = []
            geocodeLog.info("analysis file: %s" % fileIndex)
            # BUG FIX: the original used file(...) without with/finally, so
            # the source and result files leaked on any exception; 'with'
            # guarantees they are closed. open() also replaces the
            # deprecated file() builtin.
            with open(filePath, 'rb') as sourceFileObj:
                # The threads share the file object and consume lines from
                # it concurrently, appending to the shared result lists.
                for _ in xrange(threadNum):
                    th = threading.Thread(target=encodeThread,
                                          args=(fileIndex, sourceFileObj,
                                                fields, addrField,
                                                fileEncode, geocodeUrl,
                                                encodeSuccess, encodeError,
                                                geocodeLog))
                    th.start()
                    threads.append(th)
                for th in threads:
                    th.join()
            fileName = os.path.basename(filePath).split('.')[0]
            with open(os.path.join(encodeResultPath,
                                   fileName + '_error'), 'wb') as errorFile:
                for record in encodeError:
                    errorFile.write(record)
            xyFile = os.path.join(encodeResultPath, fileName + '_success')
            with open(xyFile, 'wb') as successFile:
                for record in encodeSuccess:
                    successFile.write(FIELD_DEL.join(record) + FIELD_DEL + '\n')
            geocodeLog.info("file done: %s" % filePath)
        except Exception as e:
            geocodeLog.warning(traceback.format_exc())
            geocodeLog.info("file analysis error: %s--%s" % (filePath, str(e)))
            return ""
    except Exception:
        # BUG FIX: e.message is deprecated and missing on many exception
        # types; the formatted traceback already carries the message.
        geocodeLog.warning(traceback.format_exc())
        return ""
timeStr = time.strftime("%Y%m%d", time.localtime()) logger = None logOutputPath = os.path.join(runWorkspace, 'workspace', 'logs', timeStr) if os.path.exists(os.path.join(runWorkspace, 'workspace', 'logs', timeStr)) is not True: os.mkdir(os.path.join(runWorkspace, 'workspace', 'logs', timeStr)) if __name__ == '__main__': logger = Logger(logname=os.path.join(logOutputPath, "updateGDB_main.log"), loglevel=3, callfile=__file__).get_logger() try: strPath = sys.argv[1] # strPath = r'D:\5000W\customer1.gdb,D:\5000W\customer2.gdb' extentList = strPath.split(',') list = [] outputGDB = "" for extent in extentList: if extent.endswith('1.gdb'): outputGDB = os.path.join(extent, 'extent') else: list.append(os.path.join(extent, 'extent')) ac.appendLayers(list, outputGDB) for extent in list: ac.deleteLayer(extent) print "sshsuccess" except Exception as e: logger.warning(traceback.format_exc()) print "ssherror"