# --- Core-log import script (reconstructed from a whitespace-mangled paste) ---

# get logdate from input argument in the format of YYYYMMDD
logdate = '20170317'

# get appkey information.
# Use a context manager so the file handle is always closed (the original
# leaked it).  readlines().pop() takes the LAST line of the file; NOTE(review):
# it may retain a trailing newline — confirm downstream callers tolerate that.
appKeyFilename = APPKEY_FILENAME
with open(appKeyFilename, 'r') as appKeyfile:
    appKey = appKeyfile.readlines().pop()

logtype = 'core'

# intro banner for the core-log import step
intro.intro_ImportCoreLog()

if isLogImport:
    # instance generation
    dailycorelogworker = genDataFrame.DailyLogWorker()
    coreLogDataFrame = DataFrame()
    if isCRCPASS_CSVwrite:
        coreLogDataFrame_CRCPASS = DataFrame()

    # worker init and build the list of daily log files for this date
    dailycorelogworker.init(logtype, logdate, appKey)
    dailycorelogworker.makeDailyLogfileList()

    t = time.time()
    # browse the daily log files one by one
    while dailycorelogworker.nextDailyLogFile():
        # data loading via 'grep' extraction mode
        dailycorelogworker.loadDailyLog('grep')
        currDataFrame = dailycorelogworker.getDailyDataFrame(-1)
        if not currDataFrame.empty:
            # NOTE(review): the body of this branch is truncated in the
            # source view — restore the original statements here.
            pass
# --- Beacon-log processing script (reconstructed from a whitespace-mangled paste) ---

# get appkey information; context manager closes the handle (the original
# leaked it).  Takes the LAST line of the key file.
with open(appKeyFilename, 'r') as appKeyfile:
    appKey = appKeyfile.readlines().pop()

logdate = '20170330'
device_amount = 500000  # the number of devices
period = 13             # the number of rcvd trials

# beacon log processing
logtype = 'beacon'
loadingType = 'json'

if isLogDown:
    # download the day's beacon log records from the server
    downBeaconLogDownWorker = downlog.LogDownloader()
    downBeaconLogDownWorker.setLogInfo(logdate, logtype, servertype)
    downBeaconLogDownWorker.downLogRecord()

# Beacon Log in json format to dataframe format
dailybeaconlogworker = genDataFrame.DailyLogWorker()
dailybeaconlogworker.init(logtype, logdate, appKey)
dailybeaconlogworker.get_log_files(loadingType)

# data frame generation
if len(dailybeaconlogworker.mFileNameList) > 0:
    beaconLogDailyDataFrame = dailybeaconlogworker.getDailyDataFrame(-1)
    # BUG FIX: sort_values is NOT in-place by default — the original
    # discarded the sorted result; reassign so the sort takes effect.
    beaconLogDailyDataFrame = beaconLogDailyDataFrame.sort_values(by='deviceTime')
    if isCSVwrite:
        if not beaconLogDailyDataFrame.empty:
            BEACONLOG_DATAFRAME_CSV_FILENAME = cm.DAILY_DATAFRAME_CSV_DIR + \
                '/' + logdate + '_' \
                + logtype + 'Log_' \
                + 'DailyDataFrame' + '.csv'
            # NOTE(review): the to_csv(...) argument list is truncated in
            # the source view — restore any additional keyword arguments.
            beaconLogDailyDataFrame.to_csv(BEACONLOG_DATAFRAME_CSV_FILENAME)
# --- Core-log line-count script (reconstructed from a whitespace-mangled paste) ---

frameSize = -1

# get appkey information; context manager closes the handle (the original
# leaked it).  Takes the LAST line of the key file.
appKeyFilename = APPKEY_FILENAME
with open(appKeyFilename, 'r') as appKeyfile:
    appKey = appKeyfile.readlines().pop()

# logdate = '20170422'
logdate = '20170331'
logtype = 'core'  # 'core', 'active', 'schedule', 'beacon' are possible

logDownWorker = downlog.LogDownloader()
logDownWorker.setLogInfo(logdate, logtype, servertype)
# logDownWorker.downLogRecord()  # uncomment to enable the log-download step

dailyDataFrameWorker = genDataFrame.DailyLogWorker()
# initialize with the desired logtype / logdate / appkey
dailyDataFrameWorker.init(logtype, logdate, appKey)
# build the list of log files already downloaded into the date's log folder
dailyDataFrameWorker.makeDailyLogfileList()

linenum = 0
# browse the daily log files one by one
while dailyDataFrameWorker.nextDailyLogFile():
    # extract the log via grep (much faster than json-load parsing)
    dailyDataFrameWorker.loadDailyLog('grep')
    # generate a data frame from the log
    currDataFrame = dailyDataFrameWorker.getDailyDataFrame(-1)
    # accumulate the data frame's line count
    linenum = linenum + dailyDataFrameWorker.mDataFrameForLog.getDataFrameSize()

# NOTE(review): in the collapsed source the print's loop placement is
# ambiguous; it reads as a final summary, so it is emitted after the loop.
# print(...) with a single %-formatted argument is valid in Python 2 and 3.
print('%s Log linenum = %s' % (logtype, linenum))