# NOTE(review): whitespace-mangled fragment — the whole script tail was collapsed
# onto one physical line, and the START of the line is truncated: the enclosing
# loop header and the left side of the `... < 700000` size test were cut off
# before this chunk, so the code below cannot be safely re-indented without
# guessing the missing condition. Left byte-identical; needs the original file.
#
# Reconstructed reading (hedged — confirm against the full source):
#   * accumulate entries into metricData and flush via sendData() whenever the
#     serialized payload nears 700000 bytes ("#Not using exact 750KB as some
#     data will be padded later" — i.e. a safety margin under a 750KB limit),
#     resetting metricData/alldata after each flush;
#   * after the loop: final sendData() flush, file.close(),
#     updateAgentDataRange(minTimestampEpoch, maxTimestampEpoch);
#   * "old file cleaning": os.walk over homepath/datadir, and any .csv file
#     whose mtime is older than keep_file_days days is moved to /tmp
#     (os.rename — note this fails across filesystems; TODO confirm intended);
#   * finally reportCustomMetrics.getcustommetrics(...) is called and its
#     boolean result printed (Python 2 print statements).
# NOTE(review): `for file in filenames` shadows the builtin `file` and the
# still-open data file object closed just above — presumably harmless here,
# but verify nothing after the walk reuses `file`.
700000 ): #Not using exact 750KB as some data will be padded later continue else: sendData() metricData = [] alldata = {} sendData() file.close() updateAgentDataRange(minTimestampEpoch, maxTimestampEpoch) #old file cleaning for dirpath, dirnames, filenames in os.walk(os.path.join(homepath, datadir)): for file in filenames: if ".csv" not in file: continue curpath = os.path.join(dirpath, file) file_modified = datetime.datetime.fromtimestamp( os.path.getmtime(curpath)) if datetime.datetime.now() - file_modified > datetime.timedelta( days=keep_file_days): os.rename(curpath, os.path.join("/tmp", file)) #Update custom Metrics reported = reportCustomMetrics.getcustommetrics(serverUrl, PROJECTNAME, USERNAME, LICENSEKEY, homepath) if reported: print "Custom metrics sent" else: print "Failed to send custom metrics"
maxAmount = chunkMaxSize/(maxSize + chunkingPadding) totalChunkCount = int(math.ceil(float(numlines) / float(maxAmount))) for entry in metricDatas: metricData.append(entry) if len(metricData) < maxAmount: continue; else: sendData(fileMD5) metricData = [] sendData(fileMD5) file.close() updateAgentDataRange(minTimestampEpoch,maxTimestampEpoch) #old file cleaning for dirpath, dirnames, filenames in os.walk(os.path.join(homepath,datadir)): for file in filenames: if ".csv" not in file: continue curpath = os.path.join(dirpath, file) file_modified = datetime.datetime.fromtimestamp(os.path.getmtime(curpath)) if datetime.datetime.now() - file_modified > datetime.timedelta(days=keep_file_days): os.rename(curpath,os.path.join("/tmp",file)) #Update custom Metrics reported = reportCustomMetrics.getcustommetrics(serverUrl, PROJECTNAME, USERNAME, LICENSEKEY, homepath) if reported: print "Custom metrics sent" else: print "Failed to send custom metrics"