def update_html(cls, bucket, prefix, directory='.', upload_new=True):
    """Build a local HTML metrics report from ``metrics_report.tsv`` on S3.

    Reads the tab-separated key/value report from
    ``s3://<bucket>/<prefix>/metrics_report.tsv``, renders it through
    ``cls.create_html()`` into ``<directory>/metrics.html``, and (by default)
    uploads the result back to the same bucket/prefix and removes the local copy.

    :param bucket: S3 bucket holding the metrics report
    :param prefix: key prefix under which the report lives
    :param directory: local directory for the generated HTML (created if missing)
    :param upload_new: when True, upload the HTML to S3 and delete the local file
    :raises KeyError: if a required metrics key is missing from the report
    :raises ValueError: if a timestamp field is not in '%Y-%m-%d %H:%M:%S' format
    """
    if not os.path.exists(directory):
        os.makedirs(directory)
    filename = os.path.join(directory, 'metrics.html')
    # reading table parameters from metrics_report.tsv
    read_file = read_s3(bucket, os.path.join(prefix, 'metrics_report.tsv'))
    d = {}
    # read the key<TAB>value pairs into d; first occurrence of a key wins
    for line in read_file.rstrip().split('\n'):
        k, v = line.split('\t')
        d.setdefault(k, v)
    # everything is a string at this point; parse times into datetime objects
    starttime = datetime.strptime(d['Start_Time'], '%Y-%m-%d %H:%M:%S')
    # retro-compatibility: older reports used 'End_time' (lowercase t) or
    # only recorded 'Time_of_Request'. Explicit key checks replace a former
    # bare except that also hid genuine parse errors.
    if 'End_Time' in d:
        endtime = datetime.strptime(d['End_Time'], '%Y-%m-%d %H:%M:%S')
    elif 'End_time' in d:
        endtime = datetime.strptime(d['End_time'], '%Y-%m-%d %H:%M:%S')
    else:
        endtime = datetime.strptime(d['Time_of_Request'], '%Y-%m-%d %H:%M:%S')
    # optional fields: show a placeholder when absent
    cost = d.get('Cost', '---')
    instance = d.get('Instance_Type', '---')
    # render the report through the class template
    with open(filename, 'w') as fo:
        fo.write(
            cls.create_html() % (
                instance,
                d['Maximum_Memory_Used_Mb'],
                d['Minimum_Memory_Available_Mb'],
                d['Maximum_Disk_Used_Gb'],
                d['Maximum_Memory_Utilization'],
                d['Maximum_CPU_Utilization'],
                d['Maximum_Disk_Utilization'],
                cost,
                str(starttime),
                str(endtime),
                str(endtime - starttime)))
    if upload_new:
        upload(filename, bucket, prefix)
        os.remove(filename)
def test_upload():
    """Round-trip test: upload a local file to S3 and verify it can be fetched.

    Creates a uniquely named local directory and file, uploads it to the
    'tibanna-output' bucket under 'uploadtest/', asserts the object exists,
    and always cleans up both the local directory and the remote object —
    even when the upload or assertion fails (previously a failure leaked
    both the local dir and the S3 object).
    """
    randomstr = 'test-' + create_jobid()
    os.mkdir(randomstr)
    filepath = os.path.join(os.path.abspath(randomstr), randomstr)
    with open(filepath, 'w') as f:
        f.write('haha')
    s3 = boto3.client('s3')
    try:
        upload(filepath, 'tibanna-output', 'uploadtest')
        res = s3.get_object(Bucket='tibanna-output',
                            Key='uploadtest/' + randomstr)
        assert res
    finally:
        # cleanup runs regardless of test outcome
        shutil.rmtree(randomstr)
        s3.delete_objects(Bucket='tibanna-output',
                          Delete={'Objects': [{'Key': 'uploadtest/' + randomstr}]})
def upload(self, bucket, prefix='', lock=True):
    """Upload every file in ``self.list_files`` to *bucket* under *prefix*.

    :param bucket: destination S3 bucket
    :param prefix: key prefix for the uploaded files
    :param lock: when True, also upload a 'lock' marker object afterwards
    """
    printlog(str(self.list_files))
    for path in self.list_files:
        upload(path, bucket, prefix)
    if not lock:
        return
    # marker object written after all files — presumably signals completion
    # to downstream consumers; confirm against callers
    upload(None, bucket, os.path.join(prefix, 'lock'))