def print_error(self):
    '''Print errors'''
    logging.error("Found %s errors in '%s'",
                  len(self.errors), fstr(self.path, 0))
    for err, dsc in self.errors.items():
        err = '{0:20}'.format(err)
        if len(dsc) == 1 and isinstance(dsc[0], list):
            dsc = dsc[0]
        logging.error("%s %s", fstr(err), dsc)
def ffmpeg_split(video: str, start: float, duration: float, output: str,
                 audio: bool, is_final_end_of_video: bool, docrop: dict = {}):
    """Use a subprocess call to FFMPEG to split a video."""
    # Note: both branches currently pass the same "-t <duration>" arguments.
    if is_final_end_of_video:
        toend = ["-t", fstr(duration)]
    else:
        toend = ["-t", fstr(duration)]
    split_video = ["ffmpeg", "-nostdin", "-y", "-ss", fstr(start)] + toend \
        + ["-i", video, "-vcodec"]
    if is_jetson:
        split_video += ["copy"]
    else:
        split_video += ["h264_nvenc"]
    if docrop:
        split_video += [
            '-vf',
            'crop=%d:%d:%d:%d' % (docrop['width'], docrop['height'],
                                  docrop['col0'], docrop['row0'])
        ]
    if audio:
        split_video += ["-acodec", "aac", "-strict", "-2", output]
    else:
        assert 0, str(audio) + "\nHIGHLY suggest including audio in slide videos; " \
            "it is now considered a bug to have slide videos without audio"
        split_video += ["-an", output]
    print(' '.join(split_video))
    subprocess.run(split_video, stderr=subprocess.DEVNULL)
    if is_jetson and False:
        # WARNING: THE GSTREAMER PIPELINE FOR CROPPING IS BROKEN ON THE JETSON
        # RIGHT NOW, SINCE GSTREAMER OCCASIONALLY FREEZES FOR SOME VIDEOS
        output_temp = output + '.temp.mpg'
        cmd = GST_LAUNCH_CMD.format(output, docrop['col0'], docrop['row0'],
                                    docrop['col0'] + docrop['width'],
                                    docrop['row0'] + docrop['height'],
                                    docrop['width'], docrop['height'],
                                    output_temp)
        subprocess.run(cmd, shell=True)
        shutil.move(output_temp, output)
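# A minimal usage sketch for ffmpeg_split (all file names and crop values below
# are hypothetical, and fstr is assumed to simply render numbers as strings):
#
#   ffmpeg_split(video="lecture_raw.mp4", start=12.5, duration=30.0,
#                output="slide_clip.mp4", audio=True,
#                is_final_end_of_video=False,
#                docrop={'width': 1280, 'height': 720, 'col0': 0, 'row0': 0})
#
# On a non-Jetson host this would build and run roughly:
#   ffmpeg -nostdin -y -ss 12.5 -t 30.0 -i lecture_raw.mp4 -vcodec h264_nvenc \
#          -vf crop=1280:720:0:0 -acodec aac -strict -2 slide_clip.mp4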
def check_header(self, lst):
    logging.info(fstr('Checking header'))
    diff = set(lst) - set(self.get_header())
    if len(diff) > 0:
        dsc = ', '.join(diff)
        self.add_error('Missing column', dsc)
def check_label(self):
    '''Checking label in list'''
    logging.info(fstr('Checking label'))
    diff = set(self.label_data.keys()) - set(CONF.XLS_LABEL.keys())
    if len(diff) > 0:
        self.add_error('Unknown label', list(diff))
def check_csv(self):
    path = self.files['csv']
    logging.info("Checking csv '%s'", fstr(path, 0))
    self.csvtbl = AmsCsv(path, self.data)
    lst_stub, lst_non_stub = self.csvtbl.get_stub_info()
    self.data['stub'] = lst_stub
    self.data['non_stub'] = lst_non_stub
def collect_files(self):
    '''Collect result files'''
    logging.info("Collecting test result files")
    for key, path in self.files.items():
        if key == 'stub':
            continue
        # Open each file to confirm it exists and is readable
        with open(path, errors='ignore') as _:
            logging.info("%s", fstr(path))
def check_title(self):
    logging.info(fstr('Checking title'))
    title = 'Test Case [{0}.csv] [Test CSV Information]' \
        .format(self.func)
    h4 = self.get_title()
    if title != h4:
        dsc = '{0} != {1}'.format(h4, title)
        self.add_error('Mismatch title', dsc)
def check_description(self):
    '''Check description'''
    logging.info(fstr('Checking description'))
    data = self.data

    def is_simulink():
        try:
            path = data['src_full']
            result = any([
                'Simulink model' in line
                for line in utils.read_file(path)[0:100]
            ])
        except FileNotFoundError:
            result = None
        except Exception:
            result = False
        finally:
            return result

    try:
        lst = [
            l for l in utils.read_file(self.path) if l.startswith('mod')
        ]
        lst = lst[0].split(',')
        func_full = lst[1].strip()[1:-1]
        description = lst[2].strip()[1:-1]
        lst_dsc = [data['func'], 'Simulink model']
        if func_full != data['func_full']:
            desc = 'Inconsistent function name {0} != {1}' \
                .format(func_full, data['func_full'])
            self.add_error('Function name', desc)
        if is_simulink() is True and description != 'Simulink model':
            desc = "The description should be 'Simulink model'"
            self.add_error('Description', desc)
        elif is_simulink() is False and description != data['func']:
            desc = "The description should be '{0}'".format(data['func'])
            self.add_error('Description', desc)
        elif is_simulink() is None and description not in lst_dsc:
            desc = "The description should be in '{0}'" \
                .format(str(lst_dsc))
            self.add_error('Description', desc)
    except Exception as e:
        logging.exception(e)
        self.add_error('Parse csv', str(e))
def deliver_result(self, target, template=CONF.FP_TEMPLATE):
    logging.info("Delivering test result files")
    dir_spec = Path(target).joinpath(CONF.DIR_SPEC)
    dir_result = Path(target).joinpath(CONF.DIR_RESULT, self.data['func'])
    # Copy result files
    for key, path in self.files.items():
        logging.info("%s", fstr(path))
        dst = dir_result.joinpath(path.name)
        utils.copy_file(path, dst)
        self.files[key] = dst
    # Copy unit test specification
    fname = '{0}.xlsx'.format(self.data['func'])
    dst = dir_spec.joinpath(fname)
    logging.info("%s", fstr(dst))
    utils.copy_file(template, dst)
    self.files['xlsx'] = dst
def xlsx_update(self):
    '''Update unit test specification'''
    path = self.files['xlsx']
    logging.info("Updating unit test specification '%s'", fstr(path, 0))
    self.wb = load_workbook(path)
    # Update worksheet 1
    self.xlsx_update_ws1()
    # Update worksheet 6
    self.xlsx_update_ws6()
def xlsx_update_ws6(self):
    logging.info(fstr("Updating worksheet 6"))
    self.wb.active = 6
    ws = self.wb.active
    lines = [l.replace('\n', '') for l in utils.read_file(self.files['txt'])]
    # Write each line into column A, starting at row 5
    row = 5
    for i, line in enumerate(lines):
        ws['A{0}'.format(row + i)] = line
    # Save excel
    self.xlsx_save()
def check_coverage(self, path):
    logging.info("Checking test coverage '%s'", fstr(Path(path), 0))

    def get_value(line, char=':'):
        logging.debug("Parsing line %s", [line])
        index = line.index(char)
        return line[(index+1):].strip()

    def find_package(src_full):
        '''Find package based on source path'''
        path = src_full.split('\\root\\')[0]
        dirname = Path(path).name
        config = utils.load_config()
        package = None
        for pkg in config.get('packages', {}).keys():
            if dirname.startswith(pkg):
                package = pkg
        return package

    lines = utils.read_file(path)
    keys = ['func_full', 'src_full', 'c0', 'c1', 'mcdc', 'date',
            'func', 'src_dir', 'src_name']
    data = {keys[i]: get_value(lines[i]) for i in range(len(keys)-3)}
    # Check that C0/C1/MCDC are numeric percentages (raises ValueError if not)
    [int(data[k][:-1]) for k in ['c0', 'c1', 'mcdc']]
    src_rel = data['src_full'].split('\\target\\')[-1]
    data.update({
        'func': data['func_full'].split('/')[-1],
        'src_dir': Path(src_rel).parent,
        'src_name': Path(src_rel).name,
        'src_rel': src_rel,
        'summary': lines[:6],
        'package': find_package(data['src_full']),
    })
    # Print test coverage summary
    space = ' '*3
    logging.info("Test coverage summary\n\n%s%s",
                 space, space.join(lines[:6]))
    return data
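# Shape of the coverage report header that check_coverage assumes (content is
# hypothetical; only the "key : value" layout of the first six lines is implied
# by the code, which maps them to func_full, src_full, c0, c1, mcdc and date):
#
#   Function : module/my_func
#   Source   : D:\work\pkg_x\root\sub\target\src\my_func.c
#   C0       : 100%
#   C1       : 100%
#   MC/DC    : 100%
#   Date     : 2024/01/01
#
# get_value("C0 : 100%") returns "100%"; int("100%"[:-1]) then checks the figure
# is numeric. find_package("D:\\work\\pkg_x\\root\\...") would resolve the
# package to 'pkg_x' if a package with that prefix exists in the config.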
def check_index(self, header='No.'):
    logging.info(fstr('Checking index'))
    index = self.get_index_header(header)
    lst = [self.get_number(row[index]) for row in self.table]
    lst = [n for n in lst if n is not None]
    # (Index) start at 1
    if lst[0] != 1:
        dsc = 'No.{0}'.format(lst[0])
        self.add_error('Index start at', dsc)
    # (Index) Continuous
    prev = 0
    for tcno in lst:
        if prev != 0 and tcno != (prev + 1):
            dsc = 'No.{0} -> {1}'.format(prev, tcno)
            self.add_error('Index hopping', dsc)
        prev = tcno
def xlsx_update_ws1(self):
    logging.info(fstr("Updating worksheet 1"))
    self.wb.active = 1
    ws = self.wb.active
    ws['F8'] = str(self.data['src_dir']).replace('\\', '/')
    ws['F9'] = self.data['src_name']
    ws['F10'] = self.data['func']
    ws['F11'] = '{0}.csv'.format(self.data['func'])
    f14 = 'テスト結果: {confirm}\n' \
          'C0網羅率 : {c0}\n' \
          'C1網羅率 : {c1}\n' \
          'MC/DC網羅率 : {mcdc}\n' \
          '問題点 : {issue}' \
          .format(**self.data)
    ws['F14'] = f14
    # Update 1.2
    for label, text in CONF.XLS_LABEL.items():
        row, col = utils.find_cell(ws, text)
        lb_data = self.tctbl.label_data.get(label, {})
        # r, c = utils.find_cell_next(ws, row, col, count_next=1)
        if lb_data == {}:
            ws.cell(row, col+4, '不要')
        else:
            ws.cell(row, col+4, '必要')
            ws.cell(row, col+7, '-')
        # i = 0
        # for cmt, lst in lb_data.items():
        #     if i > 0:
        #         row = row + 1
        #         ws.insert_rows(row)
        #     ws.cell(row, col + 5, cmt)
        #     ws.cell(row, col + 6, utils.collapse_list(lst))
        #     i += 1
    # Save excel
    self.xlsx_save()
def check_confirmation(self, header='Confirmation', header_no='No.'):
    logging.info(fstr('Checking confirmation'))
    index_no = self.get_index_header(header_no)
    index = self.get_index_header(header)
    dct = {
        self.get_number(row[index_no]): self.get_text(row[index])
        for row in self.table
        if row[index_no].get('class') not in CONF.TCCLS_CMT
    }
    lsterr = [
        k for k, v in dct.items()
        if k is not None and v not in ['OK', 'Fault']
    ]
    if len(lsterr) > 0:
        dsc = 'No.{0}'.format(', '.join([str(n) for n in lsterr]))
        self.add_error('Missing confirmation', dsc)
    else:
        self.confirm = 'NG' if 'Fault' in dct.values() else 'OK'
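# Behaviour sketch for check_confirmation (values illustrative): with
# confirmation texts {1: 'OK', 2: 'Fault', 3: 'OK'} every entry is valid, so no
# error is added and self.confirm becomes 'NG' because a 'Fault' is present;
# with {1: 'OK', 2: ''} entry No.2 is reported as 'Missing confirmation' and
# self.confirm is left unset.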
def check_tctbl(self):
    path = self.files['tbl']
    logging.info("Checking testcase table '%s'", fstr(path, 0))
    self.tctbl = Testcase(path, self.data['func'])
    self.data['confirm'] = self.tctbl.confirm
        # fielddat[videofield]=video_ece_265a_a00_jan_13_le_hybrid_tcns.mp4
        else:
            print("no " + str(videofield) + " to trim for this lecture")
    else:
        print('skipping video')

for videototrim in vids2trim:
    print("trimming video '" + str(videototrim["oldfile"]) + "'")
    # Expect a dotted, three-character file extension (e.g. ".mp4")
    assert videototrim["oldfile"][-4] == '.', str(videototrim["oldfile"])
    oldvideoname = videototrim["oldfile"]
    newvideoname = videototrim["newfile"]
    oldloc = videototrim["oldloc"]
    full_key = f'{univ2lect_key}/{oldvideoname}'
    file_in = f'{tmpdir}{oldloc}'
    file_out = f'{tmpdir}{newvideoname}'
    time_start = videototrim["-ss"]
    time_durat = videototrim["-to"] - time_start
    # Fetch the source video, trim it with a stream copy (no re-encode),
    # then upload the trimmed clip back to S3
    s3_download_file(bucket, full_key, file_in)
    ffmpeg = ['ffmpeg', '-y', '-i', file_in,
              '-ss', fstr(time_start), '-t', fstr(time_durat),
              '-vcodec', 'copy', '-acodec', 'copy', file_out]
    subprocess_check_output(ffmpeg)
    prefolder = videototrim["prefolder"]
    full_key = f'{univ2lect_key}/{prefolder}{newvideoname}'
    s3_upload_file(bucket, full_key, file_out)
    if "videofield" in videototrim:
        getorpostcontent(videototrim['videofield'], newvideoname)
def input_note(self):
    note = input("Add a note for {} (press {} to finish): "
                 .format(colored(self.key, "g"), colored("Enter", "c")))
    return utils.fstr(note)