def save_video_to_frames(video_filename, output_dir, max_len, min_h, min_w, prefix=''):
    video_name = prefix + video_filename.split('/')[-1].split('.')[0]
    cap = cv2.VideoCapture(video_filename)
    frame_count = 1
    video_dir = os.path.join(output_dir, video_name)
    while frame_count <= max_len:
        ret, img = cap.read()
        if not ret:
            logger.warning(
                f"{video_filename} len {frame_count} < max_len {max_len}")
            break
        h, w, c = img.shape
        if h < min_h or w < min_w:
            logger.warning(f"h {h} < min_h {min_h} or w {w} < min_w {min_w}")
            break
        make_dirs(video_dir)
        output_filename = os.path.join(video_dir, f"{frame_count:04d}.png")
        logger.debug(f"Saving {output_filename}")
        cv2.imwrite(output_filename, img)
        frame_count += 1
    # Release the capture handle so the file isn't held open.
    cap.release()
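# A minimal usage sketch (the clip path, output directory, and size limits
# below are hypothetical values, not taken from this project):
#
#   save_video_to_frames('videos/clip01.mp4', 'frames', max_len=120,
#                        min_h=240, min_w=320, prefix='train_')
#
# This would write frames/train_clip01/0001.png .. 0120.png, stopping early
# if the clip runs out of frames or a frame is smaller than min_h x min_w.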
def evaluate_video_error(result_images, gt_images, masks,
                         flownet_checkpoint_path: str,
                         evaluate_warping_error=True, printlog=True):
    total_error = 0
    total_psnr = 0
    total_ssim = 0
    total_p_dist = 0
    for i, (result, gt, mask) in enumerate(zip(result_images, gt_images, masks)):
        # mask = np.expand_dims(mask, 2)
        mse, ssim_value, psnr_value, p_dist = evaluate_image(gt, result)
        total_error += mse
        total_ssim += ssim_value
        total_psnr += psnr_value
        total_p_dist += p_dist
        logger.debug(
            f"Frame {i}: MSE {mse} PSNR {psnr_value} SSIM {ssim_value} "
            f"Percep. Dist. {p_dist}")

    if evaluate_warping_error:
        init_warping_model(flownet_checkpoint_path)
        # These inputs are lists of images; after np.array each frame has
        # shape (H, W, C), while temporal_warping_error expects
        # (B, L, C, H, W), so the tensors are unsqueezed and permuted.
        targets = torch.Tensor(
            [np.array(x) for x in gt_images]).unsqueeze(0).permute(0, 1, 4, 2, 3)
        masks = torch.Tensor(
            [np.array(x) for x in masks]).unsqueeze(3).unsqueeze(0).permute(0, 1, 4, 2, 3)
        outputs = torch.Tensor(
            [np.array(x) for x in result_images]).unsqueeze(0).permute(0, 1, 4, 2, 3)
        data_input = {"targets": targets, "masks": masks}
        model_output = {"outputs": outputs}
        warping_error = temporal_warping_error(data_input, model_output).cpu().item()
        if printlog:
            logger.info(f"Warping error: {warping_error}")
    else:
        warping_error = None

    if printlog:
        logger.info(f"Avg MSE: {total_error / len(result_images)}")
        logger.info(f"Avg PSNR: {total_psnr / len(result_images)}")
        logger.info(f"Avg SSIM: {total_ssim / len(result_images)}")
        logger.info(f"Avg Percep. Dist.: {total_p_dist / len(result_images)}")
    if total_error == 0:
        raise IOError("Error = 0")
    return (warping_error, total_error, total_psnr, total_ssim, total_p_dist,
            len(result_images))
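# Shape sanity check for the warping-error inputs (a standalone sketch with
# dummy frames; assumes 8-bit RGB images like those used in this module):
#
#   frames = [np.zeros((240, 320, 3), dtype=np.uint8) for _ in range(5)]
#   t = torch.Tensor([np.array(f) for f in frames]).unsqueeze(0).permute(0, 1, 4, 2, 3)
#   assert t.shape == (1, 5, 3, 240, 320)  # (B, L, C, H, W)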
def get_pkg_object(name=None):
    """
    Get an existing package object.

    :param name: (str) The name of the package.
    :return: (Package) The package object, or False if no name was given.
    """
    if not name:
        logger.debug('name is required to get package object.')
        return False
    pkg_obj = Package(name=name)
    return pkg_obj
def get_build_object(pkg_obj=None, bnum=None):
    """
    Get an existing build object.

    :param pkg_obj: (Package) The package object for the build.
    :param bnum: (int) The build number of an existing build.
    :return: (BuildObject) The build object.
    :raise AttributeError: if neither pkg_obj nor bnum is provided.
    """
    if not pkg_obj and not bnum:
        logger.debug('bnum or pkg_obj is required to get build object.')
        raise AttributeError
    bld_obj = BuildObject(pkg_obj=pkg_obj, bnum=bnum)
    return bld_obj
def read_filenames_from_dir(dir_name, reader, max_length=None):
    logger.debug(f"{reader} reading files from {dir_name}")
    filenames = []
    for root, dirs, files in os.walk(dir_name):
        assert len(dirs) == 0, f"There are directories: {dirs} in {root}"
        assert len(files) != 0, f"There are no files in {root}"
        filenames = [os.path.join(root, name) for name in sorted(files)]
        for name in filenames:
            logger.debug(name)
    if max_length is not None:
        return filenames[:max_length]
    return filenames
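# Example (hypothetical directory; the function requires a flat directory,
# since it asserts that no subdirectories exist):
#
#   frames = read_filenames_from_dir('frames/train_clip01', 'frame reader',
#                                    max_length=50)
#   # -> ['frames/train_clip01/0001.png', ..., 'frames/train_clip01/0050.png']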
def clean_up_after_release(version):
    status.current_status = 'ISO Release: Cleaning up old files.'
    logger.debug(status.current_status)
    all_files = os.listdir(RELEASE_DIR)
    moved = []
    for f in all_files:
        if version not in f:
            moved.append(f)
            # os.listdir returns bare names; build the full path for the move.
            shutil.move(os.path.join(RELEASE_DIR, f), '/opt/old-iso-images')
    all_old_files = os.listdir('/opt/old-iso-images')
    for f in all_old_files:
        if f not in moved:
            os.remove(os.path.join('/opt/old-iso-images', f))
def check_for_new_items():
    """
    Check monitored github/gitlab repos for new commits and queue builds as needed.
    """
    db.set("FEED_CHECKED", "True")
    db.expire("FEED_CHECKED", 900)
    build_pkgs = []
    for service, project_list in MONITOR_ITEMS.items():
        logger.debug((service, project_list))
        projects = project_list.split(",")
        logger.debug(projects)
        for project in projects:
            if not project or project == "":
                continue
            res = None
            if "github" == service:
                project = project.split("/")
                logger.debug(project)
                res = check_github_repo(project=project[0], repo=project[1])
            elif "gitlab" == service:
                logger.debug(project)
                res = check_gitlab_repo(project_id=project)
            if res:
                build_pkgs = build_pkgs + res
    if len(build_pkgs) > 0:
        add_to_build_queue(build_pkgs)
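# The loop above implies MONITOR_ITEMS maps a service name to a
# comma-separated project list, e.g. (hypothetical entries):
#
#   MONITOR_ITEMS = {
#       'github': 'numixproject/numix-icon-theme,antergos/cnchi',
#       'gitlab': '42,108',  # numeric GitLab project ids
#   }
#
# GitHub entries are split on '/' into (project, repo), while GitLab
# entries are passed through unchanged as project_id.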
def clean_up_after_release(version):
    status.current_status = 'ISO Release: Cleaning up old files.'
    logger.debug(status.current_status)
    all_files = [os.path.join(RELEASE_DIR, f) for f in os.listdir(RELEASE_DIR)]
    moved = []
    if len(all_files) <= 16:
        return
    for f in all_files:
        files = [os.path.join(RELEASE_DIR, x) for x in os.listdir(RELEASE_DIR)]
        if version not in f and len(files) > 16:
            shutil.move(f, '/opt/old-iso-images')
            moved.append(os.path.basename(f))
    old_imgs = '/opt/old-iso-images'
    all_old_files = [os.path.join(old_imgs, f) for f in os.listdir(old_imgs)]
    if len(moved) > 0:
        for f in all_old_files:
            if os.path.basename(f) not in moved:
                os.remove(f)
def __init__(self, request=None):
    if not request:
        raise ValueError(
            'request is required to create a {0} instance.'.format(
                self.__class__.__name__))
    super().__init__()
    if isinstance(request, dict):
        self.is_monitor = True
    self.request = request
    self.building = status.now_building
    if self.is_monitor or self.is_from_authorized_sender():
        if self.is_manual:
            self.process_manual()
        elif self.is_cnchi and self.request.args.get('result', None) is None:
            self.process_cnchi_start()
        elif self.is_cnchi and self.request.args.get('result', None) is not None:
            install_id = self.request.args.get('install_id', None)
            result = self.request.args.get('result', None)
            if install_id is not None and result is not None:
                logger.debug('Cnchi install_id {0} result is {1}'.format(install_id, result))
                result = AntergosInstallation.bool_string_helper(result)
                logger.debug(result)
                self.process_cnchi_end(install_id, result)
        if self.is_github:
            self.process_github()
        if len(self.changes) > 0:
            self.process_changes()
    else:
        if not self.result:
            self.result = 'Nothing to see here, move along ...'
def prep_release(self):
    status.current_status = 'ISO Release: Step 1/4 - Generating checksum for %s' % self.file_name
    logger.debug(status.current_status)
    self.generate_checksums()

    status.current_status = 'ISO Release: Step 2/4 - Creating detached gpg signature for %s' % self.file_name
    logger.debug(status.current_status)
    self.sign_with_gnupg()

    status.current_status = 'ISO Release: Step 3/4 - Creating torrent file for %s' % self.file_name
    logger.debug(status.current_status)
    self.create_torrent_file()
def get_build_object(bnum=None, pkg_obj=None):
    if not pkg_obj and not bnum:
        logger.debug('bnum or pkg_obj is required to get build object.')
        raise AttributeError
    bld_obj = BuildObject(bnum=bnum, pkg_obj=pkg_obj)
    return bld_obj
def do_release(self):
    status.current_status = 'ISO Release: Step 4/4 - Moving %s to release directory.' % self.file_name
    logger.debug(status.current_status)
    for f in self.files:
        shutil.move(f, RELEASE_DIR)
def get_from_pkgbuild(self, var=None):
    """
    Get a variable from the package's PKGBUILD (which is stored in antergos-packages gh repo).

    :param var: (str) A variable to extract from the PKGBUILD.
    :return: (str) The variable's value after extracted from PKGBUILD.
    """
    if var is None:
        logger.error('get_from_pkgbuild var is none')
        return ''
    self.maybe_update_pkgbuild_repo()
    paths = [os.path.join('/var/tmp/antergos-packages/', self.pkgname),
             os.path.join('/var/tmp/antergos-packages/deepin_desktop', self.pkgname),
             os.path.join('/var/tmp/antergos-packages/cinnamon', self.pkgname)]
    for p in paths:
        logger.debug(p)
        if os.path.exists(p):
            ppath = os.path.join(p, 'PKGBUILD')
            logger.debug(ppath)
            if os.path.exists(ppath):
                path = ppath
                if p == paths[0] and 'cinnamon' != self.pkgname and len(self.allowed_in) == 0:
                    self.allowed_in.append('main')
                break
    else:
        logger.error('get_from_pkgbuild cant determine pkgbuild path for %s', self.name)
        return ''

    with open(path) as pkgbuild_file:
        parse = pkgbuild_file.read()
    dirpath = os.path.dirname(path)

    if var in ['source', 'depends', 'makedepends', 'arch']:
        cmd = 'source ' + path + '; echo ${' + var + '[*]}'
    else:
        cmd = 'source ' + path + '; srcdir=$CWD; echo ${' + var + '}'

    if var == "pkgver" and ('git+' in parse or 'cnchi' in self.name or 'git://' in parse):
        giturl = re.search('(?<=git\\+).+(?="|\')', parse)
        if giturl:
            giturl = giturl.group(0)
        else:
            giturl = re.search('(?<="|\')git:.+(?="|\')', parse)
            if giturl:
                giturl = giturl.group(0)
            else:
                giturl = ''
        gitnm = self.name
        if self.name == 'pamac-dev':
            gitnm = 'pamac'
        elif self.name == 'cnchi-dev':
            gitnm = 'cnchi'
            giturl = 'http://github.com/lots0logs/cnchi-dev.git'
        elif self.name == 'cnchi':
            giturl = 'http://github.com/antergos/cnchi.git'

        if os.path.exists(os.path.join(dirpath, gitnm)):
            shutil.rmtree(os.path.join(dirpath, gitnm), ignore_errors=True)
        try:
            subprocess.check_output(['git', 'clone', giturl, gitnm], cwd=dirpath)
        except subprocess.CalledProcessError as err:
            logger.error(err.output)

        # For VCS packages, pkgver is a function in the PKGBUILD, so run it
        # instead of echoing a variable.
        cmd = 'source ' + path + '; ' + var

    proc = subprocess.Popen(cmd, executable='/bin/bash', shell=True, cwd=dirpath,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()

    if len(out) > 0:
        # Popen returns bytes; decode so the documented str is returned.
        out = out.decode('utf-8').strip()
        logger.info('proc.out is %s' % out)
    if len(err) > 0:
        logger.error('proc.err is %s', err)

    return out
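# Example usage (a sketch, assuming get_from_pkgbuild is a method of the
# Package objects returned by get_pkg_object; 'cnchi' is one of the
# packages handled above):
#
#   pkg = get_pkg_object(name='cnchi')
#   version = pkg.get_from_pkgbuild('pkgver')
#   depends = pkg.get_from_pkgbuild('depends')  # bash array, space-separated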
def process_changes(self):
    tpl = 'Webhook triggered by <strong>{0}.</strong> Packages added to the build queue: {1}'
    if self.repo == "antergos-packages":
        logger.debug("Build hook triggered. Updating build queue.")
        has_pkgs = False
        no_dups = []
        for changed in self.changes:
            # logger.info(changed)
            if len(changed) > 0:
                for item in changed:
                    # logger.info(item)
                    if item and (self.is_gitlab or self.is_numix or self.is_cnchi):
                        pak = item
                    elif item and "PKGBUILD" in item:
                        pak, pkb = item.rsplit('/', 1)
                        pak = pak.rsplit('/', 1)[-1]
                    else:
                        pak = None
                    # logger.info(pak)
                    if pak and 'antergos-iso' != pak:
                        logger.info('Adding %s to the build queue.' % pak)
                        no_dups.append(pak)
                        status.all_packages.add(pak)
                        has_pkgs = True
        if has_pkgs:
            the_pkgs = list(set(no_dups))
            last_pkg = the_pkgs[-1]
            html = []
            if len(the_pkgs) > 1:
                html.append('<ul class="hook-pkg-list">')
            for p in the_pkgs:
                if p:
                    if len(the_pkgs) > 1:
                        item = '<li>{0}</li>'.format(p)
                    else:
                        item = '<strong>{0}</strong>'.format(p)
                    html.append(item)
                    if p == last_pkg:
                        if self.is_gitlab:
                            source = 'Gitlab'
                            tltype = 2
                        else:
                            source = 'Github'
                            tltype = 1
                        if len(the_pkgs) > 1:
                            html.append('</ul>')
                        the_pkgs_str = ''.join(html)
                        tl_event = get_timeline_object(
                            msg=tpl.format(source, the_pkgs_str),
                            tl_type=tltype, packages=the_pkgs)
                        p_obj = package.get_pkg_object(name=p)
                        events = p_obj.tl_events
                        events.append(tl_event.event_id)
                        del p_obj
            trans_obj = get_trans_object(the_pkgs)
            status.queue.append(trans_obj.tnum)
            queue.enqueue_call(builder.handle_hook, timeout=84600)
    if not self.result:
        self.result = json.dumps({'msg': 'OK!'})
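# The parsing above expects self.changes to be a list of per-commit change
# lists, each entry a repo-relative file path (hypothetical example):
#
#   self.changes = [
#       ['cinnamon/cinnamon-desktop/PKGBUILD', 'README.md'],
#       ['deepin_desktop/deepin-dock/PKGBUILD'],
#   ]
#
# 'cinnamon/cinnamon-desktop/PKGBUILD' yields pak == 'cinnamon-desktop';
# for github hooks, paths that don't touch a PKGBUILD are ignored.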
""" Monitor commit activity on 3rd-party repos. Schedule a build when new commits are detected. """ from utils.logging_config import logger from utils.redis_connection import db from utils.server_status import status import webhook from github3 import login from gitlab import Gitlab import json GITLAB_TOKEN = status.gitlab_token GITHUB_TOKEN = status.github_token ITEMS_HASH = db.hgetall("antbs:monitor:list") or False logger.debug(type(ITEMS_HASH)) MONITOR_ITEMS = ITEMS_HASH if ITEMS_HASH else None def maybe_check_for_new_items(): """ :return: """ return db.exists("FEED_CHECKED") def check_for_new_items(): """
def do_release(self):
    tpl = 'ISO Release: Step 4/4 - Moving {0} to release directory.'
    status.current_status = tpl.format(self.file_name)
    logger.debug(status.current_status)
    for f in self.files:
        shutil.move(f, RELEASE_DIR)