def update_and_push_github(self, var=None, old_val=None, new_val=None):
    """
    Replace ``var=old_val`` with ``var=new_val`` in this package's PKGBUILD,
    write the result to the local checkout, and commit it to the
    antergos-packages github repo.

    :param var: (str) PKGBUILD variable name to update (e.g. 'pkgver').
    :param old_val: (str) The variable's current value.
    :param new_val: (str) The variable's new value.
    :return: True when the commit succeeded, False when it failed,
             None when there was nothing to do.
    """
    if self.push_version != "True" or old_val == new_val:
        return
    gh = login(token=self.gh_user)
    repo = gh.repository('antergos', 'antergos-packages')
    tf = repo.file_contents(self.name + '/PKGBUILD')
    content = tf.decoded
    search_str = '%s=%s' % (var, old_val)
    replace_str = '%s=%s' % (var, new_val)
    content = content.replace(search_str, replace_str)
    # BUGFIX: os.path.join discards all earlier components when a later one is
    # absolute, so the previous '/PKGBUILD' argument made ppath resolve to
    # just '/PKGBUILD' instead of the package's local PKGBUILD file.
    ppath = os.path.join('/opt/antergos-packages/', self.name, 'PKGBUILD')
    with open(ppath, 'w') as pbuild:
        pbuild.write(content)
    commit = tf.update(
        '[ANTBS] | Updated %s to %s in PKGBUILD for %s' % (var, new_val, self.name),
        content)
    if commit and commit['commit'] is not None:
        try:
            logger.info('@@-package.py-@@ | commit hash is %s', commit['commit'].sha)
        except AttributeError:
            pass
        return True
    else:
        logger.error('@@-package.py-@@ | commit failed')
        return False
def __init__(self, name):
    """
    Initialize a Package, setting up default redis-backed fields for
    brand-new packages (no stored pkgname yet) that exist in the local
    PKGBUILD repo checkout.

    :param name: (str) The package's name (pkgname).
    """
    # BUGFIX: super(...).__init__ is already bound to self; passing self
    # again shifted every argument by one position.
    super(Package, self).__init__(name=name)
    self.maybe_update_pkgbuild_repo()
    try:
        # `not self.pkgname` already covers the empty-string case.
        if not self.pkgname and os.path.exists(os.path.join(REPO_DIR, name)):
            key_lists = ['redis_string', 'redis_string_bool', 'redis_string_int',
                         'redis_list', 'redis_zset']
            for key_list_name in key_lists:
                for key in self.all_keys[key_list_name]:
                    if key_list_name.endswith('string') and key != 'name':
                        setattr(self, key, '')
                    elif key_list_name.endswith('bool'):
                        setattr(self, key, False)
                    elif key_list_name.endswith('int'):
                        setattr(self, key, 0)
                    elif key_list_name.endswith('list'):
                        setattr(self, key, RedisList.as_child(self, key, str))
                    elif key_list_name.endswith('zset'):
                        setattr(self, key, RedisZSet.as_child(self, key, str))
            self.pkgname = name
            # Allocate a new sequential package id.
            self.pkg_id = db.incr('antbs:misc:pkgid:next')
            status.all_packages().add(self.name)
    except Exception:
        logger.error('unable to init package object for %s', name)
def evaluate_video(
    result_video_dir, gt_video_dir, video_mask_dir, flownet_checkpoint_path
):
    """
    Evaluate one result video against its ground truth under the given masks.

    :param result_video_dir: Directory of the inpainted/result frames.
    :param gt_video_dir: Directory of the ground-truth frames.
    :param video_mask_dir: Directory of the mask frames; must exist.
    :param flownet_checkpoint_path: Checkpoint used by the flow-based metric.
    :return: Result of evaluate_video_error, or None on a frame/mask
             count mismatch.
    :raises IOError: when video_mask_dir does not exist.
    """
    result_frame_reader = FrameReader(result_video_dir, evaluate_image=True).files
    gt_frame_reader = FrameReader(gt_video_dir, evaluate_image=True).files
    if os.path.exists(video_mask_dir):
        masks = MaskReader(video_mask_dir)[:len(gt_frame_reader)]
    else:
        # BUGFIX: the message was missing its f-prefix, so the literal
        # placeholder text was raised instead of the actual path.
        raise IOError(f"{video_mask_dir} not exists")
    if len(masks) != len(result_frame_reader):
        logger.error("Size mismatch")
    return evaluate_video_error(result_frame_reader, gt_frame_reader, masks,
                                flownet_checkpoint_path)
def maybe_update_pkgbuild_repo():
    """Sync the local antergos-packages checkout: clone it when absent,
    otherwise hard-reset to origin/master and pull. Git failures are
    logged, not raised."""
    repo_dir = '/var/tmp/antergos-packages'
    try:
        if os.path.exists(repo_dir):
            # Discard any local changes before pulling the latest commits.
            subprocess.check_call(['git', 'reset', '--hard', 'origin/master'],
                                  cwd=repo_dir)
            subprocess.check_call(['git', 'pull'], cwd=repo_dir)
        else:
            subprocess.check_call(
                ['git', 'clone', 'http://github.com/antergos/antergos-packages'],
                cwd='/var/tmp')
    except subprocess.CalledProcessError as err:
        logger.error(err)
def main(args):
    """Verify that each directory named in args.dirname_list (one name per
    line, first whitespace-separated token) contains exactly args.length
    files under args.root_dir; log an error for each mismatch."""
    logger.info(f"Checking {args.root_dir}")
    with open(args.dirname_list, 'r') as fin:
        dirnames = [line.split()[0] for line in fin.readlines()]
    # dirnames = os.listdir(args.root_dir)
    for i, dirname in enumerate(dirnames):
        # Progress heartbeat every 2000 directories.
        if i % 2000 == 0:
            logger.info(f"Checking no. {i}")
        dirpath = os.path.join(args.root_dir, dirname)
        file_count = len(os.listdir(dirpath))
        if file_count != args.length:
            logger.error(f"{dirpath} len {file_count}")
def main(args):
    """Turn every frame directory under args.input_root_dir into an .mp4
    in args.output_root_dir; failures are logged per-directory and do not
    stop the batch."""
    make_dirs(args.output_root_dir)
    for dirname in read_dirnames_under_root(args.input_root_dir):
        try:
            output_path = os.path.join(args.output_root_dir, f"{dirname}.mp4")
            frames_dir = os.path.join(args.input_root_dir, dirname, args.input_postfix)
            frame_reader = FrameReader(frames_dir, max_length=args.max_len)
            frame_reader.write_files_to_video(output_path)
        except Exception as err:
            logger.error(err, exc_info=True)
def process_manual(self):
    """Replay a stored github webhook payload chosen by manual_trans_index
    (1-based from the end of the payload index list). On lookup failure the
    result code is set to 500 and processing stops."""
    position = self.manual_trans_index
    try:
        key = db.lrange('antbs:github:payloads:index', -position, -position)
        logger.info(key)
        logger.info(key[0])
        self.payload = db.hgetall(key[0])
    except Exception as err:
        logger.error(err)
        self.result = 500
        return
    # The payload stores commits as a repr'd python literal.
    self.commits = ast.literal_eval(self.payload['commits'])
    self.is_github = True
def evaluate_video(result_video_dir: str, gt_video_dir: str, video_mask_dir: str,
                   flownet_checkpoint_path: str):
    """Compare the frames in result_video_dir against gt_video_dir under the
    masks in video_mask_dir. Raises IOError when the mask directory is
    missing; logs (but does not abort on) a frame/mask count mismatch."""
    result_frames = FrameReader(result_video_dir).files
    gt_frames = FrameReader(gt_video_dir).files
    # Guard clause instead of if/else: fail fast on a missing mask dir.
    if not os.path.exists(video_mask_dir):
        raise IOError(f"{video_mask_dir} not exists")
    masks = MaskReader(video_mask_dir)[:len(gt_frames)]
    if len(masks) != len(result_frames):
        logger.error("Size mismatch")
    return evaluate_video_error(result_frames, gt_frames, masks,
                                flownet_checkpoint_path)
def get_from_pkgbuild(self, var=None):
    """
    Extract a variable's value from the package's PKGBUILD by sourcing it
    in a bash subshell.

    :param var: (str) The PKGBUILD variable to extract.
    :return: (str/bytes) The variable's value, or '' when var is None or
             no PKGBUILD could be located.
    """
    if var is None:
        logger.error('get_from_pkgbuild var is none')
        return ''
    self.maybe_update_pkgbuild_repo()
    path = None
    paths = [os.path.join('/var/tmp/antergos-packages/', self.name),
             os.path.join('/var/tmp/antergos-packages/deepin_desktop', self.name),
             os.path.join('/var/tmp/antergos-packages/cinnamon', self.name)]
    for p in paths:
        if os.path.exists(p):
            path = os.path.join(p, 'PKGBUILD')
            if p == paths[0] and 'cinnamon' != self.pkgname and len(self.allowed_in()) == 0:
                self.allowed_in().append('main')
            break
    else:
        logger.error('get_from_pkgbuild cant determine pkgbuild path')
        # BUGFIX: previously fell through to open(None) and crashed with a
        # TypeError; bail out the same way the var-is-None case does.
        return ''
    # Close the file handle instead of leaking it.
    with open(path) as pkgbuild_file:
        parse = pkgbuild_file.read()
    dirpath = os.path.dirname(path)
    if var in ['source', 'depends', 'makedepends', 'arch']:
        # Array variables: echo all elements.
        cmd = 'source ' + path + '; echo ${' + var + '[*]}'
    else:
        cmd = 'source ' + path + '; echo ${' + var + '}'
    if var == "pkgver" and ('git+' in parse or self.name == 'cnchi-dev'):
        # Git-based pkgver: clone the source repo so the PKGBUILD's pkgver()
        # function can compute the version from a checkout.
        giturl = re.search('(?<=git\\+).+(?="|\')', parse)
        if giturl:
            giturl = giturl.group(0)
        pkgdir, pkgbuild = os.path.split(path)
        gitnm = self.name
        if self.name == 'pamac-dev':
            gitnm = 'pamac'
        elif self.name == 'cnchi-dev':
            giturl = 'http://github.com/lots0logs/cnchi-dev.git'
        if os.path.exists(os.path.join(pkgdir, gitnm)):
            shutil.rmtree(os.path.join(pkgdir, gitnm), ignore_errors=True)
        try:
            subprocess.check_output(['git', 'clone', giturl, gitnm], cwd=pkgdir)
            if self.name == 'cnchi-dev':
                subprocess.check_output(['tar', '-cf', 'cnchi-dev.tar', 'cnchi-dev'],
                                        cwd=pkgdir)
        except subprocess.CalledProcessError as err:
            logger.error(err.output)
        # Running the bare variable name invokes the PKGBUILD's pkgver()
        # bash function after sourcing.
        cmd = 'source ' + path + '; ' + var
    proc = subprocess.Popen(cmd, executable='/bin/bash', shell=True, cwd=dirpath,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    if len(out) > 0:
        out = out.strip()
        logger.info('@@-package.py-@@ | proc.out is %s' % out)
    if len(err) > 0:
        logger.error('@@-package.py-@@ | proc.err is %s', err)
    return out
def sync_with_filesystem(self):
    """Scan <self.path>/x86_64 for package archives ('.pkg.' files, excluding
    signatures) and cache their (name, version-release) pairs on
    self.pkgs_fs / self.pkg_count_fs."""
    repodir = os.path.join(self.path, 'x86_64')
    candidates = {
        f for f in os.listdir(repodir)
        if '.pkg.' in f and not f.endswith('.sig')
    }
    parsed = []
    for filename in candidates:
        base = os.path.basename(filename)
        # Archive names look like: <name>-<ver>-<rel>-<arch>.pkg.tar.xz
        pieces = base.rsplit('-', 3)
        if len(pieces) != 4:
            logger.error("unexpected pkg: " + base)
            continue
        name, version, rel, _suffix = pieces
        parsed.append((name, version + '-' + rel))
    self.pkgs_fs = parsed
    self.pkg_count_fs = len(parsed)
def get_landmarks_contour(image, landmarks_predictor_path=DEFAULT_PATH):
    """Detect exactly one face in a BGR image and return a grayscale contour
    image of its landmarks. On any failure (no/multiple faces, predictor
    error) an all-black image of the same size is returned instead.

    The dlib shape predictor is loaded lazily into the module-level
    `predictor` global on first use."""
    global predictor
    if predictor is None:
        assert os.path.exists(landmarks_predictor_path)
        predictor = dlib.shape_predictor(landmarks_predictor_path)
    gray = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    try:
        faces = detector(gray, 1)
        assert len(faces) == 1
        points = landmarks_to_np(predictor(gray, faces[0]))
        contour = connect_landmarks(gray.shape, points)
    except Exception as err:
        logger.error(err, exc_info=True)
        logger.error("Set countour to black")
        contour = np.zeros_like(gray)
    return contour
def iso_release_job():
    """Prep and publish a release for every Antergos ISO package; per-ISO
    failures are logged and skipped. Cleans up old files afterwards using
    the version of the first ISO processed."""
    status.idle = False
    version = None
    for iso_name in ('antergos-x86_64', 'antergos-i686',
                     'antergos-minimal-x86_64', 'antergos-minimal-i686'):
        try:
            pkg_obj = package.Package(name=iso_name)
            iso = ISOUtility(pkg_obj=pkg_obj)
            # Remember the release version from the first package.
            if version is None:
                version = pkg_obj.pkgver
            iso.prep_release()
            iso.do_release()
        except Exception as err:
            logger.error(err)
    if version:
        clean_up_after_release(version)
    status.idle = True
def check_github_repo(project=None, repo=None):
    """
    Check a github repo for a new commit (for repos tracked by HEAD) or a
    new release tag since the last value recorded in redis.

    :param project: (str) The github user/org that owns the repo.
    :param repo: (str) The repo name.
    :return: The (possibly remapped) package name when something new was
             found, otherwise an empty list.
    """
    new_items = []
    gh = login(token=GITHUB_TOKEN)
    key = 'antbs:monitor:github:{0}:{1}'.format(project, repo)
    last_id = db.get(key) or ''
    gh_repo = gh.repository(project, repo)
    latest = None
    if repo in ['pamac', 'numix-icon-theme', 'paper-gtk-theme']:
        # These repos are tracked by latest commit rather than by release.
        commits = gh_repo.commits()
        try:
            # BUGFIX: iterator.next() is Python-2-only; use the builtin.
            commit = next(commits)
            latest = commit.sha
        except StopIteration:
            pass
    else:
        releases = [r for r in gh_repo.releases()]
        try:
            release = releases[0]
            latest = release.tag_name
            # Strip the 'v' prefix from tags like 'v1.2.3'.
            # NOTE(review): replace() removes every 'v' in the tag, not just
            # a leading one — confirm no tracked repo uses 'v' mid-tag.
            latest = latest.replace('v', '')
        except Exception as err:
            logger.error(err)
    if latest != last_id:
        db.set(key, latest)
        # Map upstream repo names to our package names.
        if 'pamac' == repo:
            repo = 'pamac-dev'
        elif 'paper-gtk-theme' == repo:
            repo = 'gtk-theme-paper'
        elif repo in ['arc-theme', 'Arc-theme']:
            repo = 'gtk-theme-arc'
        new_items = repo
    return new_items
def rm_file_or_dir(src):
    """
    Remove a file or an entire directory tree at `src`, if present.

    :param src: (str) Path to remove.
    :return: True on success or when there was nothing to remove;
             False when removal failed (the error is logged).
    """
    if os.path.isdir(src):
        try:
            shutil.rmtree(src)
        except Exception as err:
            # BUGFIX: failures used to be swallowed and reported as success.
            logger.error(err)
            return False
        return True
    elif os.path.isfile(src):
        try:
            os.remove(src)
        except Exception as err:
            logger.error(err)
            return False
        return True
    else:
        # Nothing at that path (or it's a special file): treat as done.
        return True
def iso_release_job():
    """Release all four Antergos ISOs, record their mirror URL/md5 on the
    package objects, then flag the repo monitor (via redis) to verify
    propagation of the new files before old ones are deleted. The previous
    non-idle status string is restored afterwards when one was in effect."""
    saved_status = False
    if not status.idle and 'Idle' not in status.current_status:
        # A job is in progress; remember its status so we can restore it.
        saved_status = status.current_status
    else:
        status.idle = False
    status.current_status = 'Starting ISO Release Job...'
    version = None
    for iso_name in ('antergos-x86_64', 'antergos-i686',
                     'antergos-minimal-x86_64', 'antergos-minimal-i686'):
        try:
            pkg_obj = package.get_pkg_object(name=iso_name)
            iso = ISOUtility(pkg_obj=pkg_obj)
            iso.prep_release()
            iso.do_release()
            pkg_obj.iso_url = iso.mirror_url
            pkg_obj.iso_md5 = iso.md5
            if version is None:
                version = iso.version
            status.iso_pkgs.add(pkg_obj.name)
        except Exception as err:
            logger.error(err)
    if version and db:
        # We will use the repo monitor class to check propagation of the new
        # files before deleting the old files.
        db.set('antbs:misc:iso-release:do_check', version)
    if saved_status and not status.idle:
        status.current_status = saved_status
    else:
        status.idle = True
        status.current_status = 'Idle.'
def create_torrent_file(self):
    """Build <self.file_name>.torrent with mktorrent inside TESTING_DIR,
    announcing on four public trackers and listing self.mirror_url as a
    web seed. mktorrent failures are logged, not raised."""
    try:
        # Comma-joined announce list (same trackers, same order as before).
        announce = ('udp://tracker.openbittorrent.com:80,'
                    'udp://tracker.coppersurfer.tk:6969,'
                    'udp://tracker.leechers-paradise.org:6969,'
                    'udp://open.demonii.com:1337')
        cmd = ['mktorrent',
               '-a', announce,
               '-n', self.file_name,
               '-o', self.file_name + '.torrent',
               '-w', self.mirror_url,
               self.file_name]
        subprocess.check_output(cmd, cwd=TESTING_DIR)
    except subprocess.CalledProcessError as err:
        logger.error(err.output)
def maybe_update_pkgbuild_repo():
    """
    Refresh the local antergos-packages clone at most once per ~350 seconds,
    coordinating across processes with two redis keys:

      - PKGBUILD_REPO_UPDATED: set with a 350s TTL after a successful clone,
        acting as a freshness marker.
      - PKGBUILD_REPO_LOCK: a 300s lock so only one process performs the
        clone; other callers block until it finishes or the lock expires.
    """
    if not db.exists('PKGBUILD_REPO_UPDATED'):
        # setnx is atomic, so only the first caller acquires the lock.
        if db.setnx('PKGBUILD_REPO_LOCK', True):
            # TTL guards against a crashed holder leaving the lock forever.
            db.expire('PKGBUILD_REPO_LOCK', 300)
            try:
                # Re-clone from scratch rather than pulling into a possibly
                # dirty checkout.
                if os.path.exists('/var/tmp/antergos-packages'):
                    shutil.rmtree('/var/tmp/antergos-packages')
                subprocess.check_call(['git', 'clone', 'http://github.com/antergos/antergos-packages'],
                                      cwd='/var/tmp')
                db.setex('PKGBUILD_REPO_UPDATED', 350, True)
            except subprocess.CalledProcessError as err:
                logger.error(err)
                # Clear the freshness marker so the next caller retries.
                db.delete('PKGBUILD_REPO_UPDATED')
            db.delete('PKGBUILD_REPO_LOCK')
            return
        else:
            # Another process holds the lock; wait for it to finish (or for
            # the lock to expire).
            while not db.exists('PKGBUILD_REPO_UPDATED') and db.exists('PKGBUILD_REPO_LOCK'):
                time.sleep(2)
            return
def add_new_iso_version(self, iso_pkg_obj=None):
    """
    Publish a new ISO version (url, md5, pkgver) to the remote site's JSON
    API, using a nonce obtained from a first request. Sets self.success.

    :param iso_pkg_obj: Package object for the ISO; required.
    :return: (bool) True on success, False otherwise.
    """
    if iso_pkg_obj is None:
        logger.error('iso cant be None')
        return False
    else:
        iso_obj = iso_pkg_obj
        logger.info('adding_new_iso_version: %s', iso_obj)
    # Map the package name to its post id on the remote site.
    pid = self.post_id_map[iso_obj.pkgname]
    query = 'json=get_nonce&controller=' + self.dist + '&method=handle_request'
    post_url = 'https://' + self.dist + '.com/?' + query
    session = requests.Session()
    # Pin the outgoing source address for both schemes.
    session.mount('http://', SourceAddressAdapter((status.request_from, 0)))
    session.mount('https://', SourceAddressAdapter((status.request_from, 0)))
    session.auth = self.auth
    try:
        # Step 1: fetch a nonce for the subsequent authenticated request.
        req = session.get(post_url)
        req.raise_for_status()
        logger.info(req.text)
        req = req.json()
        logger.info(req)
        if req.get('nonce', False):
            nonce = req.get('nonce')
            # Step 2: post the new ISO metadata using the nonce + API key.
            query = 'json=' + self.dist + '.handle_request&nonce='
            post_url = 'https://' + self.dist + '.com/?' + query + nonce + '&api_key=' + API_KEY
            req = session.post(post_url, data=dict(pid=pid, url=iso_obj.iso_url,
                                                   md5=iso_obj.iso_md5,
                                                   version=iso_obj.pkgver))
            req.raise_for_status()
            logger.info(req.text)
            self.success = True
    except Exception as err:
        self.success = False
        logger.error(err)
        return False
    return True
def signal_handler(signum, frame):
    """SIGINT handler: clear the manager's pending file list so in-flight
    workers can finish and the process can drain gracefully."""
    manager.filepaths = []
    logger.error(
        f"Got ctrl+c, set manager filepaths = [], please wait until all workers are done"
    )
def add_new_iso_to_wordpress(self, iso=None):
    """
    Begin creating/updating the wordpress post for a new ISO.

    NOTE(review): this block appears truncated here — it only resolves the
    parent post id and initializes `title`; confirm the remainder of the
    implementation elsewhere in the project.

    :param iso: Package object for the ISO; required.
    """
    if iso is None:
        logger.error('iso cant be None')
        return
    # Parent post id for this ISO's download page.
    parent_id = self.post_id_map[iso.pkgname]
    title = ''
def get_from_pkgbuild(self, var=None):
    """
    Get a variable from the package's PKGBUILD (which is stored in
    antergos-packages gh repo) by sourcing the PKGBUILD in a bash subshell.

    :param var: (str) A variable to extract from the PKGBUILD.
    :return: (str) The variable's value after extracted from PKGBUILD, or ''
             when var is None or no PKGBUILD path could be determined.
    """
    if var is None:
        logger.error('get_from_pkgbuild var is none')
        return ''
    self.maybe_update_pkgbuild_repo()
    path = None
    # Candidate locations of this package's PKGBUILD within the repo clone.
    paths = [os.path.join('/var/tmp/antergos-packages/', self.pkgname),
             os.path.join('/var/tmp/antergos-packages/deepin_desktop', self.pkgname),
             os.path.join('/var/tmp/antergos-packages/cinnamon', self.pkgname)]
    for p in paths:
        logger.debug(p)
        if os.path.exists(p):
            ppath = os.path.join(p, 'PKGBUILD')
            logger.debug(ppath)
            if os.path.exists(ppath):
                path = ppath
                # Packages found at the repo root default to the 'main' repo.
                if p == paths[0] and 'cinnamon' != self.pkgname and len(self.allowed_in) == 0:
                    self.allowed_in.append('main')
                break
    else:
        logger.error('get_from_pkgbuild cant determine pkgbuild path for %s', self.name)
        return ''
    parse = open(path).read()
    dirpath = os.path.dirname(path)
    if var in ['source', 'depends', 'makedepends', 'arch']:
        # Array variables: echo all elements.
        cmd = 'source ' + path + '; echo ${' + var + '[*]}'
    else:
        # NOTE(review): $CWD is not a standard bash variable ($PWD is) —
        # confirm srcdir is meant to be set this way.
        cmd = 'source ' + path + '; srcdir=$CWD; echo ${' + var + '}'
    if var == "pkgver" and ('git+' in parse or 'cnchi' in self.name or 'git://' in parse):
        # Git-based pkgver: clone the source repo so the PKGBUILD's pkgver()
        # function can compute the version from a checkout.
        giturl = re.search('(?<=git\\+).+(?="|\')', parse)
        if giturl:
            giturl = giturl.group(0)
        else:
            # Fall back to a bare git:// URL in the source array.
            giturl = re.search('(?<="|\')git:.+(?="|\')', parse)
            if giturl:
                giturl = giturl.group(0)
            else:
                giturl = ''
        gitnm = self.name
        # Special-cased clone names/URLs for cnchi and pamac variants.
        if self.name == 'pamac-dev':
            gitnm = 'pamac'
        elif self.name == 'cnchi-dev':
            gitnm = 'cnchi'
            giturl = 'http://github.com/lots0logs/cnchi-dev.git'
        elif self.name == 'cnchi':
            giturl = 'http://github.com/antergos/cnchi.git'
        # Remove any stale checkout before cloning.
        if os.path.exists(os.path.join(dirpath, gitnm)):
            shutil.rmtree(os.path.join(dirpath, gitnm), ignore_errors=True)
        try:
            subprocess.check_output(['git', 'clone', giturl, gitnm], cwd=dirpath)
        except subprocess.CalledProcessError as err:
            logger.error(err.output)
        # Running the bare variable name invokes the PKGBUILD's pkgver()
        # bash function after sourcing.
        cmd = 'source ' + path + '; ' + var
    proc = subprocess.Popen(cmd, executable='/bin/bash', shell=True, cwd=dirpath,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    if len(out) > 0:
        out = out.strip()
        logger.info('proc.out is %s' % out)
    if len(err) > 0:
        logger.error('proc.err is %s', err)
    return out