def get_deps(self):
    """Collect queued dependencies for this package.

    Reads ``depends`` and ``makedepends`` from the PKGBUILD, strips any
    version constraint (e.g. ``foo>=1.2`` -> ``foo``), and keeps only
    names that are known packages currently in the build queue.

    :return: (tuple) ``(self.name, depends)`` where ``depends`` is the
        combined list of queued run-time and make-time dependencies.
    """
    depends = []
    deps = self.get_from_pkgbuild('depends').split()
    logger.info('deps are %s', deps)
    mkdeps = self.get_from_pkgbuild('makedepends').split()
    queue = status.queue
    # Run-time and make-time deps got identical treatment in two
    # copy-pasted loops; process them in a single pass instead.
    for dep in deps + mkdeps:
        # Strip a trailing version constraint ("pkg>=1.0" -> "pkg").
        has_ver = re.search(r'^[\d\w]+(?=\=|\>|\<)', dep)
        if has_ver is not None:
            dep = has_ver.group(0)
        if dep in status.all_packages and dep in queue:
            depends.append(dep)
            # NOTE(review): this overwrites on every match, so only the last
            # queued dep is kept — preserved from the original; the newer
            # revision of this method accumulates into a set instead.
            self.depends = dep
    res = (self.name, depends)
    return res
def get_version():
    """Extract the release version string from the first ISO file in TESTING_DIR.

    :return: (str) version like ``2015.10.1`` (``YYYY.M.D``).
    :raises ValueError: if no ISO file is present or its name carries no
        parseable version.
    """
    isos = [x for x in os.listdir(TESTING_DIR) if x.endswith('.iso')]
    if not isos:
        # The original indexed isos[0] unconditionally and crashed with an
        # unhelpful IndexError when the directory held no ISO.
        raise ValueError('no ISO file found in %s' % TESTING_DIR)
    match = re.match(r'\d{4}(\.\d{1,2}){2}', isos[0])
    if match:
        logger.info(match)
        return match.group(0)
    raise ValueError('could not parse version from %s' % isos[0])
def get_deps(self):
    """Collect queued dependencies for this package.

    Reads ``depends`` and ``makedepends`` from the PKGBUILD, strips
    version constraints, and records the names that are known packages
    currently in either the build queue or the hook queue. Run-time deps
    are added to ``self.depends``, make-time deps to ``self.makedepends``.

    :return: (tuple) ``(self.name, depends)`` — all queued deps combined.
    """
    depends = []
    deps = self.get_from_pkgbuild('depends').split()
    logger.info('deps are %s', deps)
    mkdeps = self.get_from_pkgbuild('makedepends').split()
    build_queue = status.queue
    hook_queue = status.hook_queue
    queue = build_queue + hook_queue
    all_deps = deps + mkdeps
    for dep in all_deps:
        # Strip a trailing version constraint ("pkg>=1.0" -> "pkg").
        has_ver = re.search(r'^[\d\w]+(?=\=|\>|\<)', dep)
        # The original tested `has_ver and has_ver is not None`, which is
        # redundant — a single None check suffices.
        if has_ver is not None:
            dep = has_ver.group(0)
        if dep in status.all_packages and dep in queue:
            depends.append(dep)
            # Track run-time vs make-time deps separately on the object.
            if dep in deps:
                self.depends.add(dep)
            elif dep in mkdeps:
                self.makedepends.add(dep)
    res = (self.name, depends)
    return res
def update_and_push_github(self, var=None, old_val=None, new_val=None):
    """Replace ``var=old_val`` with ``var=new_val`` in this package's
    PKGBUILD, write the result to the local packages checkout, and commit
    the change to the antergos-packages GitHub repository.

    :param var: (str) PKGBUILD variable name (e.g. ``pkgver``).
    :param old_val: (str) current value of the variable.
    :param new_val: (str) replacement value.
    :return: (bool) True if the GitHub commit succeeded, False if it
        failed; None when pushing is disabled or the value is unchanged.
    """
    if self.push_version != "True" or old_val == new_val:
        return
    gh = login(token=self.gh_user)
    repo = gh.repository('antergos', 'antergos-packages')
    tf = repo.file_contents(self.name + '/PKGBUILD')
    content = tf.decoded
    search_str = '%s=%s' % (var, old_val)
    replace_str = '%s=%s' % (var, new_val)
    content = content.replace(search_str, replace_str)
    # BUG FIX: os.path.join discards every component preceding an absolute
    # one, so joining with '/PKGBUILD' produced just '/PKGBUILD' and the
    # local copy was written to the filesystem root.
    ppath = os.path.join('/opt/antergos-packages/', self.name, 'PKGBUILD')
    with open(ppath, 'w') as pbuild:
        # `with` closes the file; the explicit close() was redundant.
        pbuild.write(content)
    commit = tf.update(
        '[ANTBS] | Updated %s to %s in PKGBUILD for %s' % (var, new_val, self.name),
        content)
    if commit and commit['commit'] is not None:
        try:
            logger.info('@@-package.py-@@ | commit hash is %s', commit['commit'].sha)
        except AttributeError:
            pass
        return True
    else:
        logger.error('@@-package.py-@@ | commit failed')
        return False
def handle_hook():
    """Run the next queued build transaction inside the right docker image.

    Ensures the appropriate base image exists (the mkarchiso image when an
    ISO build was flagged, the base devel image otherwise), pops one
    transaction number off the build queue and starts it, restores the
    saved server status, and flips the status fields to idle once both
    queues are drained.
    """
    saved_status = set_server_status(first=True)
    utils = docker_utils.DockerUtils()
    if status.iso_flag:
        status.iso_flag = False
        image = utils.maybe_build_mkarchiso()
    else:
        image = utils.maybe_build_base_devel()
    if not image:
        # Image build failed — restore status and bail out.
        set_server_status(first=False, saved_status=saved_status)
        return False
    if status.queue:
        trans_num = status.queue.lpop()
        trans = get_trans_object(tnum=trans_num, repo_queue=repo_queue)
        trans.start()
    set_server_status(first=False, saved_status=saved_status)
    if not status.queue and not status.hook_queue:
        # Nothing left anywhere — reset the live-status fields to idle.
        status.idle = True
        status.building = 'Idle'
        status.container = ''
        status.building_num = ''
        status.building_start = ''
        status.iso_building = False
        logger.info('All builds completed.')
def get_from_pkgbuild(self, var=None):
    """Extract a variable's value from the package's PKGBUILD by sourcing
    it in bash and echoing the variable.

    For VCS packages (``pkgver`` with a git source, or cnchi-dev) the git
    repo is cloned first so the PKGBUILD's pkgver() machinery has a
    checkout to inspect.

    :param var: (str) PKGBUILD variable name to read.
    :return: the variable's value, or '' when it cannot be determined.
    """
    if var is None:
        logger.error('get_from_pkgbuild var is none')
        return ''
    self.maybe_update_pkgbuild_repo()
    path = None
    # Candidate locations: repo root plus the desktop-specific subdirs.
    paths = [os.path.join('/var/tmp/antergos-packages/', self.name),
             os.path.join('/var/tmp/antergos-packages/deepin_desktop', self.name),
             os.path.join('/var/tmp/antergos-packages/cinnamon', self.name)]
    for p in paths:
        if os.path.exists(p):
            path = os.path.join(p, 'PKGBUILD')
            # Packages living at the repo root default to the 'main' repo.
            if p == paths[0] and 'cinnamon' != self.pkgname and len(self.allowed_in()) == 0:
                self.allowed_in().append('main')
            break
    else:
        logger.error('get_from_pkgbuild cant determine pkgbuild path')
        # BUG FIX: the original fell through with path=None and crashed in
        # open(path) below; return the documented empty value instead.
        return ''
    parse = open(path).read()
    dirpath = os.path.dirname(path)
    # Array variables need ${var[*]} expansion; scalars are echoed directly.
    # NOTE(review): `var` is interpolated into a shell command — callers
    # must never pass untrusted input here.
    if var in ['source', 'depends', 'makedepends', 'arch']:
        cmd = 'source ' + path + '; echo ${' + var + '[*]}'
    else:
        cmd = 'source ' + path + '; echo ${' + var + '}'
    if var == "pkgver" and ('git+' in parse or self.name == 'cnchi-dev'):
        # VCS package: clone the source so pkgver() has a checkout.
        giturl = re.search('(?<=git\\+).+(?="|\')', parse)
        if giturl:
            giturl = giturl.group(0)
        pkgdir, pkgbuild = os.path.split(path)
        gitnm = self.name
        if self.name == 'pamac-dev':
            gitnm = 'pamac'
        elif self.name == 'cnchi-dev':
            giturl = 'http://github.com/lots0logs/cnchi-dev.git'
        if os.path.exists(os.path.join(pkgdir, gitnm)):
            shutil.rmtree(os.path.join(pkgdir, gitnm), ignore_errors=True)
        try:
            subprocess.check_output(['git', 'clone', giturl, gitnm], cwd=pkgdir)
            if self.name == 'cnchi-dev':
                subprocess.check_output(['tar', '-cf', 'cnchi-dev.tar', 'cnchi-dev'], cwd=pkgdir)
        except subprocess.CalledProcessError as err:
            logger.error(err.output)
        # For VCS packages run the pkgver() function itself instead of echoing.
        cmd = 'source ' + path + '; ' + var
    proc = subprocess.Popen(cmd, executable='/bin/bash', shell=True, cwd=dirpath,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    if len(out) > 0:
        out = out.strip()
        logger.info('@@-package.py-@@ | proc.out is %s' % out)
    if len(err) > 0:
        logger.error('@@-package.py-@@ | proc.err is %s', err)
    return out
def add_iso_versions_to_wordpress(iso_pkgs):
    """Publish the given ISO packages' version info to WordPress.

    :param iso_pkgs: iterable of ISO package objects.
    :return: (bool) True only if every post succeeded.
    """
    bridge = iso.WordPressBridge(auth=(status.docker_user, status.wp_password))
    # Comprehension instead of the manual append loop (pure construction).
    success = [bridge.add_new_iso_version(iso_pkg) for iso_pkg in iso_pkgs]
    logger.info(success)
    return all(success)
def __init__(self):
    """Map each ISO build name to its WordPress post ID."""
    names = ('antergos-x86_64', 'antergos-i686',
             'antergos-minimal-x86_64', 'antergos-minimal-i686')
    post_ids = ('1252', '1257', '1562', '1564')
    self.post_id_map = dict(zip(names, post_ids))
    logger.info('WordPressBridge Object Initialized')
def __init__(self, auth):
    """Set up the bridge with HTTP auth and the ISO post-ID lookup table.

    :param auth: credentials tuple passed to the requests session.
    """
    iso_posts = (
        ('antergos-x86_64', '2563'),
        ('antergos-i686', '2564'),
        ('antergos-minimal-x86_64', '2565'),
        ('antergos-minimal-i686', '2566'),
    )
    self.post_id_map = dict(iso_posts)
    self.auth = auth
    logger.info('WordPressBridge Object Initialized')
    self.success = False
    self.dist = 'antergos'
def process_manual(self):
    """Load the payload for a manually re-triggered webhook.

    Looks up the saved payload key at position ``manual_trans_index``
    (counted from the end of the payload index list) and restores the
    stored payload from the database. Sets ``self.result`` to 500 on any
    lookup failure.
    """
    position = self.manual_trans_index
    try:
        keys = db.lrange('antbs:github:payloads:index', -position, -position)
        logger.info(keys)
        logger.info(keys[0])
        self.payload = db.hgetall(keys[0])
    except Exception as err:
        logger.error(err)
        self.result = 500
        return
    # Stored commits are a stringified Python literal; rebuild the list.
    self.commits = ast.literal_eval(self.payload['commits'])
    self.is_github = True
def process_github(self):
    """Parse and persist an incoming GitHub push payload.

    Stores the raw payload in the database for 48 hours (so a build can
    be re-triggered manually via process_manual), records repo / pusher /
    commit info, and applies special handling for numix-icon-theme
    (rate-limited to one build per hour) and cnchi-dev.
    """
    if self.is_manual:
        return
    self.payload = json.loads(self.request.data.decode('UTF-8'))
    # Save payload in the database temporarily in case we need it later.
    dt = datetime.datetime.now().strftime("%m%d%Y-%I%M")
    key = 'antbs:github:payloads:{0}'.format(dt)
    if db.exists(key):
        # Same-minute collision: probe a few numbered suffixes for a free key.
        for i in range(1, 5):
            tmp = '{0}:{1}'.format(key, i)
            if not db.exists(tmp):
                key = tmp
                break
    db.hmset(key, self.payload)
    db.rpush('antbs:github:payloads:index', key)
    db.expire(key, 172800)  # keep the saved payload for 48 hours
    self.full_name = self.payload['repository']['full_name']
    self.repo = self.payload['repository']['name']
    self.pusher = self.payload['pusher']['name']
    self.commits = self.payload['commits']
    if self.repo == 'numix-icon-theme':
        rate_limit = True
        # Queue numix only when it isn't already queued or building and
        # no rate-limit flag is set; the flag blocks rebuilds for 1 hour.
        if self.repo not in status.queue and self.repo != status.building:
            if not db.exists('numix-commit-flag'):
                self.changes.append(['numix-icon-theme'])
                self.is_numix = True
                db.setex('numix-commit-flag', 3600, 'True')
                rate_limit = False
        if rate_limit:
            msg = 'RATE LIMIT IN EFFECT FOR numix-icon-theme'
            logger.info(msg)
            self.result = json.dumps({'msg': msg})
        else:
            # The actual build runs from the packaging repo's PKGBUILD.
            self.repo = 'antergos-packages'
    elif self.repo == 'cnchi-dev':
        self.changes.append(['cnchi-dev'])
        self.is_cnchi = True
    elif self.pusher != "antbs":
        # Ignore pushes made by our own bot; otherwise collect the
        # modified and added paths from every commit in the push.
        for commit in self.commits:
            self.changes.append(commit['modified'])
            self.changes.append(commit['added'])
def check_mirror_for_iso(version):
    """Check whether every release ISO has synced to the download mirror.

    Sends a HEAD request for each ISO's URL; once all of them respond
    successfully, publishes the new versions to WordPress and performs
    post-release cleanup.

    :param version: (str) the ISO release version being checked.
    """
    synced = []
    for iso_pkg in status.iso_pkgs:
        iso_obj = package.get_pkg_object(name=iso_pkg)
        req = requests.head(iso_obj.iso_url, allow_redirects=True)
        try:
            req.raise_for_status()
            synced.append(iso_obj)
        except Exception as err:
            logger.info(err)
    # Generalized from a hard-coded `== 4`: proceed once every ISO variant
    # listed in status.iso_pkgs has synced, however many there are.
    if len(synced) == len(status.iso_pkgs):
        success = add_iso_versions_to_wordpress(synced)
        if success:
            iso.clean_up_after_release(version)
            db.delete('antbs:misc:iso-release:do_check')
def add_new_iso_version(self, iso_pkg_obj=None):
    """Publish a new ISO version to the WordPress site.

    Fetches a nonce from the site's JSON API, then posts the ISO's URL,
    md5 and version against the post ID mapped to the ISO's pkgname.

    :param iso_pkg_obj: the ISO package object to publish.
    :return: (bool) True if the version was actually posted, else False.
    """
    if iso_pkg_obj is None:
        logger.error('iso cant be None')
        return False
    iso_obj = iso_pkg_obj
    logger.info('adding_new_iso_version: %s', iso_obj)
    pid = self.post_id_map[iso_obj.pkgname]
    query = 'json=get_nonce&controller=' + self.dist + '&method=handle_request'
    post_url = 'https://' + self.dist + '.com/?' + query
    session = requests.Session()
    # Pin the source address so requests originate from the right interface.
    session.mount('http://', SourceAddressAdapter((status.request_from, 0)))
    session.mount('https://', SourceAddressAdapter((status.request_from, 0)))
    session.auth = self.auth
    try:
        req = session.get(post_url)
        req.raise_for_status()
        logger.info(req.text)
        req = req.json()
        logger.info(req)
        if req.get('nonce', False):
            nonce = req.get('nonce')
            query = 'json=' + self.dist + '.handle_request&nonce='
            post_url = 'https://' + self.dist + '.com/?' + query + nonce + '&api_key=' + API_KEY
            req = session.post(post_url, data=dict(pid=pid, url=iso_obj.iso_url,
                                                   md5=iso_obj.iso_md5,
                                                   version=iso_obj.pkgver))
            req.raise_for_status()
            logger.info(req.text)
            self.success = True
    except Exception as err:
        self.success = False
        logger.error(err)
        return False
    # BUG FIX: the original returned True unconditionally here, reporting
    # success even when no nonce was received and nothing was posted.
    return self.success
def _print_log(self, epoch, batch_idx, batch_start_time, loss):
    """Log progress for the current batch.

    Only ``batch_idx`` is used here; epoch, start time and loss are
    accepted to keep the logging hook's signature uniform.
    """
    # Lazy %-style args defer formatting until the record is emitted
    # (and skip it entirely if the level is disabled); output unchanged.
    logger.info("Batch %s, saving output ..", batch_idx)
def make_dirs(dir_name):
    """Create ``dir_name`` (including parents) if it does not exist.

    :param dir_name: (str) path of the directory to create.
    """
    if not os.path.exists(dir_name):
        # exist_ok guards against the TOCTOU race where another process
        # creates the directory between the exists() check and this call.
        os.makedirs(dir_name, exist_ok=True)
        logger.info(f"Directory {dir_name} made")
'Training and test covariances have different dimensions' diff = mu1 - mu2 # Product might be almost singular covmean, _ = linalg.sqrtm(sigma1.dot(sigma2), disp=False) if not np.isfinite(covmean).all(): msg = ('fid calculation produces singular product; ' 'adding %s to diagonal of cov estimates') % eps logger.warning(msg) offset = np.eye(sigma1.shape[0]) * eps covmean = linalg.sqrtm((sigma1 + offset).dot(sigma2 + offset)) # Numerical error might give slight imaginary component if np.iscomplexobj(covmean): if not np.allclose(np.diagonal(covmean).imag, 0, atol=1e-3): m = np.max(np.abs(covmean.imag)) raise ValueError('Imaginary component {}'.format(m)) covmean = covmean.real tr_covmean = np.trace(covmean) return (diff.dot(diff) + np.trace(sigma1) + # NOQA np.trace(sigma2) - 2 * tr_covmean) if __name__ == "__main__": args = parse_args() logger.info(args) main(args)
def rm_dirs(dir_name, ignore_errors=False):
    """Recursively delete ``dir_name`` if it exists.

    :param dir_name: (str) directory path to remove.
    :param ignore_errors: (bool) passed through to ``shutil.rmtree``.
    """
    if not os.path.exists(dir_name):
        return
    shutil.rmtree(dir_name, ignore_errors=ignore_errors)
    logger.info(f"Directory {dir_name} removed")
def _load_pretrained(self, pretrained_path):
    """Load pretrained model weights non-strictly.

    Weights whose names don't match the current model are skipped
    (``strict=False``), so partially-matching checkpoints still load.

    :param pretrained_path: (str) path to the checkpoint file; must
        contain a ``'state_dict'`` entry.
    """
    logger.info(f"Loading pretrained checkpoint: {pretrained_path} ...")
    # map_location='cpu' lets checkpoints saved on GPU load on CPU-only
    # hosts; load_state_dict copies tensors onto the model's device anyway.
    checkpoint = torch.load(pretrained_path, map_location='cpu')
    model = self._get_non_parallel_model()
    model.load_state_dict(checkpoint['state_dict'], strict=False)
def get_from_pkgbuild(self, var=None):
    """
    Get a variable from the package's PKGBUILD (which is stored in antergos-packages gh repo).

    :param var: (str) A variable to extract from the PKGBUILD.
    :return: (str) The variable's value after extracted from PKGBUILD.
    """
    if var is None:
        logger.error('get_from_pkgbuild var is none')
        return ''
    self.maybe_update_pkgbuild_repo()
    path = None
    # Candidate locations: repo root plus the desktop-specific subdirs.
    paths = [os.path.join('/var/tmp/antergos-packages/', self.pkgname),
             os.path.join('/var/tmp/antergos-packages/deepin_desktop', self.pkgname),
             os.path.join('/var/tmp/antergos-packages/cinnamon', self.pkgname)]
    for p in paths:
        logger.debug(p)
        if os.path.exists(p):
            ppath = os.path.join(p, 'PKGBUILD')
            logger.debug(ppath)
            if os.path.exists(ppath):
                path = ppath
                # Packages living at the repo root default to the 'main' repo.
                if p == paths[0] and 'cinnamon' != self.pkgname and len(self.allowed_in) == 0:
                    self.allowed_in.append('main')
                break
    else:
        # for-else: no candidate dir held a PKGBUILD.
        logger.error('get_from_pkgbuild cant determine pkgbuild path for %s', self.name)
        return ''
    parse = open(path).read()
    dirpath = os.path.dirname(path)
    # Array variables need ${var[*]} expansion; scalars are echoed directly.
    # NOTE(review): `var` is interpolated into a shell command — callers
    # must never pass untrusted input here.
    if var in ['source', 'depends', 'makedepends', 'arch']:
        cmd = 'source ' + path + '; echo ${' + var + '[*]}'
    else:
        cmd = 'source ' + path + '; srcdir=$CWD; echo ${' + var + '}'
    if var == "pkgver" and ('git+' in parse or 'cnchi' in self.name or 'git://' in parse):
        # VCS package: clone the source repo so pkgver() has a checkout
        # to inspect. Try the git+ URL form first, then a bare git:// URL.
        giturl = re.search('(?<=git\\+).+(?="|\')', parse)
        if giturl:
            giturl = giturl.group(0)
        else:
            giturl = re.search('(?<="|\')git:.+(?="|\')', parse)
            if giturl:
                giturl = giturl.group(0)
            else:
                giturl = ''
        gitnm = self.name
        if self.name == 'pamac-dev':
            gitnm = 'pamac'
        elif self.name == 'cnchi-dev':
            gitnm = 'cnchi'
            giturl = 'http://github.com/lots0logs/cnchi-dev.git'
        elif self.name == 'cnchi':
            giturl = 'http://github.com/antergos/cnchi.git'
        # Remove any stale checkout before cloning fresh.
        if os.path.exists(os.path.join(dirpath, gitnm)):
            shutil.rmtree(os.path.join(dirpath, gitnm), ignore_errors=True)
        try:
            subprocess.check_output(['git', 'clone', giturl, gitnm], cwd=dirpath)
        except subprocess.CalledProcessError as err:
            logger.error(err.output)
        # For VCS packages run the pkgver() function itself instead of echoing.
        cmd = 'source ' + path + '; ' + var
    proc = subprocess.Popen(cmd, executable='/bin/bash', shell=True, cwd=dirpath,
                            stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    if len(out) > 0:
        out = out.strip()
        logger.info('proc.out is %s' % out)
    if len(err) > 0:
        logger.error('proc.err is %s', err)
    return out
def _save_inference_results(self, name: str, worker_output: dict):
    """Persist a worker's inference output as an .npz archive in ``saving_dir``.

    :param name: basename prefix for the output file.
    :param worker_output: mapping of array names to arrays, passed
        straight through to ``np.savez``.
    """
    target = os.path.join(self.saving_dir, f'{name}_output.npz')
    logger.info(f'Saving {target} ...')
    np.savez(target, **worker_output)
def process_changes(self):
    """Turn the collected webhook changes into queued builds.

    Extracts package names from the changed paths, deduplicates them,
    builds an HTML summary for the timeline, records a timeline event on
    each package, creates a build transaction and enqueues the build hook.
    """
    tpl = 'Webhook triggered by <strong>{0}.</strong> Packages added to the build queue: {1}'
    if self.repo == "antergos-packages":
        logger.debug("Build hook triggered. Updating build queue.")
        has_pkgs = False
        no_dups = []
        for changed in self.changes:
            # logger.info(changed)
            if len(changed) > 0:
                for item in changed:
                    # logger.info(item)
                    # NOTE(review): precedence makes this
                    # (item and is_gitlab) or is_numix or is_cnchi — when
                    # numix/cnchi is set, even falsy items take this branch.
                    # Possibly intentional; confirm before changing.
                    if item and self.is_gitlab or self.is_numix or self.is_cnchi:
                        pak = item
                    elif item and "PKGBUILD" in item:
                        # Path like "pkgname/PKGBUILD" (possibly nested):
                        # keep the directory name as the package name.
                        pak, pkb = item.rsplit('/', 1)
                        pak = pak.rsplit('/', 1)[-1]
                    else:
                        pak = None
                    # logger.info(pak)
                    if pak and 'antergos-iso' != pak:
                        logger.info('Adding %s to the build queue.' % pak)
                        no_dups.append(pak)
                        status.all_packages.add(pak)
                        has_pkgs = True
        if has_pkgs:
            the_pkgs = list(set(no_dups))
            last_pkg = the_pkgs[-1]
            html = []
            # Multiple packages render as a list; a single one as bold text.
            if len(the_pkgs) > 1:
                html.append('<ul class="hook-pkg-list">')
            for p in the_pkgs:
                if p:
                    if len(the_pkgs) > 1:
                        item = '<li>{0}</li>'.format(p)
                    else:
                        item = '<strong>{0}</strong>'.format(p)
                    html.append(item)
                if p == last_pkg:
                    # Last package reached: close the HTML, create the
                    # timeline event, and attach it to the package.
                    if self.is_gitlab:
                        source = 'Gitlab'
                        tltype = 2
                    else:
                        source = 'Github'
                        tltype = 1
                    if len(the_pkgs) > 1:
                        html.append('</ul>')
                    the_pkgs_str = ''.join(html)
                    tl_event = get_timeline_object(msg=tpl.format(source, the_pkgs_str),
                                                   tl_type=tltype, packages=the_pkgs)
                    p_obj = package.get_pkg_object(name=p)
                    events = p_obj.tl_events
                    events.append(tl_event.event_id)
                    del p_obj
            trans_obj = get_trans_object(the_pkgs)
            status.queue.append(trans_obj.tnum)
            queue.enqueue_call(builder.handle_hook, timeout=84600)
    if not self.result:
        self.result = json.dumps({'msg': 'OK!'})
def _setup_model(self):
    """Re-seed numpy, switch the model to train mode, and log every
    optimizer's current learning rate."""
    np.random.seed()
    self.model.train()
    for opt_name, opt in self.optimizers.items():
        logger.info(f'Current lr of optimizer {opt_name}: {get_lr(opt)}')
def _print_config_messages(self):
    """Report config values changed from defaults, then the experiment name."""
    global_config.print_changed()
    experiment_name = global_config["name"]
    logger.info(f'Experiment name: {experiment_name}')