Example #1
def _remove(pkgnames, target_archs=None):
    # default to all real architectures; avoids a mutable default argument
    if target_archs is None:
        target_archs = [a for a in ARCHS if a != 'any']
    assert type(pkgnames) is list and pkgnames
    # every entry must be a package-name string
    assert not [None for s in pkgnames if not (type(s) is str)]
    logger.info('starting remove %s for %s', pkgnames, target_archs)
    if len(target_archs) == 1 and target_archs[0] == 'any':
        # an arch=any package is present in every arch dir (via symlinks),
        # so removing it means sweeping all of them
        target_archs = ARCHS
    else:
        assert 'any' not in target_archs
    for arch in target_archs:
        remove_pkgs = list()
        basedir = Path('www') / arch
        for fpath in basedir.iterdir():
            if fpath.name.endswith(PKG_SUFFIX) and \
                get_pkg_details_from_name(fpath.name).pkgname in pkgnames:
                remove_pkgs.append(fpath)
        if remove_pkgs:
            logger.info("repo-remove: %s", repo_remove(remove_pkgs))
        else:
            logger.warning(f'Nothing to remove in {arch}')
    archive_dir = Path('archive')
    for fpath in archive_dir.iterdir():
        nosigname = fpath.name[:-4] if fpath.name.endswith(
            '.sig') else fpath.name
        if nosigname.endswith(PKG_SUFFIX) and \
            get_pkg_details_from_name(nosigname).pkgname in pkgnames:
            throw_away(fpath)
    logger.info('finished remove')
    return True
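
A minimal usage sketch (the package name is made up; ARCHS, logger and the
helper functions are assumed to come from the surrounding module):

# drop 'somepkg' from two specific arch repos
_remove(['somepkg'], target_archs=['x86_64', 'aarch64'])
# per the branch above, passing only 'any' expands to every arch dir
_remove(['somepkg'], target_archs=['any'])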
Example #2
def filter_old_pkg(fpaths, keep_new=1, archive=False, recycle=False):
    '''
        Accepts a list of paths (they must all be in the same dir) and
        returns a tuple of two lists of package details:
        ([new1, new2], [old1, old2])
        Entries run from new to old within each package, e.g. with
        keep_new=2:
        new: pkga-v8, pkga-v7, pkgb-v5, pkgb-v4
        old: pkga-v6, pkga-v5, pkgb-v3, pkgb-v2
    '''
    if not fpaths:
        return (list(), list())
    assert type(fpaths) is list
    for fpath in fpaths:
        assert issubclass(type(fpath), os.PathLike) and \
               fpath.name.endswith(PKG_SUFFIX)
    assert not (archive and recycle)
    # all paths must share the same parent directory
    assert not [None for fpath in fpaths if fpath.parent != fpaths[0].parent]

    new_pkgs = list()
    old_pkgs = list()
    pkgs_vers = dict()
    for fpath in fpaths:
        pkg = get_pkg_details_from_name(fpath.name)
        pkgs_vers.setdefault(pkg.pkgname + pkg.arch, list()).append(pkg)
    for pkgname_arch in pkgs_vers:
        family = pkgs_vers[pkgname_arch]
        # new packages first
        family = sorted(family, reverse=True)
        if len(family) > keep_new:
            new_pkgs += family[:keep_new]
            old_pkgs += family[keep_new:]
        else:
            new_pkgs += family
    # with neither archive nor recycle set, old packages are only reported
    for pkg in old_pkgs:
        fullpath = fpaths[0].parent / pkg.fname
        sigpath = fpaths[0].parent / f'{pkg.fname}.sig'
        if archive:
            archive_pkg(fullpath)
            if sigpath.exists():
                archive_pkg(sigpath)
        elif recycle:
            throw_away(fullpath)
            if sigpath.exists():
                throw_away(sigpath)
    return (new_pkgs, old_pkgs)
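
A sketch of the split the docstring describes (file names are invented and
assume PKG_SUFFIX matches '.pkg.tar.zst'; note the returned lists hold the
package-detail objects, not the paths):

from pathlib import Path

paths = [Path('www/x86_64') / n for n in (
    'pkga-8-1-x86_64.pkg.tar.zst',
    'pkga-7-1-x86_64.pkg.tar.zst',
    'pkga-6-1-x86_64.pkg.tar.zst',
    'pkgb-5-1-x86_64.pkg.tar.zst',
)]
new, old = filter_old_pkg(paths, keep_new=2)
# new -> pkga-8, pkga-7, pkgb-5; old -> pkga-6 (with neither archive nor
# recycle set, the old files are merely reported, not moved)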
Example #3
def repo_remove(fpaths):
    assert type(fpaths) is list and fpaths
    # all paths must share the same parent directory
    assert not [None for fpath in fpaths if fpath.parent != fpaths[0].parent]
    for fpath in fpaths:
        assert issubclass(type(fpath), os.PathLike) and \
               fpath.name.endswith(PKG_SUFFIX)
    dbpath = fpaths[0].parent / f'{REPO_NAME}.db.tar.gz'
    for fpath in fpaths:
        throw_away(fpath)
        sigpath = fpath.parent / f'{fpath.name}.sig'
        # Path.exists() follows symlinks and thus reports False for a
        # dangling link; check is_symlink() as well to catch those
        if sigpath.exists() or sigpath.is_symlink():
            throw_away(sigpath)
    pkgnames = [
        get_pkg_details_from_name(fpath.name).pkgname for fpath in fpaths
    ]
    return bash(f'{REPO_REMOVE_CMD} {dbpath} {" ".join(pkgnames)}',
                RUN_CMD_TIMEOUT=5 * 60)
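
The exists()-or-is_symlink() check exists because Path.exists() resolves
symlinks, so it returns False for a dangling link even though the link
itself is still on disk. A standalone illustration:

from pathlib import Path

p = Path('dangling.sig')
p.symlink_to('no-such-target')  # create a link to a missing file
print(p.exists())               # False: the target does not exist
print(p.is_symlink())           # True: the link itself does
p.unlink()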
Example #4
def _update(overwrite=False):
    logger.info('starting update')
    update_path = Path('updates')
    assert update_path.exists()
    pkgs_to_add = dict()
    # archive superseded versions first so that only the newest build of
    # each package in updates/ gets copied into the repo
    filter_old_pkg(
        [f for f in update_path.iterdir() if f.name.endswith(PKG_SUFFIX)],
        keep_new=1,
        archive=True)
    for pkg_to_add in update_path.iterdir():
        if pkg_to_add.is_dir():
            continue
        else:
            if pkg_to_add.name.endswith(PKG_SUFFIX):
                sigfile = Path(f"{pkg_to_add}.sig")
                if sigfile.exists():
                    arch = get_pkg_details_from_name(pkg_to_add.name).arch
                    pkg_nlocation = pkg_to_add.parent / '..' / 'www' / arch / pkg_to_add.name
                    sig_nlocation = Path(f'{pkg_nlocation}.sig')
                    logger.info(
                        f'Copying {pkg_to_add} to {pkg_nlocation}, {sigfile} to {sig_nlocation}'
                    )
                    if overwrite:
                        for nlocation in (pkg_nlocation, sig_nlocation):
                            if nlocation.exists():
                                logger.warning(f'Overwriting {nlocation}')
                    else:
                        should_continue = False
                        for nlocation in (pkg_nlocation, sig_nlocation):
                            if nlocation.exists():
                                logger.warning(
                                    'Same version is already in the repo.')
                                throw_away(pkg_to_add)
                                should_continue = True
                                break
                        if should_continue:
                            continue
                    copyfile(pkg_to_add, pkg_nlocation)
                    copyfile(sigfile, sig_nlocation)
                    archive_pkg(pkg_to_add)
                    archive_pkg(sigfile)
                    if arch == 'any':
                        for any_arch in ARCHS:
                            pkg_nlocation = pkg_to_add.parent / '..' / 'www' / any_arch / pkg_to_add.name
                            pkgs_to_add.setdefault(
                                any_arch, list()).append(pkg_nlocation)
                    else:
                        pkgs_to_add.setdefault(arch,
                                               list()).append(pkg_nlocation)
                else:
                    logger.warning(f'{pkg_to_add} has no signature!')
                    throw_away(pkg_to_add)
    if 'any' in pkgs_to_add:
        # 'any' packages were copied into www/any only; symlink them into
        # every real arch dir before repo-add runs on each arch
        _regenerate(target_archs=ARCHS, just_symlink=True)
    for arch in pkgs_to_add:
        logger.info("repo-add: %s", repo_add(pkgs_to_add[arch]))
    # anything left in updates/ at this point is garbage
    for other in update_path.iterdir():
        if other.is_dir():
            continue
        logger.warning(f"{other} is garbage!")
        throw_away(other)
    logger.info('finished update')
    return True
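
For orientation, the directory layout this function (together with the
previous examples) works against is, as read from the code:

updates/      incoming package + .sig pairs to be published
www/<arch>/   the live repo, one dir per entry in ARCHS plus 'any'
archive/      processed packages end up here (scanned in Example #1)

so a typical run is just _update(), or _update(overwrite=True) to replace
files of the same version that are already in the repo.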
Example #5
def _regenerate(target_archs=ARCHS, just_symlink=False):
    if just_symlink:
        logger.info('starting regenerate symlinks %s', target_archs)
    else:
        logger.info('starting regenerate %s', target_archs)
    rn = REPO_NAME
    repo_files = (f"{rn}.db {rn}.db.tar.gz {rn}.db.tar.gz.old "
                  f"{rn}.files {rn}.files.tar.gz {rn}.files.tar.gz.old")
    repo_files = repo_files.split(' ')
    repo_files_essential = [
        fname for fname in repo_files if not fname.endswith('.old')
    ]
    assert repo_files_essential
    # make symlink for arch=any pkgs
    basedir = Path('www') / 'any'
    if basedir.exists():
        for pkgfile in basedir.iterdir():
            if pkgfile.name.endswith(PKG_SUFFIX) and \
               get_pkg_details_from_name(pkgfile.name).arch == 'any':
                sigfile = Path(f"{pkgfile}.sig")
                if sigfile.exists():
                    logger.info(f'Creating symlink for {pkgfile}, {sigfile}')
                    for arch in target_archs:
                        if arch == 'any':
                            continue
                        symlink(pkgfile.parent / '..' / arch / pkgfile.name,
                                f'../any/{pkgfile.name}')
                        symlink(sigfile.parent / '..' / arch / sigfile.name,
                                f'../any/{sigfile.name}')
    else:
        logger.error(f'{basedir} does not exist!')
    if just_symlink:
        return True
    # run repo_add
    for arch in target_archs:
        basedir = Path('www') / arch
        repo_files_count = list()
        pkgs_to_add = list()
        if not basedir.exists():
            logger.error(f'{arch} dir does not exist!')
            continue
        filter_old_pkg(
            [f for f in basedir.iterdir() if f.name.endswith(PKG_SUFFIX)],
            keep_new=1,
            recycle=True)
        # materialize the listing: entries are renamed/removed in the loop
        pkgfiles = list(basedir.iterdir())
        for pkgfile in pkgfiles:
            if pkgfile.name in repo_files:
                repo_files_count.append(pkgfile.name)
                continue
            if pkgfile.name.endswith(PKG_SIG_SUFFIX):
                # pkgfile.exists() guards against sigs that were already
                # moved together with their package earlier in this loop
                if not Path(str(pkgfile)[:-4]).exists() and pkgfile.exists():
                    logger.warning(f"{pkgfile} has no package!")
                    throw_away(pkgfile)
                    continue
            elif pkgfile.name.endswith(PKG_SUFFIX):
                sigfile = Path(f"{pkgfile}.sig")
                if not sigfile.exists():
                    logger.warning(f"{pkgfile} has no signature!")
                    throw_away(pkgfile)
                    continue
                realarch = get_pkg_details_from_name(pkgfile.name).arch
                if realarch != 'any' and realarch != arch:
                    newpath = pkgfile.parent / '..' / realarch / pkgfile.name
                    new_sigpath = Path(f'{newpath}.sig')
                    logger.info(
                        f'Moving {pkgfile} to {newpath}, {sigfile} to {new_sigpath}'
                    )
                    assert not (newpath.exists() or new_sigpath.exists())
                    pkgfile.rename(newpath)
                    sigfile.rename(new_sigpath)
                    pkgs_to_add.append(newpath)
                else:
                    pkgs_to_add.append(pkgfile)
            else:
                logger.warning(f"{pkgfile} is garbage!")
                throw_away(pkgfile)
        if pkgs_to_add:
            logger.info("repo-add: %s", repo_add(pkgs_to_add))
        else:
            logger.warning('repo-add: Nothing to do in %s', arch)
        for rfile in repo_files_essential:
            if rfile not in repo_files_count:
                logger.error(f'{rfile} does not exist in {arch}!')
    logger.info('finished regenerate')
    return True
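
For an arch=any package, the symlink pass above produces relative links of
this shape (assuming the symlink(linkpath, target) helper used in the code
and an invented package name):

www/x86_64/foo-1-1-any.pkg.tar.zst     -> ../any/foo-1-1-any.pkg.tar.zst
www/x86_64/foo-1-1-any.pkg.tar.zst.sig -> ../any/foo-1-1-any.pkg.tar.zst.sig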
Example #6
def __get_new_ver(self, dirname, arch):
    # the version is read from the first package the build produced
    pkgfiles = self.__get_package_list(dirname, arch)
    ver = get_pkg_details_from_name(pkgfiles[0]).ver
    return ver
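
A slightly defensive variant (a sketch only; it guards against an empty
package list, which would otherwise raise an IndexError on pkgfiles[0]):

def __get_new_ver(self, dirname, arch):
    pkgfiles = self.__get_package_list(dirname, arch)
    assert pkgfiles, f'no packages found for {dirname} ({arch})'
    return get_pkg_details_from_name(pkgfiles[0]).ver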
Example #7
def __upload(self, job, overwrite=False):
    cwd = REPO_ROOT / job.pkgconfig.dirname
    f_to_upload = list()
    pkg_update_list = list()
    for fpath in cwd.iterdir():
        if fpath.name.endswith(PKG_SUFFIX) and \
           get_pkg_details_from_name(fpath.name).ver == job.version:
            sigpath = fpath.parent / f'{fpath.name}.sig'
            assert sigpath.exists()
            f_to_upload.append(sigpath)
            f_to_upload.append(fpath)
            pkg_update_list.append(fpath)
    # sizes in MB, sent to the remote so it can grant per-file timeouts
    sizes = [f.stat().st_size / 1000 / 1000 for f in f_to_upload]
    pkg_update_list_human = " ".join([f.name for f in pkg_update_list])
    assert pkg_update_list
    max_tries = 10
    for tries in range(max_tries):
        timeouts = rrun('push_start',
                        args=([f.name for f in f_to_upload], sizes))
        if type(timeouts) is list:
            break
        if tries + 1 < max_tries:
            logger.warning(
                f'Remote is busy ({timeouts}), wait 1 min x10 [{tries+1}/10]'
            )
            sleep(60)
    else:
        raise RuntimeError('Remote is busy and cannot connect')
    assert len(f_to_upload) == len(timeouts)
    # map each file to the timeout granted by the remote
    pkgs_timeouts = dict(zip(f_to_upload, timeouts))
    for f in f_to_upload:
        max_tries = 5
        for tries in range(max_tries):
            timeout = pkgs_timeouts.get(f)
            try:
                logger.info(f'Uploading {f.name}, timeout in {timeout}s')
                mon_bash(UPLOAD_CMD.format(src=f), seconds=int(timeout))
            except Exception:
                time_to_sleep = (tries + 1) * 60
                logger.error(
                    f'We are having trouble uploading {f.name}, wait {time_to_sleep} secs'
                )
                patret = rrun('push_add_time',
                              args=(f.name, time_to_sleep + timeout))
                if patret is not None:
                    logger.error(
                        f'Unable to run push_add_time, reason: {patret}')
                print_exc_plus()
                if tries + 1 < max_tries:
                    sleep(time_to_sleep)
            else:
                break
        else:
            logger.error(
                f'Upload {f.name} failed, running push_fail and abort.')
            pfret = rrun('push_fail', args=(f.name, ))
            if pfret is not None:
                logger.error(f'Unable to run push_fail, reason: {pfret}')
            raise RuntimeError('Unable to upload some files')
    logger.info(f'Requesting repo update for {pkg_update_list_human}')
    res = "unexpected"
    max_tries = 5
    for tries in range(max_tries):
        try:
            res = rrun('push_done',
                       args=([f.name for f in f_to_upload], ),
                       kwargs={
                           'overwrite': overwrite,
                       })
        except Exception:
            time_to_sleep = (tries + 1) * 60
            logger.info(
                f'Error updating {pkg_update_list_human}, wait {time_to_sleep} secs'
            )
            print_exc_plus()
            if tries + 1 < max_tries:
                sleep(time_to_sleep)
        else:
            break
    else:
        ret = f'Update failed for {pkg_update_list_human}: max retries exceeded'
        logger.error(ret)
        raise RuntimeError(ret)
    if res is None:
        logger.info(f'Update success for {pkg_update_list_human}')
    else:
        ret = f'Update failed for {pkg_update_list_human}, reason: {res}'
        logger.error(ret)
        raise RuntimeError(ret)
    return res is None
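
All three retry loops above lean on Python's for/else idiom: the else
branch runs only when the loop finishes without break, i.e. after every
attempt has failed. Stripped to a skeleton (do_attempt and max_tries are
placeholders, sleep is time.sleep):

from time import sleep

for tries in range(max_tries):
    try:
        do_attempt()                    # any of the rrun/upload steps
    except Exception:
        if tries + 1 < max_tries:
            sleep((tries + 1) * 60)     # linear back-off between attempts
    else:
        break                           # success skips the loop's else
else:
    raise RuntimeError('all attempts failed')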