Example 1
def reset_cache(logger, build_folder, cache_folder, **kwargs):
    """ wipe out the cache folder, optionnaly keeping latest master """
    logger.step("Reseting cache folder: {}".format(cache_folder))

    cache_size, free_space = display_cache_and_free_space(
        logger, build_folder, cache_folder
    )
    logger.std("-------------")

    if kwargs.get("keep_master"):
        tmp_master_fpath = None

        master = get_content("hotspot_master_image")
        master_fpath = os.path.join(cache_folder, master["name"])
        if (
            os.path.exists(master_fpath)
            and get_checksum(master_fpath) == master["checksum"]
        ):
            # latest master to be moved temporarily to build-dir
            tmp_master_fpath = os.path.join(
                build_folder, ".__tmp--{}".format(master["name"])
            )
            logger.std("Keeping your latest master aside: {}".format(master["name"]))
            try:
                shutil.move(master_fpath, tmp_master_fpath)
            except Exception as exp:
                logger.err("Unable to move your latest master into build-dir. Exiting.")
                return 1

    logger.std("Removing cache...", end="")
    try:
        shutil.rmtree(cache_folder)
    except Exception as exp:
        logger.err("FAILED ({}).".format(exp))
    else:
        logger.succ("OK.")

    logger.std("Recreating cache placeholder.")
    cache_folder = get_cache(build_folder)

    if kwargs.get("keep_master"):
        logger.std("Restoring your latest master.")
        try:
            shutil.move(tmp_master_fpath, master_fpath)
        except Exception as exp:
            logger.err("Unable to move back your master file into fresh cache.")
            if tmp_master_fpath is not None:
                logger.err("Please find your master at: {}".format(tmp_master_fpath))
            return 1

    logger.std("-------------")
    display_cache_and_free_space(
        logger, build_folder, cache_folder, cache_size, free_space
    )

    return 0
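
The only keyword argument the function inspects is keep_master. A hedged invocation sketch with placeholder folder paths (a real caller would pass whatever logger and folders the installer already uses):

# Hypothetical call: wipe the cache but keep a verified master image aside.
rc = reset_cache(logger, "/tmp/build", "/tmp/build/cache", keep_master=True)
if rc != 0:
    logger.err("cache reset failed")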
Example 2
def content_is_cached(content, cache_folder, check_sum=False):
    """ whether a content is already present in cache """
    content_fpath = os.path.join(cache_folder, content.get("name"))
    if (
        not os.path.exists(content_fpath)
        or os.path.getsize(content_fpath) != content.get("archive_size")
    ):
        return False

    if check_sum:
        return get_checksum(content_fpath) == content.get("checksum")

    return True
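
A caller would typically consult this helper before downloading. The sketch below is hypothetical: the content dict keys (name, archive_size, checksum) mirror the ones the function reads, and download_content is a made-up stand-in for whatever fetch routine the caller uses:

# Hypothetical caller: reuse the cached copy when size and checksum match.
content = {
    "name": "demo-content.zip",                # placeholder values
    "archive_size": 1024,
    "checksum": "placeholder-checksum",
}
if content_is_cached(content, cache_folder, check_sum=True):
    logger.std("using cached {}".format(content["name"]))
else:
    download_content(content, cache_folder)    # hypothetical fetch helper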
Example 3
def package_is_latest_version(fpath, fname, logger):
    """ whether a package (ZIM or ZIP) is in the current catalog """

    for catalog in get_catalogs(logger):
        for (package_id, package) in catalog["all"].items():
            package.update({"ext": "zip" if package["type"] != "zim" else "zim"})
            package.update({"langid": package.get("langid") or package_id})
            rfname = "package_{langid}-{version}.{ext}".format(**package)
            if rfname == fname and get_checksum(fpath) == package["sha256sum"]:
                return "{ext}: {fname}".format(
                    fname=package["langid"], ext=package["ext"].upper()
                )
    return False
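
To make the expected filename convention concrete, here is a hypothetical catalog entry and the name it would be matched against; all values are made up:

# A made-up package entry; the matching filename becomes
# "package_wikipedia.en-2019-05.zip".
package = {"type": "zip", "langid": "wikipedia.en", "version": "2019-05",
           "sha256sum": "placeholder-sha256"}
package["ext"] = "zip" if package["type"] != "zim" else "zim"
rfname = "package_{langid}-{version}.{ext}".format(**package)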
Example 4
def refresh_yum_repo(mirror, data, mirror_url, ts):
    """ Refresh package metadata for a yum-style rpm mirror
        and add the packages to the mirror
    """
    primary_url, checksum, checksum_type = get_primary_url(mirror_url, data)

    if not primary_url:
        mirror.fail()
        return

    res = get_url(primary_url)
    mirror.last_access_ok = response_is_valid(res)

    if not mirror.last_access_ok:
        mirror.fail()
        return

    data = download_url(res, 'Downloading repo info (2/2):')
    if data is None:
        mirror.fail()
        return

    computed_checksum = get_checksum(data, Checksum[checksum_type])
    if not mirror_checksum_is_valid(computed_checksum, checksum, mirror):
        return

    if mirror.file_checksum == checksum:
        text = 'Mirror checksum has not changed, '
        text += 'not refreshing package metadata'
        warning_message.send(sender=None, text=text)
        return

    mirror.file_checksum = checksum

    if hasattr(settings, 'MAX_MIRRORS') and \
            isinstance(settings.MAX_MIRRORS, int):
        max_mirrors = settings.MAX_MIRRORS
        # only refresh X mirrors, where X = max_mirrors
        checksum_q = Q(mirrorlist=False,
                       refresh=True,
                       timestamp=ts,
                       file_checksum=checksum)
        have_checksum = mirror.repo.mirror_set.filter(checksum_q).count()
        if have_checksum >= max_mirrors:
            text = '{0!s} mirrors already have this '.format(max_mirrors)
            text += 'checksum, ignoring refresh to save time'
            info_message.send(sender=None, text=text)
        else:
            packages = extract_yum_packages(data, primary_url)
            if packages:
                update_mirror_packages(mirror, packages)
Example 5
def download_if_missing(url, fpath, logger, checksum=None):
    """ returns local file if existing and matching sum otherwise download """

    # file already downloaded
    if checksum and os.path.exists(fpath):
        logger.std("calculating sum for {}...".format(fpath), "")
        if get_checksum(fpath) == checksum:
            logger.std("MATCH.")
            return RequestedFile.from_disk(url, fpath, checksum)
        logger.std("MISMATCH.")
    elif os.path.exists(fpath):
        return RequestedFile.from_disk(url, fpath)

    return download_file(url, fpath, logger, checksum, debug=True)
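
A hedged usage sketch: the URL, filename and checksum below are placeholders, and the returned RequestedFile is simply whatever object download_file or RequestedFile.from_disk produces in the examples above:

# Hypothetical call: reuse ./cache/demo.zip if its checksum matches,
# otherwise fetch it again.
url = "https://example.org/demo.zip"            # placeholder URL
fpath = os.path.join(cache_folder, "demo.zip")
requested = download_if_missing(url, fpath, logger,
                                checksum="placeholder-checksum")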
Example 6
def is_latest_version(fpath, fname, logger):
    """ whether the filename is a usable content """

    if fname.startswith("package_"):
        return package_is_latest_version(fpath, fname, logger)

    for key, content in CONTENTS.items():
        if not content["name"] == fname:
            continue

        if get_checksum(fpath) == content["checksum"]:
            return key

    return False
Example 7
def refresh_arch_repo(repo):
    """ Refresh all mirrors of an arch linux repo
    """
    max_mirrors = None
    if hasattr(settings, 'MAX_MIRRORS') and \
            isinstance(settings.MAX_MIRRORS, int):
        max_mirrors = settings.MAX_MIRRORS
    fname = '{0!s}/{1!s}.db'.format(repo.arch, repo.repo_id)
    ts = datetime.now().replace(microsecond=0)
    for i, mirror in enumerate(repo.mirror_set.filter(refresh=True)):
        res = find_mirror_url(mirror.url, [fname])
        mirror.last_access_ok = response_is_valid(res)
        if mirror.last_access_ok:
            if max_mirrors is not None and i >= max_mirrors:
                text = '{0!s} mirrors already refreshed, '.format(max_mirrors)
                text += 'not refreshing {0!s}'.format(mirror.url)
                warning_message.send(sender=None, text=text)
                continue
            mirror_url = res.url
            text = 'Found arch repo - {0!s}'.format(mirror_url)
            info_message.send(sender=None, text=text)
            data = download_url(res, 'Downloading repo info:')
            if data is None:
                mirror.fail()
                return
            computed_checksum = get_checksum(data, Checksum.sha1)
            if mirror.file_checksum == computed_checksum:
                text = 'Mirror checksum has not changed, '
                text += 'not refreshing package metadata'
                warning_message.send(sender=None, text=text)
            else:
                packages = extract_arch_packages(data)
                mirror.last_access_ok = True
                mirror.timestamp = ts
                update_mirror_packages(mirror, packages)
                mirror.file_checksum = computed_checksum
                packages.clear()
        else:
            mirror.fail()
        mirror.save()
Example 8
def refresh_deb_repo(repo):
    """ Refresh a debian repo.
        Checks for the Packages* files to determine what the mirror urls
        are and then downloads and extracts packages from those files.
    """

    formats = ['Packages.bz2', 'Packages.gz', 'Packages']
    if lzma is not None:
        formats.insert(0, 'Packages.xz')

    ts = datetime.now().replace(microsecond=0)
    for mirror in repo.mirror_set.filter(refresh=True):
        res = find_mirror_url(mirror.url, formats)
        mirror.last_access_ok = response_is_valid(res)

        if mirror.last_access_ok:
            mirror_url = res.url
            text = 'Found deb repo - {0!s}'.format(mirror_url)
            info_message.send(sender=None, text=text)
            data = download_url(res, 'Downloading repo info:')
            if data is None:
                mirror.fail()
                return
            computed_checksum = get_checksum(data, Checksum.sha1)
            if mirror.file_checksum == computed_checksum:
                text = 'Mirror checksum has not changed, '
                text += 'not refreshing package metadata'
                warning_message.send(sender=None, text=text)
            else:
                packages = extract_deb_packages(data, mirror_url)
                mirror.last_access_ok = True
                mirror.timestamp = ts
                update_mirror_packages(mirror, packages)
                mirror.file_checksum = computed_checksum
                packages.clear()
        else:
            mirror.fail()
        mirror.save()
Example 9
    if log_filename == "stdout":
        log_file = sys.stdout
    else:
        try:
            log_file = open(log_filename, 'w')
        except IOError:
            exit("Unable to open " + log_filename + ".")

    next_acknum = 0

    # Receive first packet
    packet, addr = recv_sock.recvfrom(576)
    source_port, dest_port, seqnum, acknum, header_length, \
        ack, final, window_size, contents = util.unpack(packet)

    checksum = util.get_checksum(packet)
    packet_valid = checksum == 0 and next_acknum == acknum

    if packet_valid:
        recv_file.write(contents)
        next_acknum += 1

    log = str(datetime.datetime.now()) + " " + str(source_port) + " " + str(
        dest_port) + " " + str(seqnum) + " " + str(acknum)
    log_file.write(log + "\n")

    # Establish ack socket connection
    ack_sock.connect((sender_ip, sender_port))
    out_port = ack_sock.getsockname()[1]
    ack_segment = util.make_packet(out_port, sender_port, seqnum, acknum,
                                   packet_valid, False, 1, "")
Example 10
def verified(self):
    """ whether the file is present and matches its expected checksum, if any """
    return self.present and (
        get_checksum(self.fpath) == self.checksum or self.checksum is None
    )
    if log_filename == "stdout":
        log_file = sys.stdout
    else:
        try:
            log_file = open(log_filename, 'w')
        except IOError:
            exit("Unable to open " + log_filename + ".")

    next_acknum = 0

    # Receive first packet
    packet, addr = recv_sock.recvfrom(576)
    source_port, dest_port, seqnum, acknum, header_length, \
        ack, final, window_size, contents = util.unpack(packet)

    checksum = util.get_checksum(packet)
    packet_valid = checksum == 0 and next_acknum == acknum

    if packet_valid:
        recv_file.write(contents)
        next_acknum += 1

    log = str(datetime.datetime.now()) + " " + str(source_port) + " " + str(dest_port) + " " + str(seqnum) + " " + str(
        acknum)
    log_file.write(log + "\n")

    # Establish ack socket connection
    ack_sock.connect((sender_ip, sender_port))
    out_port = ack_sock.getsockname()[1]
    ack_segment = util.make_packet(out_port, sender_port,
                                   seqnum, acknum, packet_valid,