Example #1
0
def _calc_sha1_for_pipe(infile, outfile, digest_out_pipe_w, debug=False):
    """
    Copy data from infile to outfile while maintaining a running SHA1
    digest of everything copied.  Once infile reaches end-of-file, the
    digest is sent in hex form down digest_out_pipe_w.

    :param infile: file obj providing input for digest
    :param outfile: file obj destination for writing output
    :param digest_out_pipe_w: fileobj to write digest to
    :param debug: boolean used in exception handling
    """
    close_all_fds([infile, outfile, digest_out_pipe_w])
    sha1 = hashlib.sha1()
    try:
        data = infile.read(euca2ools.BUFSIZE)
        while data:
            sha1.update(data)
            outfile.write(data)
            outfile.flush()
            data = infile.read(euca2ools.BUFSIZE)
        digest_out_pipe_w.send(sha1.hexdigest())
    except IOError:
        # HACK
        if not debug:
            return
        raise
    finally:
        infile.close()
        outfile.close()
        digest_out_pipe_w.close()
Example #2
0
def _copy_with_progress(infile, outfile, logger=None, debug=False):
    """
    Synchronously copy data from infile to outfile, writing to the logger
    the total number of bytes copied along the way if one was provided,
    and return the number of bytes copied.

    :param infile: file obj to read input from
    :param outfile: file obj to write output to
    :param logger: optional logger that receives periodic byte-count
                   debug messages
    :param debug: when True, re-raise IOErrors instead of swallowing them
    """
    bytes_written = 0
    close_all_fds(except_fds=[infile, outfile])
    try:
        while not infile.closed:
            chunk = infile.read(euca2ools.BUFSIZE)
            if not chunk:
                break
            outfile.write(chunk)
            outfile.flush()
            bytes_written += len(chunk)
            # Only fires when the running total lands exactly on a
            # REPORT_CHUNK_SIZE boundary, so reports may be sparse.
            if logger and (bytes_written % REPORT_CHUNK_SIZE == 0):
                logger.debug("Bytes read from source: {0}".format(bytes_written))
    except IOError:
        # HACK: broken pipes are expected when a downstream reader exits
        # early.  Still honor the documented contract ("return the number
        # of bytes copied") by returning the count so far instead of None.
        if not debug:
            return bytes_written
        raise
    finally:
        infile.close()
    return bytes_written
Example #3
0
def _calc_sha1_for_pipe(infile, outfile, digest_out_pipe_w, debug=False):
    """
    Stream every chunk read from infile into outfile, updating a running
    SHA1 checksum along the way.  When infile is exhausted, the hex form
    of the checksum is sent over digest_out_pipe_w.

    :param infile: file obj providing input for digest
    :param outfile: file obj destination for writing output
    :param digest_out_pipe_w: fileobj to write digest to
    :param debug: boolean used in exception handling
    """
    close_all_fds([infile, outfile, digest_out_pipe_w])
    checksum = hashlib.sha1()
    try:
        while True:
            block = infile.read(euca2ools.BUFSIZE)
            if not block:
                break
            checksum.update(block)
            outfile.write(block)
            outfile.flush()
        digest_out_pipe_w.send(checksum.hexdigest())
    except IOError:
        # HACK
        if not debug:
            return
        raise
    finally:
        infile.close()
        outfile.close()
        digest_out_pipe_w.close()
Example #4
0
    def _concatenate_parts_to_file_for_pipe(self,
                                            outfile,
                                            image_parts,
                                            source_dir,
                                            debug=False):
        """
        Concatenate a list of 'image_parts' (files) found in 'source_dir' into
        pipeline fed by 'outfile'. Parts are checked against checksum contained
        in part obj against calculated checksums as they are read/written.

        :param outfile: file obj used to output concatenated parts to
        :param image_parts: list of euca2ools.manifest.part objs
        :param source_dir: local path to parts contained in image_parts
        :param debug: boolean used in exception handling
        :raises ValueError: if a part's computed SHA1 does not match the
            digest recorded in its part obj
        """
        close_all_fds([outfile])
        part_count = len(image_parts)
        part_file = None
        try:
            # enumerate() avoids the O(n) image_parts.index() lookup the
            # original performed for every part's log message.
            for part_num, part in enumerate(image_parts, 1):
                self.log.debug("Concatenating Part:" + str(part.filename))
                sha1sum = hashlib.sha1()
                part_file_path = source_dir + "/" + part.filename
                # Binary mode: parts are binary image data.  Text mode can
                # corrupt them on platforms with newline translation and
                # fails to decode under Python 3.
                with open(part_file_path, 'rb') as part_file:
                    data = part_file.read(euca2ools.bundle.pipes._BUFSIZE)
                    while data:
                        sha1sum.update(data)
                        outfile.write(data)
                        outfile.flush()
                        data = part_file.read(euca2ools.bundle.pipes._BUFSIZE)
                    part_digest = sha1sum.hexdigest()
                    self.log.debug("PART NUMBER:" + str(part_num) +
                                   "/" + str(part_count))
                    self.log.debug('Part sha1sum:' + str(part_digest))
                    self.log.debug('Expected sum:' + str(part.hexdigest))
                    if part_digest != part.hexdigest:
                        # One formatted message: the original passed two
                        # positional args to ValueError (a stray comma),
                        # splitting the message in half.
                        raise ValueError(
                            'Input part file may be corrupt:{0} '
                            '(expected digest: {1}, actual: {2})'
                            .format(part.filename, part.hexdigest,
                                    part_digest))
        except IOError as ioe:
            # HACK
            self.log.debug('Error in _concatenate_parts_to_file_for_pipe.' +
                           str(ioe))
            if not debug:
                return
            raise ioe
        finally:
            # part_file may already be closed by the 'with' block; closing
            # a closed file obj is a harmless no-op.
            if part_file:
                part_file.close()
            self.log.debug('Concatentate done')
            self.log.debug('Closing write end of pipe after writing')
            outfile.close()
Example #5
0
def _create_tarball_from_stream(infile, outfile, tarinfo, debug=False):
    """
    Wrap the contents of infile in a streaming tarball written to outfile.

    :param infile: file obj supplying the member's contents
    :param outfile: file obj the tar stream is written to
    :param tarinfo: TarInfo describing the archive member
    :param debug: boolean used in exception handling
    """
    close_all_fds(except_fds=[infile, outfile])
    tar_stream = tarfile.open(mode='w|', fileobj=outfile,
                              bufsize=euca2ools.BUFSIZE)
    try:
        tar_stream.addfile(tarinfo, fileobj=infile)
    except IOError:
        # HACK
        if not debug:
            return
        raise
    finally:
        infile.close()
        tar_stream.close()
        outfile.close()
Example #6
0
def _create_tarball_from_stream(infile, outfile, tarinfo, debug=False):
    """
    Write infile's contents to outfile as a single-member streaming tar
    archive described by tarinfo.

    :param infile: file obj supplying the member's contents
    :param outfile: file obj the tar stream is written to
    :param tarinfo: TarInfo describing the archive member
    :param debug: boolean used in exception handling
    """
    close_all_fds(except_fds=[infile, outfile])
    archive = tarfile.open(
        mode='w|', fileobj=outfile, bufsize=euca2ools.BUFSIZE)
    try:
        archive.addfile(tarinfo, fileobj=infile)
    except IOError:
        # HACK
        if not debug:
            return
        raise
    finally:
        infile.close()
        archive.close()
        outfile.close()
Example #7
0
def _extract_from_tarball_stream(infile, outfile, debug=False):
    """
    Read a tar stream from infile and copy its first member's contents
    to outfile.

    :param infile: file obj providing input for tar
    :param outfile: file obj destination for tar output
    :param debug: boolean used in exception handling
    """
    close_all_fds([infile, outfile])
    archive = tarfile.open(mode='r|', fileobj=infile)
    try:
        member = archive.next()
        shutil.copyfileobj(archive.extractfile(member), outfile)
    except IOError:
        # HACK
        if not debug:
            return
        raise
    finally:
        infile.close()
        archive.close()
        outfile.close()
Example #8
0
def _extract_from_tarball_stream(infile, outfile, debug=False):
    """
    Untar the stream arriving on infile, writing the first member's
    contents out to outfile.

    :param infile: file obj providing input for tar
    :param outfile: file obj destination for tar output
    :param debug: boolean used in exception handling
    """
    close_all_fds([infile, outfile])
    tar_stream = tarfile.open(mode='r|', fileobj=infile)
    try:
        first_member = tar_stream.next()
        shutil.copyfileobj(tar_stream.extractfile(first_member), outfile)
    except IOError:
        # HACK
        if not debug:
            return
        raise
    finally:
        infile.close()
        tar_stream.close()
        outfile.close()
 def __read_bundle_parts(self, manifest, outfile):
     """
     Stream every part listed in the manifest into outfile, verifying
     each part's SHA1 digest as it is copied.

     :param manifest: manifest obj whose image_parts are read
     :param outfile: file obj the concatenated part data is written to
     :raises RuntimeError: if a part's computed SHA1 does not match the
         digest recorded in the manifest
     """
     close_all_fds(except_fds=[outfile])
     for part in manifest.image_parts:
         self.log.debug("opening part '%s' for reading", part.filename)
         digest = hashlib.sha1()
         # Binary mode: parts are binary data.  Text mode fails to decode
         # under Python 3 and can corrupt data on platforms with newline
         # translation.
         with open(part.filename, 'rb') as part_file:
             while True:
                 chunk = part_file.read(euca2ools.BUFSIZE)
                 if chunk:
                     digest.update(chunk)
                     outfile.write(chunk)
                     outfile.flush()
                 else:
                     break
             actual_hexdigest = digest.hexdigest()
             if actual_hexdigest != part.hexdigest:
                 self.log.error(
                     'rejecting unbundle due to part SHA1 '
                     'mismatch (expected: %s, actual: %s)', part.hexdigest,
                     actual_hexdigest)
                 # Close the parenthesis the original message left open.
                 raise RuntimeError(
                     "bundle part '{0}' appears to be corrupt (expected "
                     "SHA1: {1}, actual: {2})".format(
                         part.filename, part.hexdigest, actual_hexdigest))
Example #10
0
 def __read_bundle_parts(self, manifest, outfile):
     """
     Copy each manifest image part into outfile, checking every part's
     SHA1 digest against the value recorded in the manifest.

     :param manifest: manifest obj whose image_parts are read
     :param outfile: file obj the concatenated part data is written to
     :raises RuntimeError: if a part's computed SHA1 does not match the
         digest recorded in the manifest
     """
     close_all_fds(except_fds=[outfile])
     for part in manifest.image_parts:
         self.log.debug("opening part '%s' for reading", part.filename)
         digest = hashlib.sha1()
         # Binary mode: parts are binary data.  Text mode fails to decode
         # under Python 3 and can corrupt data on platforms with newline
         # translation.
         with open(part.filename, 'rb') as part_file:
             while True:
                 chunk = part_file.read(euca2ools.BUFSIZE)
                 if chunk:
                     digest.update(chunk)
                     outfile.write(chunk)
                     outfile.flush()
                 else:
                     break
             actual_hexdigest = digest.hexdigest()
             if actual_hexdigest != part.hexdigest:
                 self.log.error('rejecting unbundle due to part SHA1 '
                                'mismatch (expected: %s, actual: %s)',
                                part.hexdigest, actual_hexdigest)
                 # Close the parenthesis the original message left open.
                 raise RuntimeError(
                     "bundle part '{0}' appears to be corrupt (expected "
                     "SHA1: {1}, actual: {2})"
                     .format(part.filename, part.hexdigest,
                             actual_hexdigest))