Example #1
0
def _create_file(p4, client_name, local_path, file_content):
    """Create and submit a file.

    Write a file to the local Git Fusion workspace and then add and submit to
    Perforce. NOP if file already exists in Perforce after a 'p4 sync'.

    :param p4: P4 API connection.
    :param str client_name: client to switch to while creating the file.
    :param str local_path: workspace path of the file to create.
    :param str file_content: text content written to the new file.
    """
    filename = os.path.basename(local_path)
    with p4gf_util.restore_client(p4, client_name):
        try:
            with p4.at_exception_level(p4.RAISE_NONE):
                # Sync the file and ensure we really have it.
                p4.run('sync', '-q', local_path)
                results = p4.run('have', local_path)
            if not results:
                LOG.debug("_create_file(): {} does not exist, will create...".format(local_path))
                # Perms are probably read-only, need to remove before writing.
                if os.path.exists(local_path):
                    os.remove(local_path)
                else:
                    p4gf_util.ensure_parent_dir(local_path)
                with open(local_path, 'w') as mf:
                    mf.write(file_content)
                # '{filename}' placeholder is required; without it the
                # .format() call below is a silent no-op.
                desc = _("Creating initial '{filename}' file via p4gf_init.py")\
                    .format(filename=filename)
                with p4gf_util.NumberedChangelist(p4=p4, description=desc) as nc:
                    nc.p4run('add', local_path)
                    nc.submit()
                LOG.debug("_create_file(): successfully created {}".format(local_path))
                _info(_("File '{path}' created.").format(path=local_path))
            else:
                _info(_("File '{path}' already exists.").format(path=local_path))
        except P4.P4Exception as e:
            # Best-effort: log and continue rather than abort initialization.
            LOG.warning('error setting up {file} file: {e}'
                        .format(file=filename, e=str(e)))
Example #2
0
def _install_tag(repo, fname):
    """Copy a tag from the Perforce object cache into the repository.

    The cached file is either an annotated tag object (zlib-compressed)
    or a lightweight tag holding one or more tag names, one per line.
    There may be multiple lightweight tags associated with the same
    SHA1, in which case multiple tags will be created.

    Arguments:
        repo -- pygit2 repository
        fname -- clientFile attr for sync'd tag
    """
    # Derive the SHA1 from the trailing .../xx/yyyy... portion of the path.
    sha1 = fname[-42:].replace('/', '')
    LOG.debug("_install_tag() examining {}...".format(sha1))
    with open(fname, 'rb') as tag_file:
        contents = tag_file.read()
    # Annotated tags are zlib-compressed; lightweight tags are plain text.
    try:
        zlib.decompress(contents)
        is_annotated = True
    except zlib.error:
        is_annotated = False
    if is_annotated:
        # Must be an annotated tag...
        blob_path = os.path.join('.git', 'objects', sha1[:2], sha1[2:])
        p4gf_util.ensure_parent_dir(blob_path)
        os.link(fname, blob_path)
        tag_obj = repo.get(sha1)
        tag_name = tag_obj.name
        LOG.debug("_install_tag() annotated tag {}".format(tag_name))
        _create_tag_ref(repo, tag_name, sha1)
    else:
        # Lightweight tags are stored simply as the tag name, but
        # there may be more than one name for a single SHA1.
        for tag_name in contents.decode('UTF-8').splitlines():
            LOG.debug("_install_tag() lightweight tag {}".format(tag_name))
            _create_tag_ref(repo, tag_name, sha1)
Example #3
0
def _configure_logger(config, section, name=None, ident=None):
    """Configure the named logger (or the root logger if name is None).

    Use provided settings, which likely came from _effective_config().

    :param dict config: logger settings (will be modified).
    :param str section: name of logging section (e.g. general).
    :param str name: name of the logger to configure (defaults to root logger).
    :param str ident: syslog identity, if handler is 'syslog'.

    """
    # pylint: disable=too-many-branches
    # Remove any handlers previously attached to this logger so we start clean.
    _deconfigure_logger(name)
    formatter = None
    if 'handler' in config:
        val = config.pop('handler')
        if val.startswith('syslog'):
            # An optional second word is the syslog address (e.g. /dev/log).
            words = val.split(maxsplit=1)
            if len(words) > 1:
                handler = P4GFSysLogHandler(address=words[1], ident=ident)
            else:
                handler = P4GFSysLogHandler(ident=ident)
            formatter = P4GFSysLogFormatter()
        elif val == 'console':
            handler = logging.StreamHandler()
        else:
            # Unrecognized handler name: warn on stderr and fall back to it.
            sys.stderr.write(
                _('Git Fusion: unrecognized log handler: {}\n').format(val))
            handler = logging.StreamHandler()
    elif 'filename' in config:
        fpath = config.pop('filename')
        p4gf_util.ensure_parent_dir(fpath)
        handler = logging.FileHandler(fpath, 'a', 'utf-8')
        if fpath.endswith('.xml'):
            formatter = XmlFormatter()
        # Rotate now if the log file has grown beyond its configured size.
        _rotate_log_file(fpath, section, config)
    else:
        # No handler/filename configured: default to stderr.
        handler = logging.StreamHandler()
    # Always remove these fake logging levels
    fs = config.pop('format', None)
    dfs = config.pop('datefmt', None)
    config.pop(_max_size_mb_name, None)
    config.pop(_retain_count_name, None)
    fs = _include_process_id(config, fs)
    if formatter is None:
        # Build the formatter if one has not already been.
        formatter = BaseFormatter(fs, dfs)
    handler.setFormatter(formatter)
    logger = logging.getLogger(name)
    # This handler is authoritative: do not propagate to ancestor loggers.
    logger.propagate = False
    logger.addHandler(handler)
    # 'root' entry holds this logger's own level (e.g. 'debug').
    logger.setLevel(config.pop('root').upper())

    # Set the logging levels based on the remaining settings
    for key, val in config.items():
        logging.getLogger(key).setLevel(val.upper())
Example #4
0
def _create_tag_ref(repo, name, sha1):
    """Write a single tag ref file under .git/refs/tags."""
    # Both a tag name and a SHA1 are required to form a ref.
    if not (name and sha1):
        LOG.warning("_create_tag_ref() invalid params: ({}, {})".format(
            name, sha1))
        return
    # Refuse to create a ref pointing at an object the repo does not have.
    if repo.get(sha1) is None:
        LOG.warning("_create_tag_ref() unknown object: {}".format(sha1))
        return
    ref_path = os.path.join('.git', 'refs', 'tags', name)
    p4gf_util.ensure_parent_dir(ref_path)
    with open(ref_path, 'w') as ref_file:
        ref_file.write(sha1)
Example #5
0
def _log_file_lock(name):
    """Acquire exclusive access to the lock for rotating the logs.

    Returns a context manager: the flock is held for the duration of the
    ``with`` body and released when the descriptor is closed.

    NOTE(review): the original body was a bare generator (it contains
    ``yield`` but no @contextlib.contextmanager), which cannot be used in
    a ``with`` statement; the decorator appears to have been lost. This
    version returns a proper context manager and also guarantees the fd
    is closed if flock/write raise.

    :param str name: name of the logging section (e.g. general)

    """
    import contextlib

    path = "{home}/locks/log_{name}.lock".format(home=p4gf_const.P4GF_HOME,
                                                 name=name)

    @contextlib.contextmanager
    def _locked():
        p4gf_util.ensure_parent_dir(path)
        fd = os.open(path, os.O_CREAT | os.O_WRONLY)
        try:
            # Blocks until any other holder releases the lock.
            fcntl.flock(fd, fcntl.LOCK_EX)
            # Record the holder's pid to aid debugging of stuck locks.
            os.write(fd, bytes(str(os.getpid()), "utf-8"))
            yield
        finally:
            # Closing the descriptor also releases the flock.
            os.close(fd)

    return _locked()
def write_git_object_from_sha1(view_repo, object_sha1, target_path):
    """Extract a commit/tree/blob/tag object from a git (packed/unpacked)
    object-store and write it to a file path.

    The target_path will be written with a re-constructed git object
    (zlib-compressed loose-object format) extracted via pygit2.

    :param view_repo: pygit2 repository
    :param object_sha1: sha1 in git object store
    :param target_path: path of new git object

    :raises RuntimeError: if the object cannot be written.
    """
    data = cat_file(object_sha1, view_repo)

    p4gf_util.ensure_parent_dir(target_path)
    try:
        with open(target_path, 'wb') as compressed:
            compress = zlib.compressobj()
            compressed.write(compress.compress(data))
            compressed.write(compress.flush())
    except Exception as e:
        LOG.exception("Failed to write sha1='%s' from repo to path '%s'",
                      object_sha1, target_path)
        # Chain the original exception so the root cause is preserved
        # in the traceback (raise ... from e).
        raise RuntimeError(
            _("Failed to write sha1='{sha1}' from repo to path {path}.\n{exception}"
              ).format(sha1=object_sha1, path=target_path, exception=e)) from e
def _log_crash_report(errmsg):
    """Capture the fast-import crash report to a separate log file.

    :type errmsg: str
    :param errmsg: error message to write to Git Fusion log.

    """
    log = logging.getLogger('failures')
    if not log.isEnabledFor(logging.ERROR):
        return

    log.error(errmsg)

    # For each crash report we find, dump its contents.
    # In theory we clean up after a crash so there should be only one.
    cwd = os.getcwd()
    for entry in os.listdir('.git'):
        if entry.startswith('fast_import_crash_'):
            report_path = os.path.join(cwd, '.git', entry)
            # Timestamp the log name; drop ':' (unfriendly in file names)
            # and sub-second precision.
            date = datetime.datetime.now()
            date_str = date.isoformat().replace(':', '').split('.')[0]
            log_path = p4gf_const.P4GF_FAILURE_LOG.format(
                P4GF_DIR=p4gf_const.P4GF_HOME,
                prefix='git-fast-import-',
                date=date_str)
            p4gf_util.ensure_parent_dir(log_path)
            # Compress the file to help in preserving its integrity.
            gz_path = log_path + '.gz'
            log.error(
                'Compressing fast-import crash report to {}'.format(gz_path))
            with open(report_path, 'rb') as fin, gzip.open(gz_path,
                                                           'wb') as fout:
                while True:
                    b = fin.read(100 * 1024)
                    # Empty read means EOF (idiomatic truthiness test).
                    if not b:
                        break
                    fout.write(b)
                fout.flush()
Example #8
0
def _add_tag(ctx, name, sha1, edit_list, add_list):
    """Add a tag to the object cache.

    If adding another lightweight tag that refers to the same object,
    edit the file rather than add.
    """
    LOG.debug("_add_tag() adding tag {}".format(name))
    fpath = os.path.join(ctx.gitlocalroot, _client_path(ctx, sha1))
    if not os.path.exists(fpath):
        # New cache entry: store an annotated tag object, or just the
        # tag name for a lightweight tag.
        obj = ctx.repo.get(sha1)
        if obj.type == pygit2.GIT_OBJ_TAG:
            LOG.debug("_add_tag() annotated tag {}".format(name))
            p4gf_git.write_git_object_from_sha1(ctx.repo, sha1, fpath)
        else:
            # Lightweight tags can be anything: commit, tree, blob
            LOG.debug("_add_tag() lightweight tag {}".format(name))
            p4gf_util.ensure_parent_dir(fpath)
            with open(fpath, 'wb') as out:
                out.write(name.encode('UTF-8'))
        add_list.append(fpath)
        return
    # Overwriting an existing tag? Git prohibits that.
    # But, another lightweight tag of the same object is okay.
    # Sanity check if this is a lightweight tag of an annotated
    # tag and reject with a warning.
    with open(fpath, 'rb') as cached:
        contents = cached.read()
    try:
        # Decompressible content means an annotated tag is cached: reject.
        zlib.decompress(contents)
        return
    except zlib.error:
        pass
    # it's a lightweight tag, just append the name
    with open(fpath, 'ab') as out:
        out.write(b'\n')
        out.write(name.encode('UTF-8'))
    edit_list.append(fpath)
    def flush(self):
        """compress the last file, hash it and stick it in the repo

        Now that we've got the complete file contents, the header can be
        created and used along with the spooled content to create the sha1
        and zlib compressed blob content.  Finally that is written into
        the .git/objects dir.
        """
        # No revision spooled: nothing to flush.
        if not self.rev:
            return
        # tell() after spooling gives the total content length in bytes.
        size = self.tempfile.tell()
        if size > 0 and self.rev.is_symlink():
            # p4 print adds a trailing newline, which is no good for symlinks.
            self.tempfile.seek(-1, 2)
            b = self.tempfile.read(1)
            # 10 == ord('\n'); truncate the trailing newline if present.
            if b[0] == 10:
                size = self.tempfile.truncate(size - 1)
        self.tempfile.seek(0)
        self.total_byte_count += size
        self.printed_rev_count += 1
        # Spool the compressed loose object into a named temp file; it is
        # moved into .git/objects only if the blob is not already present.
        compressed = tempfile.NamedTemporaryFile(delete=False, dir=self.tempdir,
                                                 prefix='p2g-blob-')
        compress = zlib.compressobj()
        # pylint doesn't understand dynamic definition of sha1 in hashlib
        # pylint: disable=E1101
        sha1 = hashlib.sha1()

        # pylint:disable=W1401
        # disable complaints about the null. We need that.
        # add header first
        # Git loose-object header: "blob <size>\0"; both the hash and the
        # compressed stream must include it.
        header = ("blob " + str(size) + "\0").encode()
        compressed.write(compress.compress(header))
        sha1.update(header)

        # then actual contents
        chunksize = 4096
        while True:
            chunk = self.tempfile.read(chunksize)
            if chunk:
                compressed.write(compress.compress(chunk))
                sha1.update(chunk)
            else:
                break
        # pylint: enable=E1101
        compressed.write(compress.flush())
        compressed.close()
        digest = sha1.hexdigest()
        self.rev.sha1 = digest
        blob_path_tuple = _sha1_to_blob_path_tuple(self.rev.sha1)
        if not os.path.exists(blob_path_tuple.path):
            # Blob not yet in the object store: move the temp file into place.
            if not os.path.exists(blob_path_tuple.dir):
                os.makedirs(blob_path_tuple.dir)
            shutil.move(compressed.name, blob_path_tuple.path)
        else:
            # Blob already exists; discard the duplicate temp file.
            os.remove(compressed.name)
        #self.revs.append(self.rev)
        # Create a depot-path symlink pointing at the blob, if not present.
        symlink_path = _depot_rev_to_symlink( depot_path  = self.rev.depot_path
                                            , rev         = self.rev.revision
                                            , symlink_dir = self.symlink_dir )
        p4gf_util.ensure_parent_dir(symlink_path)

        e = os.path.islink(symlink_path)
        if not e:
            os.symlink(blob_path_tuple.path, symlink_path)

        _debug3('Printed {e} {blob} @{ch:<5} {rev:<50} {symlink}'
               , blob    = blob_path_tuple.path
               , symlink = symlink_path
               , rev     = self.rev.rev_path()
               , ch      = self.rev.change
               , e       = 'e' if e else ' ')
        # Mark this revision as flushed.
        self.rev = None