Example 1
    def __call__(self, store):
        changeset_chunks = ChunksCollection(
            progress_iter('Reading {} changesets', next(self._bundle, None)))

        for rev_chunk in progress_iter('Reading and importing {} manifests',
                                       next(self._bundle, None)):
            pass

        def enumerate_files(iter):
            last_name = None
            count_names = 0
            for count_chunks, (name, chunk) in enumerate(iter, start=1):
                if name != last_name:
                    count_names += 1
                last_name = name
                yield (count_chunks, count_names), chunk

        for rev_chunk in progress_enum(
                'Reading and importing {} revisions of {} files',
                enumerate_files(next(self._bundle, None))):
            pass

        if next(self._bundle, None) is not None:
            assert False
        del self._bundle

        for cs in progress_iter(
                'Importing {} changesets',
                changeset_chunks.iter_initialized(lambda x: x, store.changeset,
                                                  Changeset.from_chunk)):
            try:
                store.store_changeset(cs)
            except NothingToGraftException:
                logging.debug('Cannot graft %s, not importing.', cs.node)
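For reference, the counting scheme of enumerate_files can be exercised on its own. The (name, chunk) pairs below are toy placeholders (in the real code they come from the file part of the bundle), and a plain print stands in for progress_enum, which is assumed to only consume the (count_chunks, count_names) pairs for its progress message:

def enumerate_files(iterable):
    # Same counting logic as above: chunks are counted globally, names only
    # when they change, so the pair feeds '{} revisions of {} files'.
    last_name = None
    count_names = 0
    for count_chunks, (name, chunk) in enumerate(iterable, start=1):
        if name != last_name:
            count_names += 1
        last_name = name
        yield (count_chunks, count_names), chunk

toy_chunks = [('a.txt', b'rev1'), ('a.txt', b'rev2'), ('b.txt', b'rev1')]
for (chunks, names), chunk in enumerate_files(iter(toy_chunks)):
    # Prints 1/1, then 2/1, then 3/2 revisions/files.
    print('{} revisions of {} files'.format(chunks, names))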
Example 2
    def progress(iter, size=0):
        def _progress(iter, size):
            read = 0
            for chunk in iter:
                read += len(chunk)
                if size:
                    yield read * 100 / size, chunk
                else:
                    yield read, chunk

        fmt = ' {}%' if size else ' {} bytes'
        return progress_enum(fmt, _progress(iter, size))
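A self-contained look at what the inner _progress generator yields, leaving out the fmt string and progress_enum (which presumably renders those values). Note that under Python 3 the division produces a float percentage:

def _progress(iterable, size):
    # Copy of the inner generator above, driven with toy byte chunks.
    read = 0
    for chunk in iterable:
        read += len(chunk)
        if size:
            yield read * 100 / size, chunk   # percentage of the known size
        else:
            yield read, chunk                # cumulative byte count

chunks = [b'x' * 10, b'x' * 30, b'x' * 60]
print([value for value, _ in _progress(iter(chunks), size=100)])  # 10.0, 40.0, 100.0
print([value for value, _ in _progress(iter(chunks), size=0)])    # 10, 40, 100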
Example 3
    def __call__(self, store):
        changeset_chunks = ChunksCollection(
            progress_iter('Reading {} changesets', next(self._bundle, None)))

        if experiment('store-manifest'):
            for rev_chunk in progress_iter(
                    'Reading and importing {} manifests',
                    next(self._bundle, None)):
                GitHgHelper.store('manifest', rev_chunk)
                store.check_manifest(rev_chunk)
        else:
            for mn in progress_iter(
                    'Reading and importing {} manifests',
                    iter_initialized(
                        store.manifest,
                        iter_chunks(next(self._bundle, None), ManifestInfo))):
                store.store_manifest(mn)

        def enumerate_files(iter):
            last_name = None
            count_names = 0
            for count_chunks, (name, chunk) in enumerate(iter):
                if name != last_name:
                    count_names += 1
                last_name = name
                yield (count_chunks, count_names), chunk

        for rev_chunk in progress_enum(
                'Reading and importing {} revisions of {} files',
                enumerate_files(next(self._bundle, None))):
            GitHgHelper.store('file', rev_chunk)

        if next(self._bundle, None) is not None:
            assert False
        del self._bundle

        for cs in progress_iter(
                'Importing {} changesets',
                changeset_chunks.iter_initialized(lambda x: x, store.changeset,
                                                  Changeset.from_chunk)):
            try:
                store.store_changeset(cs)
            except NothingToGraftException:
                logging.warning('Cannot graft %s, not importing.', cs.node)
Example 4
    def __call__(self, store):
        changeset_chunks = ChunksCollection(progress_iter(
            'Reading {} changesets', next(self._bundle, None)))

        for rev_chunk in progress_iter(
                'Reading and importing {} manifests',
                next(self._bundle, None)):
            if not self._use_store_changegroup:
                GitHgHelper.store('manifest', rev_chunk)

        def enumerate_files(iter):
            last_name = None
            count_names = 0
            for count_chunks, (name, chunk) in enumerate(iter, start=1):
                if name != last_name:
                    count_names += 1
                last_name = name
                yield (count_chunks, count_names), chunk

        for rev_chunk in progress_enum(
                'Reading and importing {} revisions of {} files',
                enumerate_files(next(self._bundle, None))):
            if not self._use_store_changegroup:
                GitHgHelper.store('file', rev_chunk)

        if next(self._bundle, None) is not None:
            assert False
        del self._bundle

        for cs in progress_iter(
                'Importing {} changesets',
                changeset_chunks.iter_initialized(lambda x: x, store.changeset,
                                                  Changeset.from_chunk)):
            try:
                store.store_changeset(cs)
            except NothingToGraftException:
                logging.warning('Cannot graft %s, not importing.', cs.node)
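Across these variants, the three next(self._bundle, None) calls followed by the final check suggest that self._bundle is an iterator over exactly three groups: changeset chunks, manifest chunks, and (name, chunk) pairs for file revisions. A hedged toy model of that consumption pattern, with all contents invented for illustration:

def toy_bundle():
    # Placeholder groups; real changegroup data is binary revchunks.
    yield iter([b'cs-chunk-1', b'cs-chunk-2'])      # changeset chunks
    yield iter([b'mn-chunk-1'])                     # manifest chunks
    yield iter([('a.txt', b'file-chunk-1')])        # file (name, chunk) pairs

bundle = toy_bundle()
for group in (next(bundle, None), next(bundle, None), next(bundle, None)):
    for chunk in group:
        pass  # a real consumer stores/imports each chunk here
assert next(bundle, None) is None  # mirrors the final check in __call__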
Example 5
def bundle_data(store, commits):
    manifests = OrderedDict()
    files = defaultdict(list)

    for node, parents in progress_iter('Bundling {} changesets', commits):
        if len(parents) > 2:
            raise Exception(
                'Pushing octopus merges to mercurial is not supported')

        changeset_data = store.read_changeset_data(node)
        is_new = changeset_data is None or check_enabled('bundle')
        if is_new:
            store.create_hg_metadata(node, parents)
        hg_changeset = store._changeset(node, include_parents=True)
        if is_new:
            store.add_head(hg_changeset.node, hg_changeset.parent1,
                           hg_changeset.parent2)
        yield hg_changeset
        manifest = hg_changeset.manifest
        if manifest not in manifests and manifest != NULL_NODE_ID:
            if manifest not in (store.changeset(p).manifest
                                for p in hg_changeset.parents):
                manifests[manifest] = hg_changeset.node

    yield None

    for manifest, changeset in progress_iter('Bundling {} manifests',
                                             iteritems(manifests)):
        hg_manifest = store.manifest(manifest, include_parents=True)
        hg_manifest.changeset = changeset
        yield hg_manifest
        manifest_ref = store.manifest_ref(manifest)
        parents = tuple(store.manifest_ref(p) for p in hg_manifest.parents)
        changes = get_changes(manifest_ref, parents)
        for path, hg_file, hg_fileparents in changes:
            if hg_file != NULL_NODE_ID:
                files[store.manifest_path(path)].append(
                    (hg_file, hg_fileparents, changeset, parents))

    yield None

    def iter_files(files):
        count_chunks = 0
        for count_names, path in enumerate(sorted(files), 1):
            yield (count_chunks, count_names), path
            nodes = set()
            for node, parents, changeset, mn_parents in files[path]:
                if node in nodes:
                    continue
                count_chunks += 1
                nodes.add(node)
                file = store.file(node, parents, mn_parents, path)
                file.changeset = changeset
                assert file.node == file.sha1
                yield (count_chunks, count_names), file

            yield (count_chunks, count_names), None

    for chunk in progress_enum('Bundling {} revisions of {} files',
                               iter_files(files)):
        yield chunk

    yield None
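The counting and deduplication in iter_files can also be shown in isolation. Here the store.file(...) objects are replaced by bare node strings purely to keep the sketch self-contained; the counters and the per-path None terminator behave exactly as above:

def iter_files(files):
    count_chunks = 0
    for count_names, path in enumerate(sorted(files), 1):
        yield (count_chunks, count_names), path
        nodes = set()
        for node in files[path]:
            if node in nodes:
                continue             # skip duplicate revisions of a node
            count_chunks += 1
            nodes.add(node)
            yield (count_chunks, count_names), node
        yield (count_chunks, count_names), None  # end-of-file marker

toy = {'b.txt': ['n3'], 'a.txt': ['n1', 'n1', 'n2']}
for counts, item in iter_files(toy):
    print(counts, item)
# (0, 1) a.txt, (1, 1) n1, (2, 1) n2, (2, 1) None,
# (2, 2) b.txt, (3, 2) n3, (3, 2) None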
Example 6
def bundle_data(store, commits):
    manifests = OrderedDict()
    files = defaultdict(list)

    for node, parents in progress_iter('Bundling {} changesets', commits):
        if len(parents) > 2:
            raise Exception(
                'Pushing octopus merges to mercurial is not supported')

        changeset_data = store.read_changeset_data(node)
        is_new = changeset_data is None or check_enabled('bundle')
        if is_new:
            store.create_hg_metadata(node, parents)
        hg_changeset = store._changeset(node, include_parents=True)
        if is_new:
            store.add_head(hg_changeset.node, hg_changeset.parent1,
                           hg_changeset.parent2)
        yield hg_changeset
        manifest = hg_changeset.manifest
        if manifest not in manifests and manifest != NULL_NODE_ID:
            if manifest not in (store.changeset(p).manifest
                                for p in hg_changeset.parents):
                manifests[manifest] = hg_changeset.node

    yield None

    for manifest, changeset in progress_iter('Bundling {} manifests',
                                             manifests.iteritems()):
        hg_manifest = store.manifest(manifest, include_parents=True)
        hg_manifest.changeset = changeset
        yield hg_manifest
        manifest_ref = store.manifest_ref(manifest)
        parents = tuple(store.manifest_ref(p) for p in hg_manifest.parents)
        changes = get_changes(manifest_ref, parents)
        for path, hg_file, hg_fileparents in changes:
            if hg_file != NULL_NODE_ID:
                files[store.manifest_path(path)].append(
                    (hg_file, hg_fileparents, changeset, parents))

    yield None

    def iter_files(files):
        count_chunks = 0
        for count_names, path in enumerate(sorted(files), 1):
            yield (count_chunks, count_names), path
            nodes = set()
            for node, parents, changeset, mn_parents in files[path]:
                if node in nodes:
                    continue
                count_chunks += 1
                nodes.add(node)
                file = store.file(node, parents, mn_parents, path)
                file.changeset = changeset
                assert file.node == file.sha1
                yield (count_chunks, count_names), file

            yield (count_chunks, count_names), None

    for chunk in progress_enum('Bundling {} revisions of {} files',
                               iter_files(files)):
        yield chunk

    yield None
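This variant calls manifests.iteritems(), which only exists on Python 2 dicts, while Example 5 calls a standalone iteritems(manifests). If that name is a compatibility helper (an assumption; its actual definition is not shown here), it could look roughly like this:

def iteritems(d):
    # Hypothetical Python 2/3 shim; the iteritems used in Example 5 may well
    # come from a compat module with a different implementation.
    try:
        return d.iteritems()    # Python 2: lazy iterator over items
    except AttributeError:
        return iter(d.items())  # Python 3: items() is already a view

for manifest, changeset in iteritems({'deadbeef': 'cafe'}):
    print(manifest, changeset)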