Example #1
0
    def test_multiple_updates(self):
        a = self.make_empty_archive('A')
        m = FileManifest()
        data0 = ( \
            ('foo.txt', 'This is the foo file.\n'),
            ('empty.txt', ''),
            ('big.txt', '*' * (1 * 128)),
            )

        print "manifest sha: ", str_sha(m.stored_sha)
        m.update(a, entries_from_seq(self.tmps, data0))
        print "manifest sha: ", str_sha(m.stored_sha)

        dump_archive(a, "AFTER FIRST WRITE:")
        verify_manifest(a, m)

        data1 = ( \
            ('foo.txt', 'This is the foo file.\n'),
            ('empty.txt', ''),
            ('big.txt', 'hello' + ('*' * (1 * 128))),
            )

        m.update(a, entries_from_seq(self.tmps, data1))
        print "manifest sha: ", str_sha(m.stored_sha)
        dump_archive(a)
        verify_link_map(a.blocks.link_map)
        verify_manifest(a, m)
Example #2
0
 def test_single_update(self):
     """ Smoke test: a single update() call against a fresh archive. """
     a = self.make_empty_archive('A')
     m = FileManifest()
     # Mix of a small text file, an empty file and a large (128K) file.
     data = ( \
         ('foo.txt', 'This is the foo file.\n'),
         ('empty.txt', ''),
         ('big.txt', '*' * (1024 * 128)),
         )
     entries = entries_from_seq(self.tmps, data)
     m.update(a, entries)
     dump_archive(a)
Example #3
0
 def test_single_update(self):
     """ Write one batch of files into a brand new archive and dump it. """
     archive = self.make_empty_archive('A')
     manifest = FileManifest()
     contents = (('foo.txt', 'This is the foo file.\n'),
                 ('empty.txt', ''),
                 ('big.txt', '*' * (1024 * 128)))
     manifest.update(archive, entries_from_seq(self.tmps, contents))
     dump_archive(archive)
Example #4
0
def local_create(cache_dir, uri, from_dir):
    """ Create a new local archive from the files under from_dir.

        Returns the provisional top key for the new archive.
    """

    # Make the backing archive for the uri.
    archive = create_archive(cache_dir, uri)
    try:
        # A fresh (empty) manifest pulls every file under from_dir
        # into the archive.
        empty_manifest = FileManifest()
        file_entries = entries_from_dir(from_dir, True,
                                        make_skip_regex(cache_dir))
        empty_manifest.update(archive, file_entries)

        return provisional_top_key(archive, empty_manifest, ((), (), 0))
    finally:
        archive.close()
Example #5
0
def local_create(cache_dir, uri, from_dir):
    """ Create a new local archive from the files under from_dir.

        Returns the provisional top key for the new archive.
    """

    # Create the backing archive files for uri.
    archive = create_archive(cache_dir, uri)
    try:
        # Create an empty manifest and use it to update the archive.
        manifest = FileManifest()

        # Entries filtered by a regex built from cache_dir
        # (presumably to keep the cache out of the archive -- confirm).
        manifest.update(archive,
                        entries_from_dir(from_dir,
                                         True,
                                         make_skip_regex(cache_dir)))

        # ((), (), 0): empty previous top-key data -- TODO: confirm layout.
        return provisional_top_key(archive, manifest, ((), (), 0))
    finally:
        # Always release the archive, even on failure.
        archive.close()
Example #6
0
def local_reinsert(cache_dir, uri):
    """ Return the top_key, file list info needed to fully reinsert
        the archive. """
    top_key, archive = load_cached_archive(cache_dir, uri)
    try:
        # top_key[1][0][0] locates the stored manifest inside the archive.
        stored_manifest = FileManifest.from_archive(archive, top_key[1][0][0])
        return provisional_top_key(archive, stored_manifest, top_key, True)
    finally:
        archive.close()
Example #7
0
def local_reinsert(cache_dir, uri):
    """ Return the top_key, file list info needed to fully reinsert
        the archive. """
    # Load cached topkey
    top_key, archive = load_cached_archive(cache_dir, uri)
    try:
        # top_key[1][0][0] is the value the manifest was stored under
        # -- TODO: confirm the top_key layout against provisional_top_key().
        manifest = FileManifest.from_archive(archive, top_key[1][0][0])
        # NOTE(review): the trailing True presumably requests
        # full-reinsert info -- confirm against provisional_top_key().
        return provisional_top_key(archive, manifest, top_key, True)
    finally:
        archive.close()
Example #8
0
    def test_multiple_updates(self):
        """ Update the same archive twice and verify consistency
            after each update. """
        a = self.make_empty_archive('A')
        m = FileManifest()
        # First batch of files to insert.
        data0 = ( \
            ('foo.txt', 'This is the foo file.\n'),
            ('empty.txt', ''),
            ('big.txt', '*' * (1 * 128)),
            )

        print "manifest sha: ", str_sha(m.stored_sha)
        m.update(a, entries_from_seq(self.tmps, data0))
        print "manifest sha: ", str_sha(m.stored_sha)

        dump_archive(a, "AFTER FIRST WRITE:")
        verify_manifest(a, m)

        # Second batch: only big.txt changes (gains a 'hello' prefix).
        data1 = ( \
            ('foo.txt', 'This is the foo file.\n'),
            ('empty.txt', ''),
            ('big.txt', 'hello' + ('*' * (1 * 128))),
            )

        m.update(a, entries_from_seq(self.tmps, data1))
        print "manifest sha: ", str_sha(m.stored_sha)
        dump_archive(a)
        verify_link_map(a.blocks.link_map)
        verify_manifest(a, m)
Example #9
0
    def test_many_updates(self):
        """ Stress test: run 100 randomized updates against one archive,
            appending to already-archived files, and verify manifest and
            link-map consistency after every update. """
        a = self.make_empty_archive('A')
        m = FileManifest()

        files = ("A.txt", "B.txt", "C.txt")

        updates = 100
        for dummy in range(updates):
            # Touch the files in a different (random) order each pass.
            names = list(files)
            random.shuffle(names)
            data = []
            for name in names:
                text = ''
                if name in m.name_map:
                    # File already archived: fetch its current contents
                    # so the new version appends rather than replaces.
                    tmp = self.tmps.make_temp_file()
                    a.get_file(m.name_map[name][1], tmp)
                    text = self.read_file(tmp)
                # Append 20 fresh lines of text.
                # (was: "\n".join([line for line in lines(20)]) --
                # the wrapping comprehension was redundant)
                text += "\n".join(lines(20))

                data.append((name, text))

            m.update(a, entries_from_seq(self.tmps, data))

            # Full consistency check every round.
            verify_manifest(a, m, True)
            verify_link_map(a.blocks.link_map)
            dump_blocks(a.blocks, None, True)

        a.close()
Example #10
0
    def test_many_updates(self):
        """ Run 100 randomized updates against one archive, verifying
            manifest and link-map consistency after every update. """
        a = self.make_empty_archive('A')
        m = FileManifest()

        files = ("A.txt", "B.txt", "C.txt")

        updates = 100
        for dummy in range(0, updates):
            # Touch the files in a different (random) order each pass.
            names = list(files)
            random.shuffle(names)
            #names = names[:random.randrange(1, len(files))]
            data = []
            for name in names:
                text = ''
                if name in m.name_map:
                    # File already archived: fetch its current contents
                    # so the new version appends rather than replaces.
                    tmp = self.tmps.make_temp_file()
                    a.get_file(m.name_map[name][1], tmp)
                    text = self.read_file(tmp)
                # Append 20 fresh lines of text.
                text += "\n".join([line for line in lines(20)])

                data.append((name, text))

            #print "updating:"
            #for value in data:
            #    print value[0], len(value[1])

            #print "manifest sha: ", str_sha(m.stored_sha)
            #dump_archive(a, "BEFORE UPDATE: %i" % count, True)
            m.update(a, entries_from_seq(self.tmps, data))
            #print "manifest sha: ", str_sha(m.stored_sha)

            #dump_archive(a, "AFTER UPDATE: %i" % count, True)
            # Full consistency check every round.
            verify_manifest(a, m, True)
            verify_link_map(a.blocks.link_map)
            dump_blocks(a.blocks, None, True)

        a.close()
Example #11
0
def local_update(cache_dir, uri, from_dir):
    """ Update the archive by inserting deltas against from_dir. """
    top_key, archive = load_cached_archive(cache_dir, uri)
    try:
        # Start from the manifest already stored in the archive.
        previous = FileManifest.from_archive(archive, top_key[1][0][0])
        skip = make_skip_regex(cache_dir)
        try:
            previous.update(archive, entries_from_dir(from_dir, True, skip))
        except UpToDateException:
            # Hmmm don't want to force client code
            # to import archive module
            return (None, None)

        return provisional_top_key(archive, previous, top_key)
    finally:
        archive.close()
Example #12
0
def local_update(cache_dir, uri, from_dir):
    """ Update the archive by inserting deltas against from_dir. """
    # Load cached topkey
    top_key, archive = load_cached_archive(cache_dir, uri)
    try:
        # Load the old file manifest and use it to update.
        manifest = FileManifest.from_archive(archive, top_key[1][0][0])
        try:
            manifest.update(
                archive,
                entries_from_dir(from_dir, True, make_skip_regex(cache_dir)))
        except UpToDateException:
            # Hmmm don't want to force client code
            # to import archive module
            # (None, None) signals "already up to date, nothing inserted".
            return (None, None)

        return provisional_top_key(archive, manifest, top_key)
    finally:
        archive.close()
Example #13
0
def local_synch(ui_, cache_dir, uri, to_dir):
    """ Update to_dir from the archive in cache_dir.

        CAUTION: May delete files and directories.
    """

    top_key, archive = load_cached_archive(cache_dir, uri)
    try:
        # Load the old file manifest and use it to extract.
        manifest = FileManifest.from_archive(archive, top_key[1][0][0])

        # result holds (created, modified, removed, removed-subdir) lists,
        # per the status messages below.
        result = manifest_to_dir(archive, manifest,
                                 to_dir, make_skip_regex(cache_dir))
        ui_.status(("Created: %i, Modified: %i, Removed: %i\n") %
                   (len(result[0]), len(result[1]), len(result[2])))

        if len(result[3]) > 0:
            # BUG FIX: was choose_word(result[3] == 1, ...), which compared
            # the list itself to 1 (always False) and so always printed the
            # plural form. Compare the count instead.
            ui_.status("Removed %i local %s.\n" % (len(result[3]),
                choose_word(len(result[3]) == 1,
                            "subdirectory", "subdirectories")))

    finally:
        archive.close()
Example #14
0
def local_synch(ui_, cache_dir, uri, to_dir):
    """ Update to_dir from the archive in cache_dir.

        CAUTION: May delete files and directories.
    """

    top_key, archive = load_cached_archive(cache_dir, uri)
    try:
        # Load the old file manifest and use it to extract.
        manifest = FileManifest.from_archive(archive, top_key[1][0][0])

        # result holds (created, modified, removed, removed-subdir) lists,
        # per the status messages below.
        result = manifest_to_dir(archive, manifest, to_dir,
                                 make_skip_regex(cache_dir))
        ui_.status(("Created: %i, Modified: %i, Removed: %i\n") %
                   (len(result[0]), len(result[1]), len(result[2])))

        if len(result[3]) > 0:
            # BUG FIX: was choose_word(result[3] == 1, ...), which compared
            # the list itself to 1 (always False) and so always printed the
            # plural form. Compare the count instead.
            ui_.status("Removed %i local %s.\n" %
                       (len(result[3]),
                        choose_word(len(result[3]) == 1, "subdirectory",
                                    "subdirectories")))

    finally:
        archive.close()
Example #15
0
    def test_hg_repo_torture_test(self):
        """ Export every revision of the hg repo at HG_REPO_DIR, insert
            it into an archive, read it back through a second archive
            instance and diff the extraction against the export. """
        if HG_REPO_DIR == '':
            print "Set HG_REPO_DIR!"
            self.assertTrue(False)

        writer = self.make_empty_archive('hgtst')
        manifest = FileManifest()

        rev = 0
        max_rev = 1  # Set below, by export_hg_repo().
        while rev < max_rev:
            # Export into a clean directory each pass.
            target_dir = os.path.join(self.tmp_dir, '__hg_repo__')
            if os.path.exists(target_dir):
                shutil.rmtree(target_dir)  # DANGEROUS

            # export the repo
            # FIX: Wacky way to set max_rev.
            print "Exporting rev: ", rev
            max_rev = export_hg_repo(HG_REPO_DIR, target_dir, rev)
            if rev >= max_rev:
                break

            # put the export dir into the archive
            entries = entries_from_dir(target_dir, True)
            manifest.update(writer, entries)

            # Will be written into Freenet top key
            # along with rest of archive info.
            s3kr1t = manifest.stored_sha

            dump_blocks(writer.blocks, None, True)
            # create a second archive instance from the same block files.
            # REDFLAG: Would this work on windoze?
            #          writer still has files open for reading.
            reader = self.load_archive('hgtst')
            read_manifest = FileManifest.from_archive(reader, s3kr1t)
            # REDFLAG: audit for other places where I could do
            # direct dict compares?
            assert (read_manifest.name_map == manifest.name_map)

            # clean the archive output dir
            unarchived_dir = os.path.join(self.tmp_dir, '__unarchived__')
            if os.path.exists(unarchived_dir):
                shutil.rmtree(unarchived_dir)  # DANGEROUS

            os.makedirs(unarchived_dir)

            # extract the archive to the cleaned files
            manifest_to_dir(reader, read_manifest, unarchived_dir)
            reader.close()

            # A poor man's diff: build name -> file-sha maps for the
            # inserted dir and the extracted dir, then compare them.
            insert_map = {}
            for entry in entries_from_dir(target_dir, True):
                insert_map[entry.get_name()] = get_file_sha(entry.make_file())
                entry.release()  # NOP

            unarchived_map = {}
            for entry in entries_from_dir(unarchived_dir, True):
                unarchived_map[entry.get_name()] = (get_file_sha(
                    entry.make_file()))
                entry.release()  # NOP

            assert len(insert_map) > 0
            assert insert_map == unarchived_map
            print "%i files compared equal." % len(insert_map)

            rev += 1
Example #16
0
    def test_hg_repo_torture_test(self):
        if HG_REPO_DIR == '':
            print "Set HG_REPO_DIR!"
            self.assertTrue(False)

        writer = self.make_empty_archive('hgtst')
        manifest = FileManifest()

        rev = 0
        max_rev = 1 # Set below
        while rev < max_rev:
            target_dir = os.path.join(self.tmp_dir, '__hg_repo__')
            if os.path.exists(target_dir):
                shutil.rmtree(target_dir) # DANGEROUS

            # export the repo
            # FIX: Wacky way to set max_rev.
            print "Exporting rev: ", rev
            max_rev = export_hg_repo(HG_REPO_DIR, target_dir, rev)
            if rev >= max_rev:
                break

            # put the export dir into the archive
            # print "Inserting into the archive..."

            entries = entries_from_dir(target_dir, True)
            manifest.update(writer, entries)

            # Will be written into Freenet top key
            # along with rest of archive info.
            s3kr1t = manifest.stored_sha

            dump_blocks(writer.blocks, None, True)
            # create a second archive instance from the same block files.
            # REDFLAG: Would this work on windoze?
            #          writer still has files open for reading.
            reader = self.load_archive('hgtst')
            read_manifest = FileManifest.from_archive(reader, s3kr1t)
            # REDFLAG: audit for other places where I could do
            # direct dict compares?
            assert (read_manifest.name_map ==  manifest.name_map)

            # clean the archive output dir
            unarchived_dir = os.path.join(self.tmp_dir, '__unarchived__')
            if os.path.exists(unarchived_dir):
                shutil.rmtree(unarchived_dir) # DANGEROUS

            os.makedirs(unarchived_dir)

            # extract the archive to the cleaned files
            manifest_to_dir(reader, read_manifest, unarchived_dir)
            reader.close()

            # diff the directories

            # A poor man's diff.
            insert_map = {}
            for entry in entries_from_dir(target_dir, True):
                insert_map[entry.get_name()] = get_file_sha(entry.make_file())
                entry.release() # NOP

            unarchived_map = {}
            for entry in entries_from_dir(unarchived_dir, True):
                unarchived_map[entry.get_name()] = (
                    get_file_sha(entry.make_file()))
                entry.release() # NOP


            assert len(insert_map) > 0
            assert insert_map == unarchived_map
            print "%i files compared equal." % len(insert_map)

            rev += 1