def setUp(self):
    super(TestThinPack, self).setUp()
    self.store = MemoryObjectStore()
    self.blobs = {}
    for blob in ('foo', 'bar', 'foo1234', 'bar2468'):
        self.blobs[blob] = make_object(Blob, data=blob)
    self.store.add_object(self.blobs['foo'])
    self.store.add_object(self.blobs['bar'])

    # Build a thin pack. 'foo' is used as an external reference, 'bar' as
    # an internal reference.
    self.pack_dir = tempfile.mkdtemp()
    self.addCleanup(shutil.rmtree, self.pack_dir)
    self.pack_prefix = os.path.join(self.pack_dir, 'pack')

    f = open(self.pack_prefix + '.pack', 'wb')
    try:
        build_pack(f, [
            (REF_DELTA, (self.blobs['foo'].id, 'foo1234')),
            (Blob.type_num, 'bar'),
            (REF_DELTA, (self.blobs['bar'].id, 'bar2468'))],
            store=self.store)
    finally:
        f.close()

    # Index the new pack.
    pack = self.make_pack(True)
    data = PackData(pack._data_path)
    data.pack = pack
    data.create_index(self.pack_prefix + '.idx')

    # 'bar' is now resolvable from inside the pack itself, so drop it from
    # the store; only 'foo' should need the external-reference path.
    del self.store[self.blobs['bar'].id]
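A test built on this fixture would then check that both deltas resolve: 'foo1234' through the external-reference callback into the store, 'bar2468' against the in-pack copy of 'bar'. A minimal sketch, assuming make_pack(True) wires the pack's resolve_ext_ref up to the store as the setUp implies:

def test_get_raw(self):
    pack = self.make_pack(True)
    # External base: 'foo' lives only in self.store.
    self.assertEqual((Blob.type_num, 'foo1234'),
                     pack.get_raw(self.blobs['foo1234'].id))
    # Internal base: 'bar' is carried inside the pack itself.
    self.assertEqual((Blob.type_num, 'bar2468'),
                     pack.get_raw(self.blobs['bar2468'].id))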
def move_in_thin_pack(self, path):
    """Move a specific file containing a pack into the pack directory.

    :note: The file should be on the same file system as the packs
        directory.

    :param path: Path to the pack file.
    """
    data = PackData(path)

    # Write index for the thin pack (do we really need this?)
    temppath = os.path.join(
        self.pack_dir, sha_to_hex(urllib2.randombytes(20)) + ".tempidx")
    data.create_index_v2(temppath, self.get_raw)
    p = Pack.from_objects(data, load_pack_index(temppath))

    # Write a full pack version
    temppath = os.path.join(
        self.pack_dir, sha_to_hex(urllib2.randombytes(20)) + ".temppack")
    write_pack(temppath, ((o, None) for o in p.iterobjects(self.get_raw)),
               len(p))
    pack_sha = load_pack_index(temppath + ".idx").objects_sha1()
    newbasename = os.path.join(self.pack_dir, "pack-%s" % pack_sha)
    os.rename(temppath + ".pack", newbasename + ".pack")
    os.rename(temppath + ".idx", newbasename + ".idx")
    self._add_known_pack(newbasename)
def upload_pack_file(self, path):
    p = PackData(path)
    entries = p.sorted_entries()

    # get the sha1 of the pack, same method as dulwich's move_in_pack()
    pack_sha = iter_sha1(e[0] for e in entries)
    key_prefix = calc_pack_prefix(self.prefix, pack_sha)
    pack_key_name = '%s.pack' % key_prefix

    # FIXME: LOCK HERE? Possibly different pack files could
    # have the same shas, depending on compression?
    log.debug('Uploading %s to %s' % (path, pack_key_name))
    pack_key = self.bucket.new_key(pack_key_name)
    pack_key.set_contents_from_filename(path)

    index_key_name = '%s.idx' % key_prefix
    index_key = self.bucket.new_key(index_key_name)

    index_fd, index_path = tempfile.mkstemp(suffix='.idx')
    try:
        f = os.fdopen(index_fd, 'wb')
        write_pack_index_v2(f, entries, p.get_stored_checksum())
        os.fsync(index_fd)
        f.close()

        log.debug('Uploading %s to %s' % (index_path, index_key_name))
        index_key.set_contents_from_filename(index_path)
    finally:
        os.remove(index_path)

    p.close()
    return self._create_pack(key_prefix)
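On the FIXME: as I read dulwich's iter_sha1, the pack name is a SHA-1 over the sorted object names only, so compression settings cannot change it; two uploads collide exactly when they contain the same set of objects. A sketch of that computation, using only hashlib (the entry layout is assumed from sorted_entries()):

import hashlib

def pack_name_from_entries(entries):
    # entries as yielded by PackData.sorted_entries(): (sha, offset, crc32)
    # tuples already sorted by the 20-byte binary object name. The pack id
    # hashes the concatenated names; offsets and compression never enter.
    ctx = hashlib.sha1()
    for name, _offset, _crc32 in entries:
        ctx.update(name)
    return ctx.hexdigest()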
def fetch_objects(self, determine_wants, graph_walker, progress=None):
    fd, path = tempfile.mkstemp(suffix=".pack")
    self.fetch_pack(determine_wants, graph_walker,
                    lambda x: os.write(fd, x), progress)
    os.close(fd)
    basename = path[:-len(".pack")]
    p = PackData(path)
    p.create_index_v2(basename + ".idx")
    pack = Pack(basename)
    os.remove(path)
    return (len(p), pack.iterobjects())
def move_in_pack(self, path):
    """Move a specific file containing a pack into the pack directory.

    :note: The file should be on the same file system as the packs
        directory.

    :param path: Path to the pack file.
    """
    p = PackData(path)
    entries = p.sorted_entries()
    basename = os.path.join(
        self.pack_dir, "pack-%s" % iter_sha1(entry[0] for entry in entries))
    write_pack_index_v2(basename + ".idx", entries, p.get_stored_checksum())
    os.rename(path, basename + ".pack")
    self._add_known_pack(basename)
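The :note: matters because of the os.rename at the end: a rename only stays atomic and cheap within one filesystem. A hypothetical caller might stage the file like this (object_store and pack_bytes are invented names for illustration):

import os
import tempfile

# Stage the received pack inside the pack directory so the final os.rename
# stays on one filesystem, then let move_in_pack() index and adopt it.
fd, tmp_path = tempfile.mkstemp(dir=object_store.pack_dir, suffix=".pack")
with os.fdopen(fd, "wb") as f:
    f.write(pack_bytes)
object_store.move_in_pack(tmp_path)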
def apply_pack(self, refs, read):
    """Apply a pack from the client to the current repository."""
    fd, path = tempfile.mkstemp(suffix=".pack")
    # Pack data is binary; text mode ('w') would mangle it on Windows.
    f = os.fdopen(fd, 'wb')
    f.write(read())
    f.close()

    p = PackData(path)
    entries = p.sorted_entries()
    # path[:-5] strips the ".pack" suffix added by mkstemp above.
    write_pack_index_v2(path[:-5] + ".idx", entries, p.calculate_checksum())

    def get_objects():
        pack = Pack(path[:-5])
        for obj in pack.iterobjects():
            yield obj

    target = Repository.open(self.directory)
    target.lock_write()
    try:
        target.start_write_group()
        try:
            import_git_objects(target, self.mapping, iter(get_objects()))
        finally:
            target.commit_write_group()
    finally:
        target.unlock()

    for oldsha, sha, ref in refs:
        if ref[:11] == 'refs/heads/':
            branch_nick = ref[11:]
            try:
                target_dir = BzrDir.open(self.directory + "/" + branch_nick)
            except:
                target_dir = BzrDir.create(self.directory + "/" + branch_nick)
            try:
                target_branch = target_dir.open_branch()
            except:
                target_branch = target_dir.create_branch()
            rev_id = self.mapping.revision_id_foreign_to_bzr(sha)
            target_branch.generate_revision_history(rev_id)
def commit():
    f.seek(0)
    pack = PackData(file=f, filename="")
    entries = pack.sorted_entries()
    if len(entries):
        basename = posixpath.join(
            self.pack_dir,
            "pack-%s" % iter_sha1(entry[0] for entry in entries))
        index = BytesIO()
        write_pack_index_v2(index, entries, pack.get_stored_checksum())
        self.scon.put_object(basename + ".pack", f)
        f.close()
        self.scon.put_object(basename + ".idx", index)
        index.close()
        final_pack = SwiftPack(basename, scon=self.scon)
        final_pack.check_length_and_checksum()
        self._add_known_pack(basename, final_pack)
        return final_pack
    else:
        return None
def setUp(self):
    conf = swift.load_conf(file=StringIO(config_file % def_config_file))
    sos = swift.SwiftObjectStore(
        FakeSwiftConnector("fakerepo", conf=conf))
    commit_amount = 10
    self.commits = create_commits(length=commit_amount, marker="m")
    data = [(d.type_num, d.as_raw_string()) for d in self.commits]
    f = StringIO()
    fi = StringIO()
    expected = build_pack(f, data, store=sos)
    entries = [(sha, ofs, checksum) for
               ofs, _, _, sha, checksum in expected]
    self.pack_data = PackData.from_file(file=f, size=None)
    write_pack_index_v2(fi, entries, self.pack_data.calculate_checksum())
    fi.seek(0)
    self.pack_index = load_pack_index_file("", fi)
def move_in_pack(self, path):
    """Move a specific file containing a pack into the pack directory.

    :note: The file should be on the same file system as the packs
        directory.

    :param path: Path to the pack file.
    """
    with PackData(path) as p:
        entries = p.sorted_entries()
        basename = self._get_pack_basepath(entries)
        with GitFile(basename + ".idx", "wb") as f:
            write_pack_index_v2(f, entries, p.get_stored_checksum())
    os.rename(path, basename + ".pack")
    final_pack = Pack(basename)
    self._add_known_pack(basename, final_pack)
    return final_pack
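Unlike the older variants of this function elsewhere in this collection, the context managers here guarantee that both the PackData handle and the index GitFile are closed before os.rename runs. On Windows, renaming a file that still has open handles fails, which is presumably why later dulwich versions settled on this form.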
def data_loader():
    # read and writable temporary file
    pack_tmpfile = tempfile.NamedTemporaryFile()

    # download into temporary file
    log.debug('Downloading pack %s into %s' % (path, pack_tmpfile))
    pack_key = self.bucket.new_key('%s.pack' % path)

    # store
    pack_key.get_contents_to_file(pack_tmpfile)

    log.debug('Filesize is %d' % pack_key.size)
    log.debug('Rewinding...')
    pack_tmpfile.flush()
    pack_tmpfile.seek(0)

    return PackData.from_file(pack_tmpfile, pack_key.size)
def move_in_thin_pack(self, f):
    """Move a specific file containing a pack into the pack directory.

    :note: The file should be on the same file system as the packs
        directory.

    :param f: Open file object for the thin pack.
    """
    f.seek(0)
    p = Pack('', resolve_ext_ref=self.get_raw)
    p._data = PackData.from_file(f, len(f.getvalue()))
    p._data.pack = p
    p._idx_load = lambda: MemoryPackIndex(
        p.data.sorted_entries(), p.data.get_stored_checksum())

    pack_sha = p.index.objects_sha1()

    with self.pack_transport.open_write_stream(
            "pack-%s.pack" % pack_sha.decode('ascii')) as datafile:
        entries, data_sum = write_pack_objects(datafile, p.pack_tuples())
    entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
    with self.pack_transport.open_write_stream(
            "pack-%s.idx" % pack_sha.decode('ascii')) as idxfile:
        write_pack_index_v2(idxfile, entries, data_sum)
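Patching _idx_load here avoids writing a temporary index file at all: the thin pack is indexed in memory via MemoryPackIndex, used once to derive the final pack name, and then rewritten through the transport as a self-contained pack plus index. Contrast this with the older move_in_thin_pack above, which round-trips both a temporary index and a temporary pack through the filesystem.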
def move_in_pack(self, path):
    """Move a specific file containing a pack into the pack directory.

    :note: The file should be on the same file system as the packs
        directory.

    :param path: Path to the pack file.
    """
    p = PackData(path)
    entries = p.sorted_entries()
    basename = os.path.join(
        self.pack_dir, "pack-%s" % iter_sha1(entry[0] for entry in entries))
    f = GitFile(basename + ".idx", "wb")
    try:
        write_pack_index_v2(f, entries, p.get_stored_checksum())
    finally:
        f.close()
    p.close()
    os.rename(path, basename + ".pack")
    final_pack = Pack(basename)
    self._add_known_pack(final_pack)
    return final_pack
def commit():
    p = PackData.from_file(BytesIO(f.getvalue()), f.tell())
    f.close()
    for obj in PackInflater.for_pack_data(p, self.get_raw):
        self.add_object(obj)
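Note the second argument here: passing self.get_raw as PackInflater's external-reference resolver lets it reconstruct REF_DELTA objects whose bases live outside the pack, i.e. thin packs received over the wire. The commit() variants further down omit it, so they can only inflate self-contained packs.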
def test_from_file(self):
    path = os.path.join(self.datadir, "pack-%s.pack" % pack1_sha)
    # Pack files are binary; open in 'rb' so this also behaves on Windows.
    PackData.from_file(open(path, 'rb'), os.path.getsize(path))
def commit():
    p = PackData.from_file(StringIO(f.getvalue()), f.tell())
    f.close()
    self.add_objects([obj for obj in PackInflater.for_pack_data(p)])
def test_from_file(self):
    path = os.path.join(self.datadir, b'pack-' + pack1_sha + b'.pack')
    with open(path, 'rb') as f:
        PackData.from_file(f, os.path.getsize(path))
def commit():
    p = PackData.from_file(StringIO(f.getvalue()), f.tell())
    f.close()
    for obj in PackInflater.for_pack_data(p):
        self._data[obj.id] = obj
def test_from_file(self):
    path = os.path.join(self.datadir, 'pack-%s.pack' % pack1_sha)
    # Pack files are binary; open in 'rb' so this also behaves on Windows.
    PackData.from_file(open(path, 'rb'), os.path.getsize(path))
def get_pack_data(self, sha):
    """Returns a PackData object from the datadir with the given sha"""
    return PackData(os.path.join(self.datadir, 'pack-%s.pack' % sha))
def test_from_file(self):
    path = os.path.join(
        self.datadir, 'pack-%s.pack' % pack1_sha.decode('ascii'))
    with open(path, 'rb') as f:
        PackData.from_file(f, os.path.getsize(path))
def _pack_data() -> PackData:
    pack_data_bytes = self._http_get(f"objects/pack/{pack_name}")
    return PackData(pack_name, file=pack_data_bytes)
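A hypothetical caller for this dumb-HTTP helper, just to show the shape of what comes back (PackData reads the object count from the pack header, so len() works without scanning the file):

pack_data = _pack_data()
print(f"fetched pack {pack_name} holds {len(pack_data)} objects")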
def test_from_file(self):
    path = os.path.join(self.datadir, 'pack-%s.pack' % str(pack1_sha))
    with PackData.from_file(open(path, 'rb'), os.path.getsize(path)) as p:
        pass
def get_pack_data(self, sha):
    """Returns a PackData object from the datadir with the given sha"""
    return PackData(
        os.path.join(self.datadir, "pack-%s.pack" % sha.decode("ascii")))
def test_from_file(self):
    path = os.path.join(
        self.datadir, "pack-%s.pack" % pack1_sha.decode("ascii"))
    with open(path, "rb") as f:
        PackData.from_file(f, os.path.getsize(path))
def commit():
    p = PackData.from_file(BytesIO(f.getvalue()), f.tell())
    f.close()
    for obj in PackInflater.for_pack_data(p):
        self._data[obj.id] = obj
def store():
    p = PackData.from_file(StringIO(fileContents.getvalue()),
                           fileContents.tell())
    for obj in PackInflater.for_pack_data(p):
        self.add_object(obj)