def move_in_thin_pack(self, f):
    """Move a specific file containing a pack into the pack directory.

    :note: The file should be on the same file system as the
        packs directory.

    :param f: Open file object for the thin pack.
    """
    f.seek(0)
    p = Pack('', resolve_ext_ref=self.get_raw)
    p._data = PackData.from_file(f, len(f.getvalue()))
    p._data.pack = p
    p._idx_load = lambda: MemoryPackIndex(
        p.data.sorted_entries(), p.data.get_stored_checksum())

    pack_sha = p.index.objects_sha1()

    datafile = self.pack_transport.open_write_stream(
        "pack-%s.pack" % pack_sha.decode('ascii'))
    try:
        entries, data_sum = write_pack_objects(datafile, p.pack_tuples())
    finally:
        datafile.close()
    entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
    idxfile = self.pack_transport.open_write_stream(
        "pack-%s.idx" % pack_sha.decode('ascii'))
    try:
        write_pack_index_v2(idxfile, entries, data_sum)
    finally:
        idxfile.close()
    # TODO(jelmer): Just add new pack to the cache
    self._flush_pack_cache()
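# Hedged usage sketch, not part of the original source: feeding a thin
# pack (one whose deltas may reference objects outside the pack itself)
# into a store with the method above. `object_store` and
# `thin_pack_bytes` are hypothetical names; move_in_thin_pack expects a
# seekable in-memory buffer, since it sizes the pack via len(f.getvalue()).
from io import BytesIO

def receive_thin_pack(object_store, thin_pack_bytes):
    object_store.move_in_thin_pack(BytesIO(thin_pack_bytes))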
def setUp(self):
    conf = swift.load_conf(file=StringIO(config_file % def_config_file))
    sos = swift.SwiftObjectStore(
        FakeSwiftConnector("fakerepo", conf=conf))
    commit_amount = 10
    self.commits = create_commits(length=commit_amount, marker="m")
    data = [(d.type_num, d.as_raw_string()) for d in self.commits]
    f = StringIO()
    fi = StringIO()
    expected = build_pack(f, data, store=sos)
    entries = [(sha, ofs, checksum) for
               ofs, _, _, sha, checksum in expected]
    self.pack_data = PackData.from_file(file=f, size=None)
    write_pack_index_v2(
        fi, entries, self.pack_data.calculate_checksum())
    fi.seek(0)
    self.pack_index = load_pack_index_file("", fi)
def setUp(self):
    conf = swift.load_conf(file=BytesIO(config_file % def_config_file))
    sos = swift.SwiftObjectStore(
        FakeSwiftConnector('fakerepo', conf=conf))
    commit_amount = 10
    self.commits = create_commits(length=commit_amount, marker="m")
    data = [(d.type_num, d.as_raw_string()) for d in self.commits]
    f = BytesIO()
    fi = BytesIO()
    expected = build_pack(f, data, store=sos)
    entries = [(sha, ofs, checksum) for
               ofs, _, _, sha, checksum in expected]
    self.pack_data = PackData.from_file(file=f, size=None)
    write_pack_index_v2(
        fi, entries, self.pack_data.calculate_checksum())
    fi.seek(0)
    self.pack_index = load_pack_index_file('', fi)
def iter_objects(self, object_type: bytes) -> Iterator[ShaFile]:
    """Read all the objects of type `object_type` from the packfile."""
    if self.dumb:
        yield from self.dumb_fetcher.iter_objects(object_type)
    else:
        self.pack_buffer.seek(0)
        count = 0
        for obj in PackInflater.for_pack_data(
            PackData.from_file(self.pack_buffer, self.pack_size)
        ):
            if obj.type_name != object_type:
                continue
            yield obj
            count += 1
        logger.debug(
            "packfile_read_count_%s=%s", object_type.decode(), count)
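# Hedged usage sketch, assuming `fetcher` is an instance of the class
# above with its pack buffer already populated: dulwich type names are
# bytes (b"commit", b"tree", b"blob", b"tag"), so the filter argument
# must be bytes too.
commit_count = sum(1 for obj in fetcher.iter_objects(b"commit"))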
def data_loader():
    # Readable and writable temporary file
    pack_tmpfile = tempfile.NamedTemporaryFile()

    # Download the pack into the temporary file
    log.debug('Downloading pack %s into %s' % (path, pack_tmpfile))
    pack_key = self.bucket.new_key('%s.pack' % path)
    pack_key.get_contents_to_file(pack_tmpfile)

    log.debug('Filesize is %d' % pack_key.size)
    log.debug('Rewinding...')
    pack_tmpfile.flush()
    pack_tmpfile.seek(0)

    return PackData.from_file(pack_tmpfile, pack_key.size)
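# Hedged sketch: a loader like the one above is typically paired with an
# index loader and handed to dulwich's Pack.from_lazy_objects(), so the
# S3 download only happens on first access. `idx_loader` is a
# hypothetical callable returning a PackIndex.
from dulwich.pack import Pack

pack = Pack.from_lazy_objects(data_loader, idx_loader)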
def test_from_file(self): path = os.path.join(self.datadir, "pack-%s.pack" % pack1_sha) PackData.from_file(open(path), os.path.getsize(path))
def commit():
    p = PackData.from_file(BytesIO(f.getvalue()), f.tell())
    f.close()
    for obj in PackInflater.for_pack_data(p, self.get_raw):
        self.add_object(obj)
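# Hedged usage sketch: commit() callbacks like the one above are handed
# out by an object store's add_pack(); the exact return shape varies
# across dulwich versions (newer ones also return an abort() callback).
# `object_store` and `pack_bytes` are hypothetical names.
f, commit = object_store.add_pack()
f.write(pack_bytes)
commit()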
def test_from_file(self):
    path = os.path.join(self.datadir, b'pack-' + pack1_sha + b'.pack')
    with open(path, 'rb') as f:
        PackData.from_file(f, os.path.getsize(path))
def commit():
    p = PackData.from_file(StringIO(f.getvalue()), f.tell())
    f.close()
    for obj in PackInflater.for_pack_data(p):
        self._data[obj.id] = obj
def test_from_file(self):
    path = os.path.join(
        self.datadir, 'pack-%s.pack' % pack1_sha.decode('ascii'))
    with open(path, 'rb') as f:
        PackData.from_file(f, os.path.getsize(path))
def test_from_file(self):
    path = os.path.join(self.datadir, 'pack-%s.pack' % pack1_sha)
    with open(path, 'rb') as f:
        PackData.from_file(f, os.path.getsize(path))
def test_from_file(self): path = os.path.join(self.datadir, "pack-%s.pack" % pack1_sha.decode("ascii")) with open(path, "rb") as f: PackData.from_file(f, os.path.getsize(path))
def commit():
    p = PackData.from_file(StringIO(f.getvalue()), f.tell())
    f.close()
    self.add_objects(list(PackInflater.for_pack_data(p)))
def commit():
    p = PackData.from_file(BytesIO(f.getvalue()), f.tell())
    f.close()
    for obj in PackInflater.for_pack_data(p):
        self._data[obj.id] = obj
def test_from_file(self):
    path = os.path.join(self.datadir, 'pack-%s.pack' % str(pack1_sha))
    with PackData.from_file(open(path, 'rb'), os.path.getsize(path)) as p:
        pass
def store():
    p = PackData.from_file(
        StringIO(fileContents.getvalue()), fileContents.tell())
    for obj in PackInflater.for_pack_data(p):
        self.add_object(obj)