def test_transitive_keep(self):
    with Connector().create(self.store1, "public.data", "test.ignore") as w1:
        with Connector().create(self.store1, "public.data", "test.ignore") as w2:
            w2.write('FILE', 'test')
            w2.commit()
            doc2 = w2.getDoc()
            rev2 = w2.getRev()

            # create a reference from w1 to w2
            w1.write('PDSD', struct.dumps([struct.DocLink(self.store2, doc2)]))
            w1.commit()
            doc1 = w1.getDoc()
            rev1 = w1.getRev()

        # w2 is closed now, w1 still open, should prevent gc
        self.gc(self.store1)

        l = Connector().lookupDoc(doc1)
        self.assertEqual(l.revs(), [rev1])
        self.assertEqual(l.preRevs(), [])
        self.assertEqual(Connector().lookupRev(rev1), [self.store1])

        l = Connector().lookupDoc(doc2)
        self.assertEqual(l.revs(), [rev2])
        self.assertEqual(l.preRevs(), [])
        self.assertEqual(Connector().lookupRev(rev2), [self.store1])
def save(self):
    if not self.__changed:
        return
    rules = [ rule for rule in self.__rules.values() ]
    with Connector().update(self.sysStore, self.syncDoc, self.syncRev) as w:
        w.writeAll('PDSD', struct.dumps(rules))
        w.commit()
        # remember the new revision so the next update starts from it
        self.syncRev = w.getRev()
def docMergePerform(self, writer, baseReader, mergeReaders, changedParts):
    conflicts = super(FolderWidget, self).docMergePerform(writer, baseReader,
        mergeReaders, changedParts)

    if 'PDSD' in changedParts:
        basePdsd = struct.loads(self.store(), baseReader.readAll('PDSD'))
        mergePdsd = []
        for r in mergeReaders:
            mergePdsd.append(struct.loads(self.store(), r.readAll('PDSD')))
        (newPdsd, newConflict) = struct.merge(basePdsd, mergePdsd)
        conflicts = conflicts or newConflict
        writer.writeAll('PDSD', struct.dumps(newPdsd))

    return conflicts
def test_sync_merge(self):
    (doc, rev1, rev2) = self.createMerge(
        "org.peerdrive.folder",
        {"META": struct.dumps({"a": 1}), "PDSD": struct.dumps([{"": 1}, {"": 2}])},
        {"META": struct.dumps({"a": 4, "b": 2}), "PDSD": struct.dumps([{"": 1}, {"": 2}, {"": 3}])},
        {"META": struct.dumps({"a": 1, "c": 3}), "PDSD": struct.dumps([{"": 2}])})

    l = self.performSync(doc, "merge")

    rev = l.revs()[0]
    s = Connector().stat(rev)
    self.assertEqual(len(s.parents()), 2)
    self.assertTrue(rev1 in s.parents())
    self.assertTrue(rev2 in s.parents())

    # all revs on all stores?
    l = Connector().lookupRev(rev1)
    self.assertTrue(self.store1 in l)
    self.assertTrue(self.store2 in l)
    l = Connector().lookupRev(rev2)
    self.assertTrue(self.store1 in l)
    self.assertTrue(self.store2 in l)

    # see if merge was ok
    with Connector().peek(self.store1, rev) as r:
        meta = struct.loads(self.store1, r.readAll("META"))
        if "org.peerdrive.annotation" in meta:
            del meta["org.peerdrive.annotation"]
        self.assertEqual(meta, {"a": 4, "b": 2, "c": 3})

        pdsd = sorted(struct.loads(self.store1, r.readAll("PDSD")))
        self.assertEqual(pdsd, [{"": 2}, {"": 3}])
def setColumnData(self, index, data):
    if self.__doc and (self.__metaData is not None):
        meta = copy.deepcopy(self.__metaData)  # make sure we can revert!
        self.__columnDefs[index].update(meta, data)
        try:
            with Connector().update(self.__store, self.__doc, self.__rev) as w:
                w.writeAll('META', struct.dumps(meta))
                w.commit()
                self.__rev = w.getRev()
            self.__metaData = meta
            self.__columnValues[index] = data
            return True
        except IOError:
            pass

    return False
def test_sticky(self):
    # create the document which should get replicated
    w = self.create(self.store1)
    w.writeAll("FILE", "foobar")
    w.commit()
    doc = w.getDoc()
    rev = w.getRev()

    # create sticky container on first store
    s = struct.Folder()
    with s.create(self.store1, "foo") as dummy:
        s.append(struct.DocLink(self.store1, doc))
        s.save()
        contDoc = s.getDoc()

    # need a dummy folder on both stores
    self.createCommon(
        [self.store1, self.store2],
        "org.peerdrive.folder",
        data={"PDSD": struct.dumps([{"": struct.DocLink(self.store1, contDoc)}])})

    watch1 = self.watchDoc(doc, connector.Watch.EVENT_REPLICATED)
    watch2 = self.watchRev(rev, connector.Watch.EVENT_REPLICATED)

    # now replicate the folder to 2nd store
    Connector().replicateDoc(self.store1, contDoc, self.store2)

    # wait for sticky replication to happen
    self.assertTrue(watch1.waitForWatch())
    self.assertTrue(watch2.waitForWatch())

    # check doc (with rev) to exist on all stores
    l = Connector().lookupDoc(doc)
    self.assertEqual(l.revs(), [rev])
    self.assertEqual(len(l.stores(rev)), 2)
    self.assertTrue(self.store1 in l.stores(rev))
    self.assertTrue(self.store2 in l.stores(rev))

    l = Connector().lookupRev(rev)
    self.assertEqual(len(l), 2)
    self.assertTrue(self.store1 in l)
    self.assertTrue(self.store2 in l)
def createCommon(self, stores, type="public.data", creator="org.peerdrive.test-py", data={}):
    leadStore = stores.pop()
    w = self.create(leadStore, type, creator)
    for (part, blob) in data.items():
        w.writeAll(part, blob)
    w.commit()
    doc = w.getDoc()
    rev = w.getRev()

    for store in stores:
        w = self.create(store)
        w.writeAll('PDSD', struct.dumps([struct.DocLink(store, doc)]))
        w.commit()
        Connector().replicateDoc(leadStore, doc, store)

    # verify the common document on all stores
    l = Connector().lookupDoc(doc)
    self.assertEqual(l.revs(), [rev])
    for store in stores:
        self.assertTrue(store in l.stores())

    return (doc, rev)
def __save(self):
    self.__buttonBox.button(QtGui.QDialogButtonBox.Save).setEnabled(False)

    with Connector().peek(self.__store, self.__rev) as r:
        metaData = struct.loads(self.__store, r.readAll('META'))

    setMetaData(metaData, ["org.peerdrive.annotation", "title"],
        self.__annoTab.getTitle())
    setMetaData(metaData, ["org.peerdrive.annotation", "description"],
        self.__annoTab.getDescription())

    tagString = self.__annoTab.getTags()
    if tagString is not None:
        tagList = [ tag.strip() for tag in tagString.split(',') ]
        tagList = [ tag for tag in tagList if tag != '' ]
        tagList = list(set(tagList))
        setMetaData(metaData, ["org.peerdrive.annotation", "tags"], tagList)

    with Connector().update(self.__store, self.__doc, self.__rev) as writer:
        writer.writeAll('META', struct.dumps(metaData))
        writer.commit()
        self.__rev = writer.getRev()

    self.__switchStore(self.__store)
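# setMetaData() is used above but not defined in this excerpt. A minimal sketch
# of such a helper, assuming it walks the nested meta dictionary along the given
# key path and creates intermediate levels as needed (hypothetical
# implementation, not necessarily the project's own):
def setMetaData(metaData, path, value):
    item = metaData
    # descend to the parent of the leaf, creating missing levels on the way
    for key in path[:-1]:
        if key not in item:
            item[key] = {}
        item = item[key]
    # set the leaf value
    item[path[-1]] = value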
def doSave(self, handle):
    data = [ item.getItem() for item in self._listing ]
    handle.writeAll('PDSD', struct.dumps(data))
    self.__changedContent = False
          <content> = local file | json data
    """
    sys.exit(1)


# === main

if len(sys.argv) < 4:
    usage()

# parse command line
objPath = sys.argv[1]
objUti = sys.argv[2]
objSpec = []
for spec in sys.argv[3:]:
    if spec[4] != ':':
        usage()
    fourCC = spec[:4]
    content = spec[5:]
    if os.path.isfile(content):
        with open(content, "rb") as file:
            part = file.read()
    else:
        part = struct.dumps(struct.loadJSON(content))
    objSpec.append((fourCC, part))

# let's do it
if not importObjectByPath(objPath, objUti, objSpec):
    print "Import failed"
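# Example invocation (hypothetical script name, path and part contents; the
# command line form "<path> <uti> <FourCC>:<content> ..." follows the parsing
# code above):
#
#   ./import.py "store1/Imported Doc" public.data \
#       META:'{"org.peerdrive.annotation": {"title": "Imported Doc"}}' \
#       FILE:./payload.bin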