def test_01(self):
    """A saved Changeset round-trips through the database with its defaults.

    Bug fix: the original fetched the stored document into ``out`` but
    then asserted on the in-memory ``cs``, so the database round-trip
    was never actually checked. Assert on ``out`` instead.
    """
    cs = Changeset()
    assert cs.id is None  # no id is assigned before the first save
    cs.author = u'annakarenina'
    cs.save()
    out = Changeset.find_one({'_id': cs.id})
    # Verify the reloaded document, not the in-memory object.
    assert out.author == u'annakarenina'
    assert out.message is None
    assert out.timestamp
    assert out.manifest == []
def test_02(self):
    """A ChangeObject appended to a Changeset manifest is persisted
    and linked back to its changeset."""
    change = ChangeObject()
    change.object_id = (u'package', 1, 2, 3)
    change.operation = ChangeObject.OperationType.CREATE
    change.data = json.dumps({'field1': 'aaaaaa', 'field2': 'bbbbbb'},
                             sort_keys=True)

    changeset = Changeset()
    changeset.manifest.append(change)
    changeset.author = 'xyz'
    changeset.save()

    # Exactly one change object must have been written, pointing at
    # the changeset we just saved.
    stored = list(ChangeObject.find())
    assert len(stored) == 1
    persisted = stored[0]
    assert persisted.changeset.id == changeset.id

    # Reloading the changeset by id yields the same manifest entry.
    reloaded = Changeset.by_id(changeset.id)
    assert len(reloaded.manifest) == 1
    assert reloaded.manifest[0].object_id == persisted.object_id
def test_03_changeset_auto_created(self):
    """Saving model objects records them in the changeset's manifest;
    ``Changeset.youngest()`` returns the most recent changeset."""
    first_cs = Changeset(author='me')
    entry = Entry(name='infinitejest', label='abc')
    entry.save(first_cs)

    latest = Changeset.youngest()
    recorded = latest.manifest[0]
    assert recorded.object_id == ['entry', entry.id]
    assert recorded.data['name'] == 'infinitejest'

    # A second changeset collecting one update and one creation.
    reloaded = Entry.by_id(entry.id)
    reloaded.name = 'hamlet'
    new_entry = Entry(name='horatio')
    second_cs = Changeset(author='you')
    reloaded.save(second_cs)
    new_entry.save(second_cs)

    latest = Changeset.youngest()
    assert len(latest.manifest) == 2
    recorded = latest.manifest[0]
    assert recorded.object_id == ['entry', entry.id]
    assert recorded.data['name'] == 'hamlet'
def __init__(self, dataset_name, unique_keys, label, description=u'',
             metadata=None, currency=u'gbp', time_axis='time.from.year',
             changeset=None):
    '''\
    Constructs a Loader for the :class:`openspending.model.Dataset`
    `dataset_name`.

    Calling the constructor creates or updates the `Dataset` object
    with `dataset_name`, `label`, `description`, `metadata` and
    `currency`. The Loader instance can only be used to create
    :class:`openspending.model.Entry` objects with the same set of
    `unique_keys`. If you need to create another type of ``Entry``
    objects instantiate another ``Loader``.

    ``dataset_name``
        The unique name for the dataset.
    ``unique_keys``
        The keys for which all entries in the dataset are unique.
        For example if you have entries with payments that are
        identifiable by a *department* and a *consecutive number*
        that is unique within the *department*, you would pass in
        a list with the keys ``['department', 'consecutive_number']``.
    ``label``
        A label for the dataset that can be presented to the user.
    ``description``
        A description for the dataset that can be presented to the
        user.
    ``metadata``
        A ``dict`` with metadata that will be saved on the dataset.
    ``currency``
        The default currency for the entries in the dataset. An
        individual currency can be set in :meth:`create_entry`. The
        currency is stored in upper case.
    ``time_axis``
        The time axis of the dataset. This is the time range for
        which all entries in the dataset can be analyzed. The default
        is 'time.from.year' and should not be changed.
        fixme: add details and move possible values into constants
        in model.dataset.
    ``changeset``
        A :class:`openspending.model.Changeset` object. This is only
        required if you load a dataset with more than one loader. If
        you want to add manual changes to the changeset of your
        loader you can retrieve the changeset with *.changeset*.

    Raises:
        ``AssertionError`` if more than one dataset with the name
        ``dataset_name`` exists already.
        ``ValueError`` if a duplicated
        :class:`openspending.model.Entry` object is found (the entry
        has the same values for the ``unique_keys``) or two
        :class:`model.class.Entity` objects are found with the same
        name.
    '''
    # NOTE(review): asserts are stripped under ``python -O``; input
    # validation would survive better as explicit raises. (Python 2
    # code — ``unicode`` is the py2 builtin.)
    assert isinstance(dataset_name, unicode)
    assert isinstance(unique_keys, list)
    check_rest_suffix(dataset_name)

    # create a changeset: one is auto-created for this load run
    # unless the caller supplied one (e.g. when several loaders
    # share a single changeset).
    if changeset is None:
        name = dataset_name
        if label:
            name = "%s (%s)" % (name, label)
        message = ('Load dataset %s. currency: %s, time axis: %s' %
                   (name, currency, time_axis))
        changeset = Changeset()
        changeset.author = 'system'
        changeset.message = message
        changeset.save()
    self.changeset = changeset

    # get the dataset; more than one document with the same name is
    # an integrity error we refuse to work with.
    q = {'name': dataset_name}
    dataset_count = Dataset.find(q).count()
    if dataset_count == 0:
        operation = CREATE
    elif dataset_count == 1:
        operation = UPDATE
    else:
        raise AssertionError("Ambiguous dataset name: %s" % dataset_name)
    data = {"label": label, "currency": currency.upper(),
            "description": description, "time_axis": time_axis}
    if metadata is not None:
        data.update(metadata)
    # Upsert: creates the dataset document if missing, updates the
    # listed fields otherwise; the resulting state is recorded as a
    # change object on the loader's changeset.
    Dataset.c.update(q, {"$set": data}, upsert=True)
    self.dataset = Dataset.find_one(q)
    self._add_changeobj(Dataset.c.name, self.dataset.id, self.dataset,
                        operation)
    # All entry lookups performed by this loader are scoped to this
    # dataset via base_query.
    self.base_query = {"dataset._id": self.dataset.id}

    # caches
    self.entity_cache = {}
    self.classifier_cache = {}

    self.unique_keys = unique_keys

    # We need indexes to speed up lookups and updates
    self.ensure_index(Entry, ['dataset._id'])
    self.ensure_index(Entry, ['dataset.name'])
    self.ensure_index(Entry, ['classifiers'])
    self.ensure_index(Entry, ['entities'])
    self.ensure_index(Entry, ['from._id'])
    self.ensure_index(Entry, ['to._id'])
    self.ensure_index(Entry, ['to._id', 'from._id', 'amount'])
    self.ensure_index(Classifier, ['taxonomy', 'name'])
    self.ensure_index(Dimension, ['dataset', 'key'])
    self.ensure_index(Entity, ['name'])
    # fixme: The entry.name index might be dropped when Base.by_id()
    # changes. The 'name' field for entries is not interesting.
    self.ensure_index(Entry, ['name'])

    # Make sure entries and entities are unique
    self.existing_entries = self._ensure_unique(Entry, self.unique_keys,
                                                self.base_query)
    self._ensure_unique(Entity, ['name'])

    # info's needed to print statistics during the run
    self.num_entries = 0
    self.start_time = None