Example #1
    @classmethod
    def unpack(cls, name, data):
        """
        Create a :class:`View` instance from a view ``name``
        and a data structure ``data`` (as created by
        :meth:`pack`). Internal method, not part of the API.

        """
        dataset = Dataset.find_one({'name': data.get('dataset')})
        return cls(dataset, name, data.get('label'), data.get('dimension'),
                   drilldown=data.get('drilldown'),
                   cuts=data.get('cuts', {}))
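
A minimal usage sketch for ``unpack``, assuming a hypothetical packed ``data`` mapping; the dataset name, label and dimension values below are not from the source, only the keys mirror what ``unpack`` reads:

    # Hypothetical packed view data; only the keys read by unpack() are shown.
    data = {
        'dataset': 'test-csv',          # resolved via Dataset.find_one({'name': ...})
        'label': 'Spending by region',  # assumed label
        'dimension': 'region',          # assumed dimension
        'drilldown': 'department',      # optional
        'cuts': {},                     # optional, defaults to {}
    }
    view = View.unpack('my_view', data)
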
Example #2
    def test_successful_import(self):
        data = csv_fixture("successful_import")
        model = csv_fixture_model()
        importer = CSVImporter(data, model)
        importer.run()
        dataset = Dataset.find_one()
        h.assert_true(dataset is not None, "Dataset should not be None")
        h.assert_equal(dataset.name, "test-csv")
        entries = list(Entry.find({"dataset.name": dataset.name}))
        h.assert_equal(len(entries), 4)
        entry = Entry.find_one({"provenance.line": 2})
        h.assert_true(entry is not None, "Entry with name could not be found")
        h.assert_equal(entry.amount, 130000.0)
Example #3
    def test_successful_import_with_simple_testdata(self):
        data = csv_fixture("simple")
        model = csv_fixture_model(name="simple")
        importer = CSVImporter(data, model)
        importer.run()
        h.assert_equal(importer.errors, [])

        dataset = Dataset.find_one()
        h.assert_true(dataset is not None, "Dataset should not be None")

        entries = list(Entry.find({"dataset.name": dataset.name}))
        h.assert_equal(len(entries), 5)

        entry = entries[0]
        h.assert_equal(entry["from"]["label"], "Test From")
        h.assert_equal(entry["to"]["label"], "Test To")
        h.assert_equal(entry["time"]["unparsed"], "2010-01-01")
        h.assert_equal(entry["amount"], 100.00)
Example #4
    def test_entry_custom_html(self):
        assert self.dat.entry_custom_html is None
        self.dat.entry_custom_html = '<span>custom html</span>'
        self.dat.save()

        assert Dataset.find_one().entry_custom_html == '<span>custom html</span>'
Example #5
    def _make_dataset(self, name='testdataset'):
        dataset = Dataset.find_one(name)
        if dataset is None:
            dataset = Dataset(name=name)
            dataset.save()
        return dataset
    def __init__(self, dataset_name, unique_keys, label, description=u'',
                 metadata=None, currency=u'gbp', time_axis='time.from.year',
                 changeset=None):
        '''\
        Constructs a Loader for the :class:`openspending.model.Dataset`
        `dataset_name`. Calling the constructor creates or updates the
        `Dataset` object with `dataset_name`, `label`, `description`,
        `metadata` and `currency`. The Loader instance can only be used
        to create :class:`openspending.model.Entry` objects with the same set
        of `unique_keys`. If you need to create a different type of
        ``Entry`` object, instantiate another ``Loader``.

        ``dataset_name``
            The unique name for the dataset.
        ``unique_keys``
            The keys for which all entries in the dataset are unique.
            For example, if you have entries for payments that are
            identifiable by a *department* and a *consecutive number*
            that is unique within the *department*, you would pass in
            a list with the keys ``['department', 'consecutive_number']``.
        ``label``
            A label for the dataset that can be presented to the user.
        ``description``
            A description for the dataset that can be presented
            to the user.
        ``metadata``
            A ``dict`` with metadata that will be saved on the dataset.
        ``currency``
            The default currency for the entries in the dataset. An
            individual currency can be set in :meth:`create_entry`.
            The currency is stored in upper case.
        ``time_axis``
            The time axis of the dataset. This is the time range for which
            all entries in the dataset can be analyzed. The default is
            'time.from.year' and should not be changed.
            fixme: add details and move possible values into constants in
            model.dataset.
        ``changeset``
            A :class:`openspending.model.Changeset` object. This is only required
            if you load a dataset with more than one loader. If you
            want to add manual changes to the changeset of your loader,
            you can retrieve the changeset with *.changeset*.

        Raises:
            ``AssertionError``
                If more than one dataset with the name ``dataset_name``
                already exists.
            ``ValueError``
                If a duplicated :class:`openspending.model.Entry` object
                is found (the entry has the same values for the
                ``unique_keys``) or two :class:`openspending.model.Entity`
                objects are found with the same name.
        '''
        assert isinstance(dataset_name, unicode)
        assert isinstance(unique_keys, list)
        check_rest_suffix(dataset_name)

        # create a changeset:
        if changeset is None:
            name = dataset_name
            if label:
                name = "%s (%s)" % (name, label)
            message = ('Load dataset %s. currency: %s, time axis: %s' %
                       (name, currency, time_axis))
            changeset = Changeset()
            changeset.author = 'system'
            changeset.message = message
            changeset.save()
        self.changeset = changeset

        # get the dataset
        q = {'name': dataset_name}
        dataset_count = Dataset.find(q).count()
        if dataset_count == 0:
            operation = CREATE
        elif dataset_count == 1:
            operation = UPDATE
        else:
            raise AssertionError("Ambiguous dataset name: %s" % dataset_name)
        data = {"label": label,
                "currency": currency.upper(),
                "description": description,
                "time_axis": time_axis}
        if metadata is not None:
            data.update(metadata)
        Dataset.c.update(q, {"$set": data}, upsert=True)
        self.dataset = Dataset.find_one(q)
        self._add_changeobj(Dataset.c.name, self.dataset.id, self.dataset,
                            operation)
        self.base_query = {"dataset._id": self.dataset.id}

        # caches
        self.entity_cache = {}
        self.classifier_cache = {}
        self.unique_keys = unique_keys

        # We need indexes to speed up lookups and updates
        self.ensure_index(Entry, ['dataset._id'])
        self.ensure_index(Entry, ['dataset.name'])
        self.ensure_index(Entry, ['classifiers'])
        self.ensure_index(Entry, ['entities'])
        self.ensure_index(Entry, ['from._id'])
        self.ensure_index(Entry, ['to._id'])
        self.ensure_index(Entry, ['to._id', 'from._id', 'amount'])
        self.ensure_index(Classifier, ['taxonomy', 'name'])
        self.ensure_index(Dimension, ['dataset', 'key'])
        self.ensure_index(Entity, ['name'])
        # fixme: The entry.name index might be dropped when Base.by_id()
        #        changes. The 'name' field for entries is not interesting.
        self.ensure_index(Entry, ['name'])

        # Make sure entries and entities are unique
        self.existing_entries = self._ensure_unique(Entry, self.unique_keys,
                                                    self.base_query)
        self._ensure_unique(Entity, ['name'])

        # information needed to print statistics during the run
        self.num_entries = 0
        self.start_time = None
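
A minimal instantiation sketch for this constructor, following the ``unique_keys`` example from the docstring; the dataset name, label and description below are assumptions, not taken from the source:

    # Hypothetical Loader setup; dataset name, label and description are assumptions.
    loader = Loader(
        dataset_name=u'department-spending',
        unique_keys=['department', 'consecutive_number'],
        label=u'Departmental spending',
        description=u'Payments identified by department and consecutive number',
        currency=u'gbp',  # stored in upper case on the Dataset
    )
    # The created or updated Dataset document can then be looked up by name:
    dataset = Dataset.find_one({'name': u'department-spending'})
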