Example #1
def iter_fixture_items_for_data_type(domain, data_type_id):
    from corehq.apps.fixtures.models import FixtureDataItem
    # import path assumed per the HQ codebase
    from corehq.util.couch_helpers import paginate_view
    for row in paginate_view(FixtureDataItem.get_db(),
                             'fixtures/data_items_by_domain_type',
                             chunk_size=1000,
                             startkey=[domain, data_type_id],
                             endkey=[domain, data_type_id, {}],
                             reduce=False,
                             include_docs=True):
        yield FixtureDataItem.wrap(row['doc'])
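A minimal usage sketch for the generator above; the domain and data type id
are placeholder values. Because paginate_view pages through the view lazily,
nothing is fetched until iteration begins:

# Illustrative only: stream a large table without holding it all in memory.
for item in iter_fixture_items_for_data_type('my-domain', 'some-data-type-id'):
    print(item.fields)  # each row arrives as a wrapped FixtureDataItem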
Example #2
from corehq.apps.fixtures.models import (
    FixtureDataItem,
    FixtureDataType,
    FixtureTypeField,
)
from corehq.util.couch import IterDB  # import path assumed per the HQ codebase


def _import_fixtures(domain):
    for fixture_name, filename in [
        ('recipes', 'recipes.csv'),
        ('conv_factors', 'conv_factors.csv'),
        ('food_list', 'food_list.csv'),
        ('food_composition_table', 'food_composition_table.csv'),
    ]:
        fields, rows = _read_csv(filename)
        data_type = FixtureDataType(
            domain=domain,
            tag=fixture_name,
            fields=[FixtureTypeField(field_name=field) for field in fields],
        )
        data_type.save()

        with IterDB(FixtureDataItem.get_db(), chunksize=1000) as iter_db:
            for vals in rows:
                fixture_data_item = _mk_fixture_data_item(
                    domain, data_type._id, fields, vals)
                iter_db.save(fixture_data_item)
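_read_csv and _mk_fixture_data_item are module-local helpers that this excerpt
does not include. As a rough sketch of the latter, assuming the standard
FieldList/FixtureItemField structure from the fixture models (the helper's
actual body may differ):

from corehq.apps.fixtures.models import FieldList, FixtureItemField

def _mk_fixture_data_item(domain, data_type_id, fields, vals):
    # Sketch: pair each field name with its CSV value, wrapped in the
    # structure the fixture models expect.
    return FixtureDataItem(
        domain=domain,
        data_type_id=data_type_id,
        fields={
            field: FieldList(field_list=[FixtureItemField(field_value=val, properties={})])
            for field, val in zip(fields, vals)
        },
    )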
Example #3
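# Note: this excerpt relies on module-level imports and helpers not shown here
# (uuid, chunked, DownloadBase, gettext as _, the fixture models,
# clear_fixture_cache / clear_fixture_quickcache, and _process_item_field).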
def _run_fast_fixture_upload(domain, workbook, task=None):
    """This upload should be much faster than the default _run_fixture_upload with the following trade-offs:

    * Does not support any fixture ownership. All fixtures must be global
    * Manually creates the JSON documents instead of using models
    * Delays all fixture item deletes to an asynchronous task
    * Creates tables one by one instead of attempting an "all or nothing" approach
    """
    return_val = FixtureUploadResult()

    type_sheets = workbook.get_all_type_sheets()
    for table_definition in type_sheets:
        if not table_definition.is_global:
            return_val.errors.append(
                _("type {lookup_table_name} is not defined as global").format(
                    lookup_table_name=table_definition.table_id
                )
            )
            return return_val
    total_tables = len(type_sheets)
    return_val.number_of_fixtures = total_tables

    def _update_progress(table_count, item_count, items_in_table):
        if task:
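            # each completed table contributes 10 progress units; the current
            # table adds a 0-10 fraction for the rows processed so far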
            processed = table_count * 10 + (10 * item_count / items_in_table)
            DownloadBase.set_progress(task, processed, 10 * total_tables)

    existing_data_types_by_tag = {
        data_type.tag: data_type
        for data_type in FixtureDataType.by_domain(domain)
    }
    for table_number, table_def in enumerate(type_sheets):
        data_type = {
            "_id": uuid.uuid4().hex,
            "doc_type": "FixtureDataType",
            "domain": domain,
            "is_global": True,
            "description": None,
            "fields": [field.to_json() for field in table_def.fields],
            "copy_from": None,
            "tag": table_def.table_id,
            "item_attributes": table_def.item_attributes
        }

        data_item_docs_to_save = []
        data_items = list(workbook.get_data_sheet(data_type['tag']))
        items_in_table = len(data_items)
        for sort_key, di in enumerate(data_items):
            _update_progress(table_number, sort_key, items_in_table)
            type_fields = table_def.fields
            item_fields = {
                field.field_name: _process_item_field(field, di).to_json()
                for field in type_fields
            }

            item_attributes = di.get('property', {})
            data_item = {
                "_id": uuid.uuid4().hex,
                "doc_type": "FixtureDataItem",
                "domain": domain,
                "sort_key": sort_key,
                "fields": item_fields,
                "data_type_id": data_type['_id'],
                "item_attributes": item_attributes
            }
            data_item_docs_to_save.append(data_item)

        # Save all the data items before the data type itself. This ensures
        # the type is only created once every item exists, so a failure here
        # cannot leave a data type pointing at a partially uploaded table.
        try:
            for docs in chunked(data_item_docs_to_save, 1000):
                FixtureDataItem.get_db().save_docs(docs)
        except (BulkSaveError, HTTPError):
            return_val.errors.append(
                _("Error occurred while creating {lookup_table_name}. This table was not created").format(
                    lookup_table_name=data_type['tag']
                )
            )
            continue

        data_type_docs = [data_type]
        existing_data_type = existing_data_types_by_tag.get(data_type['tag'])
        if existing_data_type:
            # delete the old data type in the same request
            data_type_docs.append({
                "_id": existing_data_type._id,
                "_rev": existing_data_type._rev,
                "_deleted": True
            })

        # the following save_docs can result in two issues:
        # * the delete fails, new doc save succeeds meaning that there are two data types with the same tag
        # * the delete succeeds, new doc save fails meaning that there is no data type with the desired tag
        try:
            FixtureDataType.get_db().save_docs(data_type_docs)
        except (BulkSaveError, HTTPError):
            return_val.errors.append(
                _("Error occurred while creating {lookup_table_name}. This table was not created").format(
                    lookup_table_name=data_type['tag']
                )
            )
            continue
        if existing_data_type:
            return_val.messages.append(
                _("Pre-existing definition of {lookup_table_name} deleted").format(
                    lookup_table_name=existing_data_type.tag
                )
            )
        return_val.messages.append(
            _("Table {lookup_table_name} successfully uploaded").format(lookup_table_name=data_type['tag']),
        )

        if existing_data_type:
            from corehq.apps.fixtures.tasks import delete_unneeded_fixture_data_item
            # delay removing data items for the previously deleted type, as that
            # requires a couch view hit which introduces another opportunity for failure
            delete_unneeded_fixture_data_item.delay(domain, existing_data_type._id)
            clear_fixture_quickcache(domain, [existing_data_type])
        clear_fixture_cache(domain)

    return return_val
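To make the progress arithmetic in _update_progress concrete, here is a worked
example with illustrative numbers: three tables, halfway through the second.

total_tables = 3                        # so the denominator is 10 * 3 = 30
table_count, item_count, items_in_table = 1, 50, 100
processed = table_count * 10 + (10 * item_count / items_in_table)
print(processed)                        # 15.0 out of 30, reported as 50% done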
Example #4
def delete_fixture_items_for_data_type(domain, data_type_id):
    from corehq.apps.fixtures.models import FixtureDataItem
    # import path assumed per the HQ codebase
    from dimagi.utils.couch.database import iter_bulk_delete
    iter_bulk_delete(FixtureDataItem.get_db(), [
        i["_id"]
        for i in iter_fixture_items_for_data_type(domain, data_type_id)
    ])
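Note that the list comprehension above materializes every id before the first
delete. For very large tables a streaming variant is possible; a sketch,
assuming chunked from dimagi.utils.chunked (the chunk size is illustrative):

from dimagi.utils.chunked import chunked

def delete_fixture_items_streaming(domain, data_type_id):
    # Sketch: delete ids in batches as they arrive, rather than building
    # the full id list up front.
    from corehq.apps.fixtures.models import FixtureDataItem
    db = FixtureDataItem.get_db()
    ids = (i["_id"] for i in iter_fixture_items_for_data_type(domain, data_type_id))
    for id_chunk in chunked(ids, 1000):
        iter_bulk_delete(db, list(id_chunk))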