Example #1
def _run_fixture_upload(domain, workbook, replace=False, task=None):
    from corehq.apps.users.bulkupload import GroupMemoizer
    return_val = FixtureUploadResult()
    group_memoizer = GroupMemoizer(domain)
    get_location = get_memoized_location_getter(domain)
    data_types = []

    type_sheets = workbook.get_all_type_sheets()
    total_tables = len(type_sheets)
    return_val.number_of_fixtures = total_tables

    def _update_progress(table_count, item_count, items_in_table):
        if task:
            processed = table_count * 10 + (10 * item_count / items_in_table)
            DownloadBase.set_progress(task, processed, 10 * total_tables)

    with CouchTransaction() as transaction:
        for table_number, table_def in enumerate(type_sheets):
            data_type, delete, err = _create_data_type(domain, table_def,
                                                       replace, transaction)
            return_val.errors.extend(err)
            if delete:
                continue
            transaction.save(data_type)
            data_types.append(data_type)

            data_items = list(workbook.get_data_sheet(data_type.tag))
            items_in_table = len(data_items)
            for sort_key, di in enumerate(data_items):
                _update_progress(table_number, sort_key, items_in_table)
                type_fields = data_type.fields
                item_fields = {
                    field.field_name: _process_item_field(field, di)
                    for field in type_fields
                }

                item_attributes = di.get('property', {})
                old_data_item, delete, err = _process_data_item(
                    domain, replace, data_type, di, item_fields,
                    item_attributes, sort_key)
                return_val.errors.extend(err)
                if delete:
                    old_data_item.recursive_delete(transaction)
                    continue
                transaction.save(old_data_item)

                err = _process_group_ownership(di, old_data_item,
                                               group_memoizer, transaction)
                return_val.errors.extend(err)

                err = _process_user_ownership(di, old_data_item, transaction)
                return_val.errors.extend(err)

                err = _process_location_ownership(di, old_data_item,
                                                  get_location, transaction)
                return_val.errors.extend(err)

    clear_fixture_quickcache(domain, data_types)
    clear_fixture_cache(domain)
    return return_val
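
A minimal sketch of how a caller might drive this helper, assuming a hypothetical get_workbook parser that returns the workbook interface used above (get_all_type_sheets / get_data_sheet); the wrapper name and parser are illustrative and not part of the snippet:

def upload_fixture_file(domain, file_obj, replace=False, task=None):
    # Illustrative driver only: get_workbook is an assumed helper that parses
    # the uploaded spreadsheet into the workbook object expected above.
    workbook = get_workbook(file_obj)
    result = _run_fixture_upload(domain, workbook, replace=replace, task=task)
    # Errors are accumulated per table/row in the result rather than raised.
    for message in result.errors:
        print(message)
    return result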
Example #2
def _run_fixture_upload(domain, workbook, replace=False, task=None):
    from corehq.apps.users.bulkupload import GroupMemoizer
    return_val = FixtureUploadResult()
    group_memoizer = GroupMemoizer(domain)
    get_location = get_memoized_location_getter(domain)
    data_types = []

    type_sheets = workbook.get_all_type_sheets()
    total_tables = len(type_sheets)
    return_val.number_of_fixtures = total_tables

    def _update_progress(table_count, item_count, items_in_table):
        if task:
            processed = table_count * 10 + (10 * item_count / items_in_table)
            DownloadBase.set_progress(task, processed, 10 * total_tables)

    with CouchTransaction() as transaction:
        for table_number, table_def in enumerate(type_sheets):
            data_type, delete, err = _create_data_type(domain, table_def, replace, transaction)
            return_val.errors.extend(err)
            if delete:
                continue
            transaction.save(data_type)
            data_types.append(data_type)

            data_items = list(workbook.get_data_sheet(data_type.tag))
            items_in_table = len(data_items)
            for sort_key, di in enumerate(data_items):
                _update_progress(table_number, sort_key, items_in_table)
                type_fields = data_type.fields
                item_fields = {
                    field.field_name: _process_item_field(field, di)
                    for field in type_fields
                }

                item_attributes = di.get('property', {})
                old_data_item, delete, err = _process_data_item(
                    domain, replace, data_type, di, item_fields, item_attributes, sort_key)
                return_val.errors.extend(err)
                if delete:
                    old_data_item.recursive_delete(transaction)
                    continue
                transaction.save(old_data_item)

                err = _process_group_ownership(di, old_data_item, group_memoizer, transaction)
                return_val.errors.extend(err)

                err = _process_user_ownership(di, old_data_item, transaction)
                return_val.errors.extend(err)

                err = _process_location_ownership(di, old_data_item, get_location, transaction)
                return_val.errors.extend(err)

    clear_fixture_quickcache(domain, data_types)
    clear_fixture_cache(domain)
    return return_val
Example #3
    def test_update_global_only(self):
        other_table = FixtureDataType(
            domain=self.domain,
            tag='jellyfish',
            is_global=False,
            fields=[
                FixtureTypeField(field_name="genus"),
                FixtureTypeField(field_name="species"),
            ],
        )
        other_table.save()
        clear_fixture_quickcache(self.domain,
                                 get_fixture_data_types(self.domain))
        clear_fixture_cache(self.domain)

        with self.assertRaises(UnsupportedActionError):
            update_fixture(self.domain_link, 'jellyfish')
Example #4
def update_fixture(domain_link, tag):
    if domain_link.is_remote:
        master_results = remote_fixture(domain_link, tag)
    else:
        master_results = local_fixture(domain_link.master_domain, tag)

    master_data_type = master_results["data_type"]
    if not master_data_type.is_global:
        raise UnsupportedActionError(
            _("Found non-global lookup table '{}'.").format(
                master_data_type.tag))

    # Update data type
    master_data_type = master_data_type.to_json()
    del master_data_type["_id"]
    del master_data_type["_rev"]

    linked_data_type = get_fixture_data_type_by_tag(domain_link.linked_domain,
                                                    master_data_type["tag"])
    if linked_data_type:
        linked_data_type = linked_data_type.to_json()
    else:
        linked_data_type = {}
    linked_data_type.update(master_data_type)
    linked_data_type["domain"] = domain_link.linked_domain
    linked_data_type = FixtureDataType.wrap(linked_data_type)
    linked_data_type.save()
    clear_fixture_quickcache(domain_link.linked_domain, [linked_data_type])

    # Re-create relevant data items
    delete_fixture_items_for_data_type(domain_link.linked_domain,
                                       linked_data_type._id)
    for master_item in master_results["data_items"]:
        doc = master_item.to_json()
        del doc["_id"]
        del doc["_rev"]
        doc["domain"] = domain_link.linked_domain
        doc["data_type_id"] = linked_data_type._id
        FixtureDataItem.wrap(doc).save()

    clear_fixture_cache(domain_link.linked_domain)
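
The serialize / strip identifiers / re-wrap pattern above can be isolated into a small helper; this is an illustrative sketch (not part of the module), assuming couchdbkit-style documents with to_json(), wrap() and save():

def _copy_doc_to_domain(doc_class, source_doc, target_domain, **overrides):
    # Sketch of the copy pattern used by update_fixture: drop the source
    # document's identity so Couch assigns a fresh _id/_rev in the target domain.
    doc = source_doc.to_json()
    doc.pop("_id", None)
    doc.pop("_rev", None)
    doc["domain"] = target_domain
    doc.update(overrides)  # e.g. data_type_id when copying FixtureDataItem docs
    copy = doc_class.wrap(doc)
    copy.save()
    return copy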
Example #5
def _run_fixture_upload(domain, workbook, replace=False, task=None):
    from corehq.apps.users.bulkupload import GroupMemoizer
    return_val = FixtureUploadResult()
    group_memoizer = GroupMemoizer(domain)
    get_location = get_memoized_location_getter(domain)
    data_types = []

    with CouchTransaction() as transaction:
        type_sheets = workbook.get_all_type_sheets()
        total_tables = len(type_sheets)
        return_val.number_of_fixtures = total_tables

        def _update_progress(table_count, item_count, items_in_table):
            if task:
                processed = table_count * 10 + (10 * item_count / items_in_table)
                DownloadBase.set_progress(task, processed, 10 * total_tables)

        for table_number, table_def in enumerate(type_sheets):
            tag = table_def.table_id
            new_data_type = FixtureDataType(
                domain=domain,
                is_global=table_def.is_global,
                tag=tag,
                fields=table_def.fields,
                item_attributes=table_def.item_attributes
            )
            try:
                tagged_fdt = FixtureDataType.fixture_tag_exists(domain, tag)
                if tagged_fdt:
                    data_type = tagged_fdt
                # support old usage with 'UID'
                elif table_def.uid:
                    data_type = FixtureDataType.get(table_def.uid)
                else:
                    data_type = new_data_type

                if replace and data_type != new_data_type:
                    data_type.recursive_delete(transaction)
                    data_type = new_data_type

                data_type.fields = table_def.fields
                data_type.item_attributes = table_def.item_attributes
                data_type.is_global = table_def.is_global
                assert data_type.doc_type == FixtureDataType._doc_type
                if data_type.domain != domain:
                    data_type = new_data_type
                    return_val.errors.append(
                        _("'%(UID)s' is not a valid UID. But the new type is created.")
                        % {'UID': table_def.uid}
                    )
                if table_def.delete:
                    data_type.recursive_delete(transaction)
                    continue
            except (ResourceNotFound, KeyError):
                data_type = new_data_type
            transaction.save(data_type)
            data_types.append(data_type)
            data_items = list(workbook.get_data_sheet(data_type.tag))
            items_in_table = len(data_items)
            for sort_key, di in enumerate(data_items):
                _update_progress(table_number, sort_key, items_in_table)
                type_fields = data_type.fields
                item_fields = {}
                for field in type_fields:
                    # if field doesn't have properties
                    if len(field.properties) == 0:
                        item_fields[field.field_name] = FieldList(
                            field_list=[FixtureItemField(
                                # using unicode here, to cast ints, and multi-language strings
                                field_value=six.text_type(di['field'][field.field_name]),
                                properties={}
                            )]
                        )
                    else:
                        field_list = []
                        field_prop_combos = di['field'][field.field_name]
                        prop_combo_len = len(field_prop_combos)
                        prop_dict = di[field.field_name]
                        for x in range(0, prop_combo_len):
                            fix_item_field = FixtureItemField(
                                field_value=six.text_type(field_prop_combos[x]),
                                properties={prop: six.text_type(prop_dict[prop][x]) for prop in prop_dict}
                            )
                            field_list.append(fix_item_field)
                        item_fields[field.field_name] = FieldList(
                            field_list=field_list
                        )

                item_attributes = di.get('property', {})
                new_data_item = FixtureDataItem(
                    domain=domain,
                    data_type_id=data_type.get_id,
                    fields=item_fields,
                    item_attributes=item_attributes,
                    sort_key=sort_key
                )
                try:
                    if di['UID'] and not replace:
                        old_data_item = FixtureDataItem.get(di['UID'])
                    else:
                        old_data_item = new_data_item
                    old_data_item.fields = item_fields
                    old_data_item.item_attributes = item_attributes
                    if old_data_item.domain != domain \
                            or not old_data_item.data_type_id == data_type.get_id:
                        old_data_item = new_data_item
                        return_val.errors.append(
                            _("'%(UID)s' is not a valid UID. But the new item is created.")
                            % {'UID': di['UID']}
                        )
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    if di[DELETE_HEADER] == "Y" or di[DELETE_HEADER] == "y":
                        old_data_item.recursive_delete(transaction)
                        continue
                except (ResourceNotFound, KeyError):
                    old_data_item = new_data_item
                transaction.save(old_data_item)

                old_groups = old_data_item.groups
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.users
                for user in old_users:
                    old_data_item.remove_user(user)
                old_locations = old_data_item.locations
                for location in old_locations:
                    old_data_item.remove_location(location)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        return_val.errors.append(
                            _("Unknown group: '%(name)s'. But the row is successfully added")
                            % {'name': group_name}
                        )

                for raw_username in di.get('user', []):
                    try:
                        username = normalize_username(str(raw_username), domain)
                    except ValidationError:
                        return_val.errors.append(
                            _("Invalid username: '******'. Row is not added")
                            % {'name': raw_username}
                        )
                        continue
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        return_val.errors.append(
                            _("Unknown user: '******'. But the row is successfully added")
                            % {'name': raw_username}
                        )

                for name in di.get('location', []):
                    location_cache = get_location(name)
                    if location_cache.is_error:
                        return_val.errors.append(location_cache.message)
                    else:
                        old_data_item.add_location(location_cache.location,
                                                   transaction=transaction)

    clear_fixture_quickcache(domain, data_types)
    clear_fixture_cache(domain)
    return return_val
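
The inline per-field handling above is what Examples #1 and #2 call _process_item_field; a sketch of that extraction, assuming the same row layout (di['field'] holds plain values or value lists, and di[field_name] holds the parallel property lists):

def _process_item_field(field, di):
    # Field without properties: a single FixtureItemField wrapped in a FieldList.
    if not field.properties:
        return FieldList(field_list=[FixtureItemField(
            field_value=six.text_type(di['field'][field.field_name]),
            properties={},
        )])
    # Field with properties: one FixtureItemField per value/property combination.
    field_prop_combos = di['field'][field.field_name]
    prop_dict = di[field.field_name]
    return FieldList(field_list=[
        FixtureItemField(
            field_value=six.text_type(value),
            properties={prop: six.text_type(prop_dict[prop][i]) for prop in prop_dict},
        )
        for i, value in enumerate(field_prop_combos)
    ])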
Example #6
def _run_fast_fixture_upload(domain, workbook, task=None):
    """This upload should be much faster than the default _run_fixture_upload with the following trade-offs:

    * Does not support any fixture ownership. All fixtures must be global
    * Manually creates the JSON documents instead of using models
    * Delays all fixture item deletes to an asynchronous task
    * Creates tables one by one instead of attempting an "all or nothing" approach
    """
    return_val = FixtureUploadResult()

    type_sheets = workbook.get_all_type_sheets()
    for table_definition in type_sheets:
        if not table_definition.is_global:
            return_val.errors.append(
                _("type {lookup_table_name} is not defined as global").format(
                    lookup_table_name=table_definition.table_id
                )
            )
            return return_val
    total_tables = len(type_sheets)
    return_val.number_of_fixtures = total_tables

    def _update_progress(table_count, item_count, items_in_table):
        if task:
            processed = table_count * 10 + (10 * item_count / items_in_table)
            DownloadBase.set_progress(task, processed, 10 * total_tables)

    existing_data_types_by_tag = {
        data_type.tag: data_type
        for data_type in FixtureDataType.by_domain(domain)
    }
    for table_number, table_def in enumerate(type_sheets):
        data_type = {
            "_id": uuid.uuid4().hex,
            "doc_type": "FixtureDataType",
            "domain": domain,
            "is_global": True,
            "description": None,
            "fields": [field.to_json() for field in table_def.fields],
            "copy_from": None,
            "tag": table_def.table_id,
            "item_attributes": table_def.item_attributes
        }

        data_item_docs_to_save = []
        data_items = list(workbook.get_data_sheet(data_type['tag']))
        items_in_table = len(data_items)
        for sort_key, di in enumerate(data_items):
            _update_progress(table_number, sort_key, items_in_table)
            type_fields = table_def.fields
            item_fields = {
                field.field_name: _process_item_field(field, di).to_json()
                for field in type_fields
            }

            item_attributes = di.get('property', {})
            data_item = {
                "_id": uuid.uuid4().hex,
                "doc_type": "FixtureDataItem",
                "domain": domain,
                "sort_key": sort_key,
                "fields": item_fields,
                "data_type_id": data_type['_id'],
                "item_attributes": item_attributes
            }
            data_item_docs_to_save.append(data_item)

        # Save all the data items in the fixture before the data type itself.
        # This ensures the data type only appears once all of its items exist,
        # so a failure part-way through cannot leave a partially uploaded table.
        try:
            for docs in chunked(data_item_docs_to_save, 1000):
                FixtureDataItem.get_db().save_docs(docs)
        except (BulkSaveError, HTTPError):
            return_val.errors.append(
                _("Error occurred while creating {lookup_table_name}. This table was not created").format(
                    lookup_table_name=data_type['tag']
                )
            )
            continue

        data_type_docs = [data_type]
        existing_data_type = existing_data_types_by_tag.get(data_type['tag'])
        if existing_data_type:
            # delete the old data type in the same request
            data_type_docs.append({
                "_id": existing_data_type._id,
                "_rev": existing_data_type._rev,
                "_deleted": True
            })

        # the following save_docs can result in two issues:
        # * the delete fails, new doc save succeeds meaning that there are two data types with the same tag
        # * the delete succeeds, new doc save fails meaning that there is no data type with the desired tag
        try:
            FixtureDataType.get_db().save_docs(data_type_docs)
        except (BulkSaveError, HTTPError):
            return_val.errors.append(
                _("Error occurred while creating {lookup_table_name}. This table was not created").format(
                    lookup_table_name=data_type['tag']
                )
            )
            continue
        if existing_data_type:
            return_val.messages.append(
                _("Pre-existing definition of {lookup_table_name} deleted").format(
                    lookup_table_name=existing_data_type.tag
                )
            )
        return_val.messages.append(
            _("Table {lookup_table_name} successfully uploaded").format(lookup_table_name=data_type['tag']),
        )

        if existing_data_type:
            from corehq.apps.fixtures.tasks import delete_unneeded_fixture_data_item
            # delay removing data items for the previously deleted type, as that requires a
            # couch view hit which introduces another opportunity for failure
            delete_unneeded_fixture_data_item.delay(domain, existing_data_type._id)
            clear_fixture_quickcache(domain, [existing_data_type])
        clear_fixture_cache(domain)

    return return_val
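
The chunked bulk save with a per-table fallback used above can be read as a small reusable pattern; a sketch, assuming db exposes couchdbkit's save_docs and that chunked yields fixed-size batches as in this module's imports:

def _bulk_save_in_chunks(db, docs, chunk_size=1000):
    # Sketch of the save loop above: write documents in fixed-size batches and
    # report whether every batch succeeded, so the caller can skip the table.
    try:
        for batch in chunked(docs, chunk_size):
            db.save_docs(list(batch))
    except (BulkSaveError, HTTPError):
        return False
    return True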
Example #7
def _run_fixture_upload(domain, workbook, replace=False, task=None):
    from corehq.apps.users.bulkupload import GroupMemoizer
    return_val = FixtureUploadResult()
    group_memoizer = GroupMemoizer(domain)
    get_location = get_memoized_location_getter(domain)
    data_types = []

    with CouchTransaction() as transaction:
        type_sheets = workbook.get_all_type_sheets()
        total_tables = len(type_sheets)
        return_val.number_of_fixtures = total_tables

        def _update_progress(table_count, item_count, items_in_table):
            if task:
                processed = table_count * 10 + (10 * item_count /
                                                items_in_table)
                DownloadBase.set_progress(task, processed, 10 * total_tables)

        for table_number, table_def in enumerate(type_sheets):
            tag = table_def.table_id
            new_data_type = FixtureDataType(
                domain=domain,
                is_global=table_def.is_global,
                tag=tag,
                fields=table_def.fields,
                item_attributes=table_def.item_attributes)
            try:
                tagged_fdt = FixtureDataType.fixture_tag_exists(domain, tag)
                if tagged_fdt:
                    data_type = tagged_fdt
                # support old usage with 'UID'
                elif table_def.uid:
                    data_type = FixtureDataType.get(table_def.uid)
                else:
                    data_type = new_data_type

                if replace and data_type != new_data_type:
                    data_type.recursive_delete(transaction)
                    data_type = new_data_type

                data_type.fields = table_def.fields
                data_type.item_attributes = table_def.item_attributes
                data_type.is_global = table_def.is_global
                assert data_type.doc_type == FixtureDataType._doc_type
                if data_type.domain != domain:
                    data_type = new_data_type
                    return_val.errors.append(
                        _("'%(UID)s' is not a valid UID. But the new type is created."
                          ) % {'UID': table_def.uid})
                if table_def.delete:
                    data_type.recursive_delete(transaction)
                    continue
            except (ResourceNotFound, KeyError):
                data_type = new_data_type
            transaction.save(data_type)
            data_types.append(data_type)
            data_items = list(workbook.get_data_sheet(data_type.tag))
            items_in_table = len(data_items)
            for sort_key, di in enumerate(data_items):
                _update_progress(table_number, sort_key, items_in_table)
                type_fields = data_type.fields
                item_fields = {}
                for field in type_fields:
                    # if field doesn't have properties
                    if len(field.properties) == 0:
                        item_fields[field.field_name] = FieldList(field_list=[
                            FixtureItemField(
                                # using unicode here, to cast ints, and multi-language strings
                                field_value=six.text_type(di['field'][
                                    field.field_name]),
                                properties={})
                        ])
                    else:
                        field_list = []
                        field_prop_combos = di['field'][field.field_name]
                        prop_combo_len = len(field_prop_combos)
                        prop_dict = di[field.field_name]
                        for x in range(0, prop_combo_len):
                            fix_item_field = FixtureItemField(
                                field_value=six.text_type(
                                    field_prop_combos[x]),
                                properties={
                                    prop: six.text_type(prop_dict[prop][x])
                                    for prop in prop_dict
                                })
                            field_list.append(fix_item_field)
                        item_fields[field.field_name] = FieldList(
                            field_list=field_list)

                item_attributes = di.get('property', {})
                new_data_item = FixtureDataItem(
                    domain=domain,
                    data_type_id=data_type.get_id,
                    fields=item_fields,
                    item_attributes=item_attributes,
                    sort_key=sort_key)
                try:
                    if di['UID'] and not replace:
                        old_data_item = FixtureDataItem.get(di['UID'])
                    else:
                        old_data_item = new_data_item
                    old_data_item.fields = item_fields
                    old_data_item.item_attributes = item_attributes
                    if old_data_item.domain != domain \
                            or not old_data_item.data_type_id == data_type.get_id:
                        old_data_item = new_data_item
                        return_val.errors.append(
                            _("'%(UID)s' is not a valid UID. But the new item is created."
                              ) % {'UID': di['UID']})
                    assert old_data_item.doc_type == FixtureDataItem._doc_type
                    if di[DELETE_HEADER] == "Y" or di[DELETE_HEADER] == "y":
                        old_data_item.recursive_delete(transaction)
                        continue
                except (ResourceNotFound, KeyError):
                    old_data_item = new_data_item
                transaction.save(old_data_item)

                old_groups = old_data_item.groups
                for group in old_groups:
                    old_data_item.remove_group(group)
                old_users = old_data_item.users
                for user in old_users:
                    old_data_item.remove_user(user)
                old_locations = old_data_item.locations
                for location in old_locations:
                    old_data_item.remove_location(location)

                for group_name in di.get('group', []):
                    group = group_memoizer.by_name(group_name)
                    if group:
                        old_data_item.add_group(group, transaction=transaction)
                    else:
                        return_val.errors.append(
                            _("Unknown group: '%(name)s'. But the row is successfully added"
                              ) % {'name': group_name})

                for raw_username in di.get('user', []):
                    try:
                        username = normalize_username(str(raw_username),
                                                      domain)
                    except ValidationError:
                        return_val.errors.append(
                            _("Invalid username: '******'. Row is not added")
                            % {'name': raw_username})
                        continue
                    user = CommCareUser.get_by_username(username)
                    if user:
                        old_data_item.add_user(user)
                    else:
                        return_val.errors.append(
                            _("Unknown user: '******'. But the row is successfully added"
                              ) % {'name': raw_username})

                for name in di.get('location', []):
                    location_cache = get_location(name)
                    if location_cache.is_error:
                        return_val.errors.append(location_cache.message)
                    else:
                        old_data_item.add_location(location_cache.location,
                                                   transaction=transaction)

    clear_fixture_quickcache(domain, data_types)
    clear_fixture_cache(domain)
    return return_val
Example #8
def update_tables(request, domain, data_type_id, test_patch=None):
    """
    Receives a JSON update patch like the following:
    {
        "_id":"0920fe1c6d4c846e17ee33e2177b36d6",
        "tag":"growth",
        "view_link":"/a/gsid/fixtures/view_lookup_tables/?table_id:0920fe1c6d4c846e17ee33e2177b36d6",
        "is_global":false,
        "fields":{"genderr":{"update":"gender"},"grade":{}}
    }
    """
    if test_patch is None:
        test_patch = {}
    if data_type_id:
        try:
            data_type = FixtureDataType.get(data_type_id)
        except ResourceNotFound:
            raise Http404()

        assert(data_type.doc_type == FixtureDataType._doc_type)
        assert(data_type.domain == domain)

        if request.method == 'GET':
            return json_response(strip_json(data_type))

        elif request.method == 'DELETE':
            with CouchTransaction() as transaction:
                data_type.recursive_delete(transaction)
            clear_fixture_cache(domain)
            return json_response({})
        elif not request.method == 'PUT':
            return HttpResponseBadRequest()

    if request.method == 'POST' or request.method == "PUT":
        fields_update = test_patch or _to_kwargs(request)
        fields_patches = fields_update["fields"]
        data_tag = fields_update["tag"]
        is_global = fields_update["is_global"]

        # validate tag and fields
        validation_errors = []
        if is_identifier_invalid("{}_list".format(data_tag)):
            validation_errors.append(data_tag)
        for field_name, options in fields_update['fields'].items():
            method = list(options.keys())
            if 'update' in method:
                field_name = options['update']
            if is_identifier_invalid(field_name) and 'remove' not in method:
                validation_errors.append(field_name)
        validation_errors = [_(
            "\"%s\" cannot include special characters, begin or end with a space, "
            "or begin with \"xml\" or a number") % e for e in validation_errors
        ]
        if len(data_tag) < 1 or len(data_tag) > 31:
            validation_errors.append(_("Table ID must be between 1 and 31 characters."))

        if validation_errors:
            return json_response({
                'validation_errors': validation_errors,
                'error_msg': _(
                    "Could not update table because field names were not "
                    "correctly formatted"),
            })

        with CouchTransaction() as transaction:
            if data_type_id:
                data_type = update_types(fields_patches, domain, data_type_id, data_tag, is_global, transaction)
                update_items(fields_patches, domain, data_type_id, transaction)
            else:
                if FixtureDataType.fixture_tag_exists(domain, data_tag):
                    return HttpResponseBadRequest("DuplicateFixture")
                else:
                    data_type = create_types(fields_patches, domain, data_tag, is_global, transaction)
        clear_fixture_cache(domain)
        return json_response(strip_json(data_type))
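
The fields patch shown in the docstring supports three shapes per field; a purely illustrative reading of such a patch (the real handling lives in update_types/update_items, which are not shown here):

def describe_field_patch(fields_patches):
    # Hypothetical helper: summarize what a patch such as
    # {"genderr": {"update": "gender"}, "grade": {}} asks for.
    actions = {}
    for field_name, options in fields_patches.items():
        if 'remove' in options:
            actions[field_name] = 'delete this field from the table'
        elif 'update' in options:
            actions[field_name] = 'rename to %r' % options['update']
        else:
            actions[field_name] = 'keep unchanged'
    return actions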
Example #9
def update_tables(request, domain, data_type_id, test_patch=None):
    """
    Receives a JSON update patch like the following:
    {
        "_id":"0920fe1c6d4c846e17ee33e2177b36d6",
        "tag":"growth",
        "view_link":"/a/gsid/fixtures/view_lookup_tables/?table_id:0920fe1c6d4c846e17ee33e2177b36d6",
        "is_global":false,
        "fields":{"genderr":{"update":"gender"},"grade":{}}
    }
    """
    if test_patch is None:
        test_patch = {}
    if data_type_id:
        try:
            data_type = FixtureDataType.get(data_type_id)
        except ResourceNotFound:
            raise Http404()

        assert (data_type.doc_type == FixtureDataType._doc_type)
        assert (data_type.domain == domain)

        if request.method == 'GET':
            return json_response(strip_json(data_type))

        elif request.method == 'DELETE':
            with CouchTransaction() as transaction:
                data_type.recursive_delete(transaction)
            clear_fixture_cache(domain)
            return json_response({})
        elif not request.method == 'PUT':
            return HttpResponseBadRequest()

    if request.method == 'POST' or request.method == "PUT":
        fields_update = test_patch or _to_kwargs(request)
        fields_patches = fields_update["fields"]
        data_tag = fields_update["tag"]
        is_global = fields_update["is_global"]

        # validate tag and fields
        validation_errors = []
        if is_identifier_invalid("{}_list".format(data_tag)):
            validation_errors.append(data_tag)
        for field_name, options in fields_update['fields'].items():
            method = list(options.keys())
            if 'update' in method:
                field_name = options['update']
            if is_identifier_invalid(field_name) and 'remove' not in method:
                validation_errors.append(field_name)
        validation_errors = [
            _("\"%s\" cannot include special characters, begin or end with a space, "
              "or begin with \"xml\" or a number") % e
            for e in validation_errors
        ]
        if len(data_tag) < 1 or len(data_tag) > 31:
            validation_errors.append(
                _("Table ID must be between 1 and 31 characters."))

        if validation_errors:
            return json_response({
                'validation_errors': validation_errors,
                'error_msg': _(
                    "Could not update table because field names were not "
                    "correctly formatted"),
            })

        with CouchTransaction() as transaction:
            if data_type_id:
                data_type = update_types(fields_patches, domain, data_type_id,
                                         data_tag, is_global, transaction)
                update_items(fields_patches, domain, data_type_id, transaction)
            else:
                if FixtureDataType.fixture_tag_exists(domain, data_tag):
                    return HttpResponseBadRequest("DuplicateFixture")
                else:
                    data_type = create_types(fields_patches, domain, data_tag,
                                             is_global, transaction)
        clear_fixture_cache(domain)
        return json_response(strip_json(data_type))
Example #10
    def test_update_fixture(self):
        self.assertEqual([], get_fixture_data_types(self.linked_domain))

        # Update linked domain
        update_fixture(self.domain_link, self.table.tag)

        # Linked domain should now have master domain's table and rows
        linked_types = get_fixture_data_types(self.linked_domain)
        self.assertEqual({'moons'}, {t.tag for t in linked_types})
        self.assertEqual({self.linked_domain}, {t.domain for t in linked_types})
        items = get_fixture_items_for_data_type(self.linked_domain,
                                                linked_types[0]._id)
        self.assertEqual({self.linked_domain}, {i.domain for i in items})
        self.assertEqual({linked_types[0]._id}, {i.data_type_id for i in items})
        self.assertEqual(
            ['Callisto', 'Europa', 'Io', 'Jupiter', 'Jupiter', 'Jupiter'],
            sorted([
                i.fields[field_name].field_list[0].field_value
                for i in items for field_name in i.fields.keys()
            ]))

        # Master domain's table and rows should be untouched
        master_types = get_fixture_data_types(self.domain)
        self.assertEqual({'moons'}, {t.tag for t in master_types})
        self.assertEqual({self.domain}, {t.domain for t in master_types})
        master_items = get_fixture_items_for_data_type(self.domain,
                                                       master_types[0]._id)
        self.assertEqual(
            ['Callisto', 'Europa', 'Io', 'Jupiter', 'Jupiter', 'Jupiter'],
            sorted([
                i.fields[field_name].field_list[0].field_value
                for i in master_items for field_name in i.fields.keys()
            ]))

        # Update rows in master table and re-update linked domain
        master_items[-1].delete()  # Callisto
        FixtureDataItem(
            domain=self.domain,
            data_type_id=self.table._id,
            fields={
                'name': FieldList(
                    field_list=[FixtureItemField(field_value='Thalassa')]),
                'planet': FieldList(
                    field_list=[FixtureItemField(field_value='Neptune')]),
            },
        ).save()
        FixtureDataItem(
            domain=self.domain,
            data_type_id=self.table._id,
            fields={
                'name': FieldList(
                    field_list=[FixtureItemField(field_value='Naiad')]),
                'planet': FieldList(
                    field_list=[FixtureItemField(field_value='Neptune')]),
            },
        ).save()
        clear_fixture_quickcache(self.domain,
                                 get_fixture_data_types(self.domain))
        clear_fixture_cache(self.domain)
        update_fixture(self.domain_link, self.table.tag)

        # Linked domain should still have one table, with the new rows
        linked_types = get_fixture_data_types(self.linked_domain)
        self.assertEqual(1, len(linked_types))
        self.assertEqual('moons', linked_types[0].tag)
        items = get_fixture_items_for_data_type(self.linked_domain,
                                                linked_types[0]._id)
        self.assertEqual(4, len(items))
        self.assertEqual(
            ['Europa', 'Io', 'Jupiter', 'Jupiter',
             'Naiad', 'Neptune', 'Neptune', 'Thalassa'],
            sorted([
                i.fields[field_name].field_list[0].field_value
                for i in items for field_name in i.fields.keys()
            ]))