Example #1
    def download_customize(self, item_type):
        """

        """
        item_type = validate_item_type(item_type)

        if self.request.POST:
            format = self.request.POST.get('format', 'csv')
            involvements = self.request.POST.get('involvements', 'full')
            attributes = self.request.POST.getall('attributes')
            if format == 'csv':
                header, rows = to_flat_table(
                    self.request, item_type, involvements=involvements,
                    columns=attributes)
                return render_to_response(
                    'csv', {'header': header, 'rows': rows}, self.request)

        # Order matters: The first entry is the default value.
        formats = [
            ('csv', 'CSV'),
        ]
        attributes = []
        for config_key in getCategoryList(
                self.request, item_type).getAllKeys():
            attributes.append((
                config_key.getName(), config_key.getTranslatedName()))
        if item_type == 'a':
            template = get_customized_template_path(
                self.request, 'activities/download.mak')
        else:
            template = get_customized_template_path(
                self.request, 'stakeholders/download.mak')
        template_values = {
            'profile': get_current_profile(self.request),
            'locale': get_current_locale(self.request),
            'formats': formats,
            'attributes': attributes
        }
        return render_to_response(template, template_values, self.request)
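
The 'csv' renderer passed to render_to_response() above is not built into
Pyramid; it has to be registered on the Configurator (for example with
config.add_renderer('csv', CSVRenderer)). A minimal sketch of such a renderer
is shown below; the class name, the delimiter and the download filename are
illustrative and not taken from the project source.

# Sketch of a custom 'csv' renderer for Pyramid (assumption: registered via
# config.add_renderer('csv', CSVRenderer)). Delimiter and filename are
# placeholders.
import csv
from StringIO import StringIO


class CSVRenderer(object):

    def __init__(self, info):
        # The renderer factory receives the renderer info; nothing to keep.
        pass

    def __call__(self, value, system):
        # 'value' is the dict returned by the view:
        # {'header': [...], 'rows': [...]}
        fout = StringIO()
        writer = csv.writer(fout, delimiter=';', quoting=csv.QUOTE_ALL)
        writer.writerow(value.get('header', []))
        writer.writerows(value.get('rows', []))

        request = system.get('request')
        if request is not None:
            request.response.content_type = 'text/csv'
            request.response.content_disposition = (
                'attachment; filename="download.csv"')
        return fout.getvalue()
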
Example #2
def to_flat_table(request, item_type, involvements='full', columns=[]):
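    """
    Build a flat table (header and rows) of Activities or Stakeholders for
    the CSV download.

    ``item_type`` is 'a' (Activities) or 'sh' (Stakeholders). With
    ``involvements`` set to 'none', the involvement columns are omitted.
    ``columns`` optionally restricts the output to the given key names.
    Returns a tuple (header, rows).
    """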

    # Query the Items with the protocol.
    # Important: Query the Items with the original database language! This
    # prevents errors when different main keys (eg. "Remark") have the exact
    # same translation. Instead, the translation happens when filling the row
    # with the help of the configs.
    item_type = validate_item_type(item_type)
    if item_type == 'a':
        items = activity_protocol.read_many(
            request, public=True, translate=False)
        other_item_type = validate_item_type('sh')
    else:
        # Query Stakeholders through Activities.
        items = stakeholder_protocol.read_many_by_activities(
            request, public=True, translate=False)
        other_item_type = validate_item_type('a')

    META_HEADER = ['id', 'version', 'timestamp']
    if item_type == 'a':
        META_HEADER.append('geometry')

    config_taggroups = []
    max_involvements = 0

    # Collect the taggroups based on the form configuration.
    for config_taggroup in getCategoryList(
            request, item_type).getAllTaggroups():
        config_taggroup_entry = {
            'count': 0,
            'config': config_taggroup,
            'main_key': config_taggroup.getMaintag().getKey().getName()
        }
        config_taggroups.append(config_taggroup_entry)

    # Find out how many times each taggroup occurs. This defines how many
    # columns are needed in the table.
    for item in items.get('data', []):

        # Taggroups: Identified by their main tags.
        current_main_keys = []
        for main_key in get_main_keys_from_item_json(item):
            main_key_already_found = next(
                (i for i in current_main_keys if i['key'] == main_key), None)
            if main_key_already_found:
                main_key_already_found['count'] += 1
            else:
                current_main_keys.append({'key': main_key, 'count': 1})
        for main_key in current_main_keys:
            config_main_key = next((
                i for i in config_taggroups if i['main_key']
                == main_key['key']), None)
            if config_main_key is not None:
                config_main_key['count'] = max(
                    config_main_key['count'], main_key['count'])

        # Involvements
        if involvements != 'none':
            max_involvements = max(max_involvements, len(
                item.get('involvements', [])))

    # Create the headers
    header = []
    header.extend(META_HEADER)
    for config_taggroup_entry in config_taggroups:
        config_taggroup = config_taggroup_entry.get('config')
        config_mainkey = config_taggroup.getMaintag().getKey()
        for i in range(max(config_taggroup_entry.get('count'), 1)):
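            # Sort the tags so that the main tag comes first (its sort key
            # is False, which sorts before True).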
            for config_tag in sorted(
                    config_taggroup.getTags(),
                    key=lambda t: t != config_taggroup.getMaintag()):

                if (columns and config_tag.getKey().getName() not in columns):
                    continue

                key_name = config_tag.getKey().getTranslatedName()
                # If the taggroup contains multiple tags, add the main key as
                # prefix
                if len(config_taggroup.getTags()) > 1:
                    key_name = '%s_%s' % (
                        config_mainkey.getTranslatedName(), key_name)
                # If the taggroup is repeated, add a number as suffix.
                if (config_taggroup.getRepeatable()
                        or config_mainkey.getType().lower() in
                        ['checkbox', 'inputtoken']):
                    key_name = '%s_%s' % (key_name, i + 1)

                header.append(unicode("%s" % key_name).encode('utf-8'))

                if columns:
                    try:
                        config_taggroup_entry['columns'].append(key_name)
                    except KeyError:
                        config_taggroup_entry['columns'] = [key_name]

    if involvements != 'none':
        inv_keys = [
            i[0] for i in getCategoryList(
                request, other_item_type).getInvolvementOverviewKeyNames()]

        involvement_header = inv_keys + ['inv_role', 'inv_id']
        for i in range(max_involvements):
            for inv_header in involvement_header:
                inv_key_name = '%s_%s' % (inv_header, i + 1)
                header.append(unicode("%s" % inv_key_name).encode('utf-8'))

    # Create the rows
    rows = []
    for item in items.get('data', []):
        row = []

        # Metadata
        for key in META_HEADER:
            if key == 'geometry':
                row.append(",".join(
                    map(str, item.get(key, {}).get("coordinates", []))))
            else:
                row.append(item.get(key, None))

        # Taggroups
        for config_taggroup_entry in config_taggroups:
            found_taggroups = []
            config_taggroup = config_taggroup_entry.get('config')
            config_mainkey = config_taggroup.getMaintag().getKey()

            for taggroup in sorted(
                    item.get('taggroups', []),
                    key=lambda tg: tg.get('tg_id', 0)):

                if taggroup['main_tag']['key'] != config_mainkey.getName():
                    continue

                for config_tag in sorted(
                        config_taggroup.getTags(),
                        key=lambda t: t != config_taggroup.getMaintag()):

                    if (columns and config_tag.getKey().getName()
                            not in columns):
                        continue

                    value = get_value_by_key_from_taggroup_json(
                        taggroup, config_tag.getKey().getName())

                    for config_value in config_tag.getValues():
                        if config_value.getName() == value:
                            value = config_value.getTranslation()

                    if (config_tag.getKey().getType().lower() == 'file'
                            and value):
                        # Uploaded files are displayed with a URL to view the
                        # file
                        files = []
                        try:
                            for v in value.split(','):
                                filename = unicode(
                                    '%s' % v.split('|')[0]).encode('utf-8')
                                url = request.route_url(
                                    'file_view', action='view',
                                    identifier=v.split('|')[1])
                                files.append('%s (%s)' % (filename, url))
                            value = '|'.join(files)
                        except:
                            pass

                    if not value:
                        value = ''

                    found_taggroups.append(
                        unicode("%s" % value).encode("utf-8"))

            # Fill up the rest of the values with None
            if columns:
                try:
                    taggroup_length = len(config_taggroup_entry['columns'])
                except KeyError:
                    taggroup_length = 0
            else:
                taggroup_length = max(
                    config_taggroup_entry.get('count'), 1) * len(
                        config_taggroup.getTags())
            found_taggroups.extend(
                [None] * (taggroup_length - len(found_taggroups)))

            row.extend(found_taggroups)

        # Involvements
        if involvements != 'none':
            inv_row = []
            for involvement in sorted(
                item.get('involvements', []), key=lambda i: (
                    i.get('role_id'), i.get('timestamp'))):
                inv_data = [None] * len(involvement_header)

                # Overview keys
                for i, config_sh_key in enumerate(
                        involvement_header[:len(involvement_header) - 2]):
                    inv_value = get_value_by_key_from_item_json(
                        involvement.get('data', {}), config_sh_key)
                    inv_data[i] = unicode("%s" % inv_value).encode("utf-8")

                # Metadata
                inv_data[len(inv_data) - 2] = involvement.get('role', None)
                inv_data[len(inv_data) - 1] = involvement.get('data', {}).get(
                    'id', None)
                inv_row.extend(inv_data)

            # Fill the rest with None
            inv_row.extend([None] * (
                len(involvement_header) * max_involvements - len(inv_row)))
            row.extend(inv_row)

        rows.append(row)

    return header, rows
Example #3
    def evaluation(self, data=None):
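        """
        Compute aggregated values over Activities or Stakeholders.

        Expects a JSON body (``self.request.json_body`` or ``data``) with
        the keys 'group_by' (list) and 'attributes' (dict), and optionally
        'item', 'locales', 'translate', 'a_ids', 'sh_ids', 'filter' and
        'profile'. Returns a dict with 'success', 'data' and optionally
        'translate'.
        """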

        ret = {'success': False}

        json_data = self.request.json_body if data is None else data
        if json_data is None:
            ret['msg'] = 'No data provided'
            return ret

        if validate_item_type(json_data.get('item', 'a')) == 'sh':
            self.db_item = Stakeholder
            self.db_taggroup = SH_Tag_Group
            self.db_tag = SH_Tag
            self.db_key = SH_Key
            self.db_value = SH_Value
            self.protocol = StakeholderProtocol3(Session)
        else:
            self.db_item = Activity
            self.db_taggroup = A_Tag_Group
            self.db_tag = A_Tag
            self.db_key = A_Key
            self.db_value = A_Value
            self.protocol = ActivityProtocol3(Session)

        # Make sure the json is valid
        if 'group_by' not in json_data:
            ret['msg'] = "Missing parameter 'group by': At least one column "
            "needs to be specified."
            return ret
        if not isinstance(json_data['group_by'], list):
            ret['msg'] = "Parameter 'group by' needs to be an array."
            return ret
        if 'attributes' not in json_data:
            ret['msg'] = "Missing attributes: No attributes were specified."
            return ret
        for attr in json_data['attributes']:
            test, msg = self._check_function(
                json_data['attributes'][attr], attr)
            if test is not True:
                ret['msg'] = msg
                return ret
        if 'locales' in json_data and not isinstance(
                json_data['locales'], list):
            ret['msg'] = "Parameter 'locales' needs to be an array."
            return ret
        translate_keys = json_data.get('translate', {}).get('keys', [])
        if translate_keys and not isinstance(translate_keys, list):
            ret['msg'] = "Parameter 'translate[\'keys\']' needs to be an "
            "array."
            return ret
            for k in translate_keys:
                if not isinstance(k, list):
                    ret['msg'] = "Value of 'translate[\'keys\']' needs to be "
                    "an array of arrays."
                    return ret
        a_ids = json_data.get('a_ids', [])
        if not isinstance(a_ids, list):
            ret['msg'] = "Parameter 'a_ids' needs to be an array."
            return ret
        for i in a_ids:
            if not isinstance(i, basestring):
                ret['msg'] = ("Entries of parameter 'a_ids' need to be "
                              "strings (the UUIDs of Activities).")
                return ret
        sh_ids = json_data.get('sh_ids', [])
        if not isinstance(sh_ids, list):
            ret['msg'] = "Parameter 'sh_ids' needs to be an array."
            return ret
        for i in sh_ids:
            if not isinstance(i, basestring):
                ret['msg'] = ("Entries of parameter 'sh_ids' need to be "
                              "strings (the UUIDs of Stakeholders).")
                return ret
        if self.db_item == Activity:
            this_id_filter = a_ids
            other_id_filter = sh_ids
        else:
            this_id_filter = sh_ids
            other_id_filter = a_ids

        this_filter = []
        other_filter = []
        if 'filter' in json_data:
            params = []
            for filters in json_data.get('filter', '').split('&'):
                try:
                    f = filters.split('=')
                    if len(f) == 2:
                        params.append((f[0], f[1]))
                except:
                    pass
            # Simulate a request to send the filters
            req = DummyRequest()
            req.params = MultiDict(params)
            a_tag_filter, __, sh_tag_filter, __ = self.protocol._filter(req)
            if self.db_item == Activity:
                this_filter = a_tag_filter
                other_filter = sh_tag_filter
            else:
                this_filter = sh_tag_filter
                other_filter = a_tag_filter

        isInvolvementRequired = (
            self.db_item == Stakeholder
            or len(other_filter) + len(other_id_filter) > 0)

        # Collect all keys to be translated (values are translated in the
        # query)
        locales = ['default']
        langs = []
        locales.extend(json_data.get('locales', []))
        translated_keys = {}
        exclude_from_translation = ['Activity', 'Stakeholder']
        keys = []
        for key, __ in json_data.get('attributes', {}).iteritems():
            if key not in exclude_from_translation and key not in keys:
                keys.append(key)
        for key in json_data.get('group_by', []):
            if key not in exclude_from_translation and key not in keys:
                keys.append(key)
        for key in translate_keys:
            for k in key:
                if k not in keys:
                    keys.append(k)
        for l in locales:
            locale = l
            if l == 'default':
                locale = get_current_locale(self.request)
            db_lang = Session.query(Language).filter(
                Language.locale == locale).first()
            langs.append((l, db_lang))
            translated_keys[l] = get_translated_db_keys(
                self.db_key, keys, db_lang)

        # Get groups
        groups_subqueries, groups_columns = self._get_group_by(
            json_data['group_by'], langs)

        # Get functions
        functions_subqueries, functions_columns = \
            self._get_attribute_functions(json_data['attributes'])

        # Prepare basic query
        q = Session.query(*groups_columns + functions_columns).\
            join(self.db_taggroup).\
            join(self.db_item)

        # Join with further groups
        for g_sq in groups_subqueries[1:]:
            q = q.outerjoin(g_sq, g_sq.c.item_id == self.db_item.id)

        # Join with functions
        for f_sq in functions_subqueries:
            q = q.outerjoin(f_sq, f_sq.c.item_id == self.db_item.id)

        # Apply status filter: only include active items (fk_status == 2).
        q = q.filter(self.db_item.fk_status == 2)

        if this_id_filter:
            q = q.filter(self.db_item.identifier.in_(this_id_filter))

        # Apply filters
        filter_subqueries = self.protocol.Session.query(
            self.db_item.id.label('a_filter_id')
        )
        for x in this_filter:
            # Collect the IDs for each filter
            taggroups_sq = x.subquery()
            single_subquery = self.protocol.Session.query(
                self.db_item.id.label('a_filter_id')
            ).\
                join(self.db_taggroup).\
                join(taggroups_sq,
                     taggroups_sq.c.a_filter_tg_id == self.db_taggroup.id).\
                subquery()
            # Join each found ID with previously found IDs
            filter_subqueries = filter_subqueries.\
                join(single_subquery,
                     single_subquery.c.a_filter_id == self.db_item.id)
        filter_subqueries = filter_subqueries.subquery()
        q = q.join(
            filter_subqueries,
            filter_subqueries.c.a_filter_id == self.db_item.id)

        # Apply profile boundary filter
        if self.db_item == Activity:
            p = json_data.get('profile', get_current_profile(self.request))
            profile = Session.query(Profile).\
                filter(Profile.code == p).\
                first()
            if profile is not None:
                q = q.filter(geofunctions.intersects(
                    self.db_item.point, profile.geometry))

        # Apply grouping and ordering
        q = q.group_by(*groups_columns).\
            order_by(groups_columns[0])

        if isInvolvementRequired:
            if self.db_item == Stakeholder:
                inv_subquery = Session.query(
                    Involvement.fk_stakeholder.label('id')
                ).\
                    join(Activity).\
                    filter(Activity.fk_status == 2)
                p = json_data.get('profile', get_current_profile(self.request))
                profile = Session.query(Profile).\
                    filter(Profile.code == p).\
                    first()
                if profile is not None:
                    inv_subquery = inv_subquery.filter(geofunctions.intersects(
                        Activity.point, profile.geometry))
                other_db_item = Activity
                other_db_taggroup = A_Tag_Group
            else:
                inv_subquery = Session.query(
                    Involvement.fk_activity.label('id')
                ).\
                    join(Stakeholder).\
                    filter(Stakeholder.fk_status == 2)
                other_db_item = Stakeholder
                other_db_taggroup = SH_Tag_Group

            if other_id_filter:
                inv_subquery = inv_subquery.filter(
                    other_db_item.identifier.in_(other_id_filter))

            # Apply filters
            filter_subqueries = self.protocol.Session.query(
                other_db_item.id.label('a_filter_id')
            )

            for x in other_filter:
                # Collect the IDs for each filter
                taggroups_sq = x.subquery()
                try:
                    single_subquery = self.protocol.Session.query(
                        other_db_item.id.label('a_filter_id')
                    ).\
                        join(other_db_taggroup).\
                        join(taggroups_sq,
                             taggroups_sq.c.a_filter_tg_id == other_db_taggroup.id).\
                        subquery()
                except AttributeError:
                    single_subquery = self.protocol.Session.query(
                        other_db_item.id.label('a_filter_id')
                    ).\
                        join(other_db_taggroup).\
                        join(taggroups_sq,
                             taggroups_sq.c.sh_filter_tg_id == other_db_taggroup.id).\
                        subquery()
                # Join each found ID with previously found IDs
                filter_subqueries = filter_subqueries.\
                    join(single_subquery,
                         single_subquery.c.a_filter_id == other_db_item.id)

            filter_subqueries = filter_subqueries.subquery()
            inv_subquery = inv_subquery.join(
                filter_subqueries,
                filter_subqueries.c.a_filter_id == other_db_item.id)

            inv_subquery = inv_subquery.subquery()
            q = q.filter(self.db_item.id.in_(
                select([inv_subquery.c.id])
            ))

        data = []
        for res in q.all():
            data = _handle_single_line(
                data, res, json_data.get('group_by'),
                json_data.get('attributes'), translated_keys)

        # Do a translation of groupable if available
        groupable_translated = []
        for key in translate_keys:
            translations = []
            for k in key:
                t = {
                    'key': k,
                    'default': k
                }
                for locale, key_translations in translated_keys.iteritems():
                    translation = (
                        None if k not in exclude_from_translation else k)
                    for k_t in key_translations:
                        if len(k_t) >= 2 and k_t[0] == k:
                            translation = k_t[1]
                    t[locale] = translation
                translations.append(t)
            groupable_translated.append(translations)
        if len(groupable_translated):
            ret.update({
                'translate': {'keys': groupable_translated}
            })

        ret.update({
            'success': True,
            'data': data
        })

        return ret
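
For reference, a request body that passes the validation in evaluation()
could look as follows; the key names, the 'sum' aggregation function and the
profile code are placeholders that depend on the project's form
configuration, not values taken from the source.

# Hypothetical JSON body for the evaluation() view above; key names, the
# 'sum' function and the profile code are illustrative only.
payload = {
    'item': 'a',                                  # 'a' or 'sh'
    'group_by': ['Intention of Investment'],      # at least one column
    'attributes': {'Contracts': 'sum'},           # checked by _check_function
    'locales': ['es'],                            # additional output locales
    'translate': {'keys': [['Intention of Investment']]},
    'a_ids': [],                                  # optional UUID filters
    'sh_ids': [],
    'profile': 'global'
}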