Example #1
def custom_json_handler(obj):
    if obj == Missing.Value:
        return None
    if type(obj) in (datetime.datetime, datetime.date):
        return obj.isoformat()
    if type(obj) == DateTime:
        dt = DateTime(obj)
        return dt.ISO()
    if type(obj) == set:
        return list(obj)
    return obj
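For context, a handler like this is normally wired into json.dumps through its default= hook, which the encoder calls for any value it cannot serialize on its own. A minimal usage sketch, assuming the handler's module also imports datetime, Zope's DateTime and Missing; the payload dict here is purely illustrative.

import json
import datetime
from DateTime import DateTime  # Zope DateTime

payload = {
    'created': DateTime(),                 # serialized via dt.ISO()
    'updated': datetime.date(2020, 1, 1),  # serialized via isoformat()
    'tags': set(['a', 'b']),               # serialized as a list
}
serialized = json.dumps(payload, default=custom_json_handler)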
Example #2
def conf_utils_custom_json_handler(obj):
    if obj == Missing.Value:
        return None
    if type(obj) in (datetime.datetime, datetime.date):
        return obj.isoformat()
    if type(obj) == DateTime:
        dt = DateTime(obj)
        return dt.ISO()
    if type(obj) == set:
        return list(obj)
    if type(obj) == RelationValue:
        return obj.to_path.split('/')[-1]
    if type(obj) == RelationList:
        return list(item.to_path.split('/')[-1] for item in obj)
    return obj
Example #3
    def test_store_property_types(self):
        # Test that Ape restores properties to the correct types.
        from DateTime import DateTime
        now = DateTime()
        conn = self.db.open()
        try:
            app = conn.root()['Application']
            f = Folder()
            f.id = 'Holidays'
            app._setObject(f.id, f, set_owner=0)
            transaction.commit()

            f._setProperty('string1', 's', 'string')
            f._setProperty('float1', 3.14, 'float')
            f._setProperty('int1', 5, 'int')
            f._setProperty('long1', 2L**33, 'long')
            f._setProperty('date1', now, 'date')
            f._setProperty('date2', now, 'date_international')
            f._setProperty('text1', 'abc\ndef', 'text')
            f._setProperty('boolean0', 0, 'boolean')
            f._setProperty('boolean1', 1, 'boolean')
            transaction.commit()

            conn2 = self.db.open()
            try:
                app2 = conn2.root()['Application']
                f2 = app2.Holidays
                self.assertEqual(f2.string1, 's')
                self.assertEqual(f2.float1, 3.14)
                self.assertEqual(f2.int1, 5)
                self.assertEqual(f2.long1, 2L**33)
                self.assertEqual(f2.date1.ISO(), now.ISO())
                self.assertEqual(f2.date2.ISO(), now.ISO())
                self.assertEqual(f2.text1, 'abc\ndef')
                self.assertEqual(f2.boolean0, 0)
                self.assertEqual(f2.boolean1, 1)
            finally:
                conn2.close()

        finally:
            conn.close()
Example #4
def dateForProcessForm(field, field_date, form_dict=None):
    """Take a DateTime object or string and convert it into the keys that
    processForm expects

    If form_dict is not passed in a dictionary will be passed back. Otherwise
    the form dictionary will be updated.
    """
    if not isinstance(field_date, DateTime):
        field_date = DateTime(field_date)
    will_return = False
    if form_dict is None:
        will_return = True
        form_dict = {}
    form_dict[field] = field_date.ISO()
    form_dict['%s_year' % field] = field_date.year()
    form_dict['%s_month' % field] = field_date.month()
    form_dict['%s_day' % field] = field_date.day()
    form_dict['%s_hour' % field] = field_date.hour()
    form_dict['%s_minute' % field] = field_date.minute()
    if not will_return:
        return
    return form_dict
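The helper above expands a single date value into the field key plus the _year, _month, _day, _hour and _minute keys that processForm reads. A small illustrative call follows; the 'start_date' field name and the literal date are invented for the sketch.

from DateTime import DateTime

form = dateForProcessForm('start_date', DateTime('2015/06/01 14:30'))
# form now holds, roughly:
#   'start_date'        -> the full ISO string
#   'start_date_year'   -> 2015
#   'start_date_month'  -> 6
#   'start_date_day'    -> 1
#   'start_date_hour'   -> 14
#   'start_date_minute' -> 30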
Example #5
def export(self):
    response = self.REQUEST and self.REQUEST.RESPONSE or None
    maxdepth = self.REQUEST.get('maxdepth', MAXDEPTH)
    try:
        maxdepth = int(maxdepth)
    except:
        maxdepth = MAXDEPTH
    only_en = bool(self.REQUEST.get('onlyen', False))
    include_data = bool(self.REQUEST.get('include_data_folder', False))
    log = setupLog(self, response)
    start_time = DateTime()
    log.write('<h2>Contents metadata export</h2>')
    log.write('Starting at ' + ` start_time.ISO() `)
    log.write(
        'Maximum depth (set using maxdepth= in the URL): <strong>%d</strong>' %
        maxdepth)
    log.write(
        'English only (set using onlyen=1 in the URL)? <strong>%s</strong>' %
        only_en)
    log.write(
        'Include /data folder (set using include_data_folder=1 in the URL)? <strong>%s</strong>'
        % include_data)
    doRecurse = True

    pwt = getToolByName(self, 'portal_workflow')

    dialect = csv.excel()
    # we use semicolon instead of comma, seems to be Excel standard now
    dialect.delimiter = ';'
    dialect.quoting = csv.QUOTE_ALL

    fh = open(filename, 'w')
    fh.write(codecs.BOM_UTF8)
    writer = csv.writer(fh, dialect=dialect)
    writer.writerow(HEADER)

    statistics = dict()

    def do(item, doRecurse=True, level=0):
        if item.id in IGNORE:
            return
        # Only first XX levels:
        if maxdepth > -1 and level > maxdepth:
            doRecurse = False
        log.write("* Export item %s" % '/'.join(item.getPhysicalPath()))
        line = list()
        line.append('/'.join(item.getPhysicalPath()))
        line.append(
            hasattr(item.aq_explicit, 'Language') and item.Language() or '')
        country = hasattr(item.aq_explicit,
                          'getCountry') and item.getCountry() or ''
        if country:
            country = ','.join(country)
        line.append(country)
        line.append(
            hasattr(item.aq_explicit, 'created') and item.created().ISO()
            or 'n/a')
        line.append(
            hasattr(item.aq_explicit, 'modified') and item.modified().ISO()
            or 'n/a')
        title = item.title_or_id()
        line.append(get_unicode_text(title))
        description = hasattr(item.aq_explicit,
                              'Description') and item.Description() or ''
        # This sad hack is necessary to appease M$-Excel
        # Of course, LibreOffice and iWorks Numbers know what's an EOL and
        # what's not...
        description = description.replace('\r\n', ' ').replace('\n', ' ')
        line.append(get_unicode_text(description))
        line.append(
            hasattr(item.aq_explicit, 'Subject') and ','.join(item.Subject())
            or '')
        line.append(hasattr(item.aq_explicit, 'getNace') and \
            ','.join(item.getNace()) or "")
        line.append(hasattr(item.aq_explicit, 'getMultilingual_thesaurus') and \
            ','.join(item.getMultilingual_thesaurus()) or "")
        line.append(
            hasattr(item.aq_explicit, 'portal_type') and item.portal_type
            or item.meta_type)
        sizer = item.restrictedTraverse('getObjSize', None)
        try:
            size = sizer and sizer() or '0 kB'
        except:  # No blob file
            size = '0 kB'
        line.append(size)
        line.append(
            hasattr(item.aq_explicit, 'Creator') and item.Creator() or '')
        line.append(hasattr(item.aq_explicit, 'getRemoteLanguage') and \
            ','.join(item.getRemoteLanguage()) or '')
        try:
            wf_state = pwt.getInfoFor(item, 'review_state')
        except:
            wf_state = ''
        line.append(wf_state)
        start_date = hasattr(item.aq_explicit, 'start') and item.start()
        line.append(start_date and start_date.ISO() or '')
        end_date = hasattr(item.aq_explicit, 'end') and item.end()
        line.append(end_date and end_date.ISO() or '')

        writer.writerow([x and x.encode("UTF-8") or '' for x in line])

        if doRecurse and hasattr(item.aq_explicit, 'objectValues'):
            log.write('Contents of sub-folder %s' %
                      '/'.join(item.getPhysicalPath()))
            for id in item.objectIds():
                try:
                    ob = getattr(item, id)
                except:
                    ob = None
                if ob:
                    do(ob, doRecurse, level + 1)

    portal = getToolByName(self, 'portal_url').getPortalObject()
    langs = getToolByName(self, 'portal_languages').getSupportedLanguages()
    langs.sort()
    if only_en:
        langs = ['en']
    else:
        langs = ['en'] + [x for x in langs if x != 'en']
    if include_data:
        langs.append('data')

    for lang in langs:
        start = getattr(portal, lang, None)
        if start is None:
            print "No top-level folder for language %s" % lang
            continue
        log.write('<h3>Handling top-level folder "%s"</h3>' % lang)
        do(start, True, 0)

    fh.close()
    finished = DateTime()
    delta = (finished - start_time)
    log.write('<br/><br/>Finished at ' + ` finished.ISO() `)

    finish(self, response)
Example #6
    def index_html(self, REQUEST):
        """ """
        uid = REQUEST.form.get('uid')
        if not uid:
            # a missing uid can only mean this page is called by accident
            return
        date_for_roles = REQUEST.form.get('date_for_roles')

        if "," in uid:
            user = None
            roles = None
            multi = json.dumps({'users': uid.split(",")})
        else:
            multi = None
            user, roles = self._prepare_user_page(uid)

        is_auth = _is_authenticated(REQUEST)
        # we can only connect to ldap with bind=True if we have an
        # authenticated user
        agent = self._get_ldap_agent(bind=is_auth)

        user_dn = agent._user_dn(uid)
        log_entries = list(reversed(agent._get_metadata(user_dn)))
        VIEWS = {}
        filtered_roles = set([info[0] for info in roles])  # + owner_roles)
        if date_for_roles:
            filter_date = DateTime(date_for_roles).asdatetime().date()
        else:
            filter_date = DateTime().asdatetime().date()

        for entry in log_entries:
            date = DateTime(entry['timestamp']).toZone("CET")
            entry['timestamp'] = date.ISO()
            view = VIEWS.get(entry['action'])
            if not view:
                view = getMultiAdapter((self, self.REQUEST),
                                       name="details_" + entry['action'])
                VIEWS[entry['action']] = view
            entry['view'] = view

            _roles = entry.get('data', {}).get('roles')
            _role = entry.get('data', {}).get('role')
            if date.asdatetime().date() >= filter_date:
                if entry['action'] == 'ENABLE_ACCOUNT':
                    filtered_roles.difference_update(set(_roles))
                elif entry['action'] == "DISABLE_ACCOUNT":
                    filtered_roles.update(set(_roles))
                elif entry['action'] in ["ADDED_TO_ROLE"]:
                    if _role and _role in filtered_roles:
                        filtered_roles.remove(_role)
                elif entry['action'] in ["REMOVED_FROM_ROLE"]:
                    if _role:
                        filtered_roles.add(_role)

        output = []
        for entry in log_entries:
            if output:
                last_entry = output[-1]
                check = ['author', 'action']
                flag = True
                for k in check:
                    if last_entry[k] != entry[k]:
                        flag = False
                        break
                if flag:
                    last_entry['data'].append(entry['data'])
                else:
                    entry['data'] = [entry['data']]
                    output.append(entry)
            else:
                entry['data'] = [entry['data']]
                output.append(entry)

        removed_roles = []
        if user.get('status') == 'disabled':
            auth_user = self.REQUEST.AUTHENTICATED_USER
            if not bool(auth_user.has_permission(eionet_edit_users, self)):
                raise NotFound("User '%s' does not exist" % uid)
            # process log entries to list the roles the user had before
            # being disabled
            for entry in log_entries:
                if entry['action'] == 'DISABLE_ACCOUNT':
                    for role in entry['data'][0]['roles']:
                        try:
                            role_description = agent.role_info(
                                role)['description']
                        except:
                            role_description = ("This role doesn't exist "
                                                "anymore")
                        removed_roles.append((role, role_description))
                    break

        return self._render_template("zpt/userdetails/index.zpt",
                                     context=self,
                                     filtered_roles=filtered_roles,
                                     user=user,
                                     roles=roles,
                                     removed_roles=removed_roles,
                                     multi=multi,
                                     log_entries=output)
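The role filtering above hinges on collapsing both the requested cut-off and each log timestamp to plain calendar dates before comparing them. A stand-alone sketch of that comparison, with made-up timestamps:

from DateTime import DateTime

filter_date = DateTime('2019/03/01').asdatetime().date()
stamp = DateTime('2019/03/15 10:30:00 UTC').toZone('CET')
iso_text = stamp.ISO()                               # what the log entry displays
applies = stamp.asdatetime().date() >= filter_date   # True for this example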