Esempio n. 1
0
    def authenticateCredentials(self, credentials):
        """
        Authenticate credentials

        PAS plugin hook: this plugin never actually authenticates (it
        always returns None).  It only checks whether the user's password
        is older than the configured validity period and, if so, raises
        Unauthorized after flagging the response.
        """
        login = credentials.get('login')
        if not login:
            return None

        # Drop any cached auth result so the expiry check runs every time.
        self._invalidatePrincipalCache(login)
        user = api.user.get(username=login)
        if not user:
            return None
        # '2000/01/01' is a sentinel meaning "password_date never set".
        password_date = user.getProperty('password_date',
                                         DateTime('2000/01/01'))
        if password_date.Date() != '2000/01/01':
            current_time = DateTime()
            registry = getUtility(IRegistry)
            validity_period = registry['collective.pwexpiry.validity_period']
            since_last_pw_reset = days_since_event(password_date.asdatetime(),
                                                   current_time.asdatetime())
            # Password has expired
            if validity_period - since_last_pw_reset < 0:
                # Expose the expired user id so a challenge plugin can react.
                self.REQUEST.RESPONSE.setHeader('user_expired', user.getId())
                raise Unauthorized
        return None
    def authenticateCredentials(self, credentials):
        """
        Check if the user.password_date is older than validity_period.
        If validity_period is 0, skip the check
        """
        login = credentials.get('login')
        if not login:
            return None

        # Force the expiry check on every request for this login.
        self._invalidatePrincipalCache(login)
        user = api.user.get(username=login)
        if not user:
            return None

        registry = getUtility(IRegistry)
        validity_period = registry['collective.pwexpiry.validity_period']
        if validity_period == 0:
            # Feature disabled.
            return None

        # Ignore Managers
        if user.has_role('Manager'):
            return None

        password_date = user.getProperty('password_date', '2000/01/01')
        if str(password_date) == '2000/01/01':
            # Sentinel date: password_date was never set, nothing to check.
            return None

        elapsed_days = days_since_event(password_date.asdatetime(),
                                        DateTime().asdatetime())
        if elapsed_days > validity_period:
            # Password has expired: flag the response and refuse access.
            self.REQUEST.RESPONSE.setHeader('user_expired', user.getId())
            raise Unauthorized
        return None
Esempio n. 3
0
    def authenticateCredentials(self, credentials):
        """
        Check if the user.password_date is older than validity_period.
        If validity_period is 0, skip the check

        PAS plugin hook: always returns None (never authenticates by
        itself); raises Unauthorized when the password has expired.
        """
        login = credentials.get('login')
        if not login:
            return None

        # Drop any cached auth result so the check runs on every request.
        self._invalidatePrincipalCache(login)
        user = api.user.get(username=login)
        if not user:
            return None

        registry = getUtility(IRegistry)
        validity_period = registry['collective.pwexpiry.validity_period']
        if validity_period == 0:
            return None

        # Ignore Managers
        if user.has_role('Manager'):
            return None

        # '2000/01/01' is a sentinel meaning "password_date never set".
        password_date = user.getProperty('password_date', '2000/01/01')
        if str(password_date) != '2000/01/01':
            current_time = DateTime()
            since_last_pw_reset = days_since_event(password_date.asdatetime(),
                                                   current_time.asdatetime())
            # Password has expired
            if validity_period - since_last_pw_reset < 0:
                # Expose the expired user id so a challenge plugin can react.
                self.REQUEST.RESPONSE.setHeader('user_expired', user.getId())
                raise Unauthorized
        return None
Esempio n. 4
0
def update_password_expiry(site):
    """Recompute password-expiry state for every user of *site*.

    For each non-whitelisted user:
      - if the password has expired, flag the account for a forced reset;
      - if it expires in less than 2 days, send a notification email;
      - if password_date was never set, initialize it to "now".

    No-op when the feature is disabled or validity_period is 0.
    """
    setup_site(site)

    pwexpiry_enabled = api.portal.get_registry_record('plone.pwexpiry_enabled', default=False)
    validity_period = api.portal.get_registry_record('plone.pwexpiry_validity_period', default=0)

    # Hoisted out of the loop: these checks are loop-invariant, and the
    # original re-read the whitelist from the registry for every user.
    if not (pwexpiry_enabled and validity_period > 0):
        return
    whitelist = api.portal.get_registry_record('plone.pwexpiry_whitelisted_users', default=[])

    for user in api.user.get_users():
        if whitelist and user.getId() in whitelist:
            continue
        password_date = user.getProperty('password_date', None)
        current_time = DateTime()

        if password_date:
            since_last_pw_reset = days_since_event(
                password_date.asdatetime(),
                current_time.asdatetime()
            )

            remaining_days = validity_period - since_last_pw_reset
            if remaining_days < 0:
                # Password has expired
                user.setMemberProperties({
                    'reset_password_required': True,
                    'reset_password_time': time.time()
                })
                continue
            if remaining_days < 2:
                send_notification_email(user, remaining_days)
        else:
            # First time we see this user: start the expiry clock now.
            user.setMemberProperties({
                'password_date': current_time
            })
Esempio n. 5
0
def convert_to_datetime(date):
    """Normalize *date* to a python ``datetime`` (or ``None``).

    Accepts ``None`` / the literal string ``'None'``, plain date strings
    and Zope ``DateTime`` objects; any other value is returned unchanged.
    """
    result = date
    if result in (None, 'None'):
        return None
    # Strings are first parsed into a Zope DateTime ...
    if isinstance(result, basestring):
        result = DateTime(result)
    # ... which is then converted to a python datetime.
    if isinstance(result, DateTime):
        result = result.asdatetime()
    return result
Esempio n. 6
0
    def create(self,
               description,
               location_uid,
               date,
               time,
               duration,
               private,
               member=None):
        """Create a GroupFinderEvent in the current context.

        :param description: event description; truncated to 40 chars for
            the title.
        :param location_uid: UID of the location object.
        :param date: date string, e.g. '2020/01/31'.
        :param time: start time string 'HH:MM' (NOTE: shadows the stdlib
            ``time`` module inside this method).
        :param duration: duration in minutes (string or int).
        :param private: 'True'/'False' string, encoded as '1'/'0' in the id.
        :param member: optional email address of the member the event is
            created for; defaults to the authenticated member.
        """
        if getToolByName(self.context, 'portal_membership').isAnonymousUser():
            return self.template()

        # Encode the private flag as '1'/'0' for use in the event id.
        if private == 'True':
            private = '1'
        else:
            private = '0'

        # Random suffix keeps ids unique for multiple events on one date.
        # (renamed from `id`, which shadowed the builtin)
        rand_id = str(random.randint(1, 10000))
        eid = "gf-event-" + date + "-" + rand_id + "-" + private

        if len(description) > 40:
            description = description[0:40] + "..."

        # Time Setup: end = start + duration minutes.
        # (removed unused `fTime = time.split(":")` and the redundant
        # `DateTime(start)` copy before the conversion)
        start = DateTime(date + " " + time + " US/CENTRAL")
        tmp = start.asdatetime() + datetime.timedelta(minutes=int(duration))
        end = DateTime(tmp)

        #_createObjectByType bypasses permission security.
        _createObjectByType("GroupFinderEvent",
                            self.context,
                            id=eid,
                            title=description,
                            location=location_uid)
        obj = self.context.get(eid, None)

        obj.setTitle(description)
        obj.setLocation(location_uid)

        if member is None:
            # Self-created event: authenticated user is creator + attendee.
            member = getToolByName(
                self.context, 'portal_membership').getAuthenticatedMember()
            obj.setCreators(
                ["Confidential",
                 member.getUserName(),
                 member.getUserName()])
            obj.setAttendees(member.getUserName())
        else:
            # Staff-created event: `member` is an email, use its local part.
            name = member.split('@')
            staff = getToolByName(
                self.context, 'portal_membership').getAuthenticatedMember()
            obj.setCreators(["Staff", name[0], staff.getUserName()])
            obj.setAttendees(name[0])
        obj.setStartDate(start)
        obj.setEndDate(end)
        obj.reindexObject()
Esempio n. 7
0
 def booking_date(self):
     ''' Compute the booking date as passed in the request

     Defaults to "now" when the form has no booking_date key at all.
     NOTE(review): returns [] (not a date) when the form explicitly
     supplies a falsy value such as '' - callers appear to rely on this.
     '''
     booking_date = self.request.form.get('booking_date', DateTime())
     if not booking_date:
         return []
     # The form value arrives as a string; parse it into a DateTime.
     if isinstance(booking_date, basestring):
         booking_date = DateTime(booking_date)
     return booking_date.asdatetime().date()
Esempio n. 8
0
 def testBasicTZ(self):
     """A fixed-offset tzinfo survives the datetime -> DateTime round trip."""
     # psycopg2 supplies it's own tzinfo instances, with no `zone` attribute
     tz = FixedOffset(60, 'GMT+1')
     dt1 = datetime(2008, 8, 5, 12, 0, tzinfo=tz)
     DT = DateTime(dt1)
     dt2 = DT.asdatetime()
     # The UTC offsets must be preserved across the conversion.
     offset1 = dt1.tzinfo.utcoffset(dt1)
     offset2 = dt2.tzinfo.utcoffset(dt2)
     self.assertEqual(offset1, offset2)
 def testBasicTZ(self):
     """The UTC offset survives a datetime -> DateTime -> datetime trip."""
     # psycopg2 supplies it's own tzinfo instances, with no `zone` attribute
     fixed = FixedOffset(60, 'GMT+1')
     original = datetime(2008, 8, 5, 12, 0, tzinfo=fixed)
     round_tripped = DateTime(original).asdatetime()
     self.assertEqual(
         original.tzinfo.utcoffset(original),
         round_tripped.tzinfo.utcoffset(round_tripped))
Esempio n. 10
0
    def isComplete(self):
        """Compute the final degree mark for the graduating student.

        When self.laureando_valutazione (final-exam evaluation) is set,
        updates self.laureando_voto_di_laurea (final mark, capped at 110)
        and self.laureando_lode (honors flag) in place and returns 1;
        otherwise returns 0.
        """
        if self.laureando_valutazione:
            # Reference formula (translated from the Italian original):
            #   mark = 11/3 * M + C + P + L + E
            # with the constraints:
            #   (C + P + L + E) <= 11
            #   the final-exam mark V is computed via C <= 8/27 M
            #   (L + E) <= 2
            # max_voto_proposto = round(8*(self.laureando_avg)/27)

            zope_DT = DateTime()
            python_dt = zope_DT.asdatetime()
            curr_year = python_dt.year

            bonus_temporale = 0

            if self.laureando_nlodi:
                # One bonus point per 3 "cum laude" grades (py2 int division
                # when nlodi is an int - presumably intentional; confirm).
                bonus_lodi = ((self.laureando_nlodi) / 3)
            else:
                bonus_lodi = 0

            if self.laureando_anno_immatricolazione:
                # Time bonus: +1 when graduating 4 years after enrollment,
                # +2 when graduating exactly on time (3 years).
                if (curr_year - (self.laureando_anno_immatricolazione) -
                        3) == 1:
                    bonus_temporale = 1
                if (curr_year - (self.laureando_anno_immatricolazione) -
                        3) == 0:
                    bonus_temporale = 2

            # The laude bonus is capped at 2 points.
            if bonus_lodi > 2:
                bonus_lodi = 2

            # Total extra points (bonuses + evaluation) are capped at 11.
            if (bonus_temporale + bonus_lodi +
                    self.laureando_valutazione) > 11:
                totale = (self.laureando_avg) + 11
            else:
                totale = (
                    self.laureando_avg
                ) + bonus_temporale + bonus_lodi + self.laureando_valutazione

            # Final mark is capped at 110.
            if round(totale) > 110:
                self.laureando_voto_di_laurea = int(110)
            else:
                self.laureando_voto_di_laurea = int(round(totale))

            # Honors ("lode") when the scaled average reaches 28.
            if (3 * (self.laureando_avg) / 11) >= 28:
                self.laureando_lode = True
            else:
                self.laureando_lode = False

            return 1
        else:
            return 0
Esempio n. 11
0
 def testConversions(self):
     """Round-trip datetime <-> DateTime conversions keep tz information."""
     sdt0 = datetime.now() # this is a timezone naive datetime
     dt0 = DateTime(sdt0)
     assert dt0.timezoneNaive() is True, (sdt0, dt0)
     sdt1 = datetime(2007, 10, 4, 18, 14, 42, 580, pytz.utc)
     dt1 = DateTime(sdt1)
     assert dt1.timezoneNaive() is False, (sdt1, dt1)

     # convert back
     sdt2 = dt0.asdatetime()
     self.assertEqual(sdt0, sdt2)
     sdt3 = dt1.utcdatetime() # this returns a timezone naive datetime
     self.assertEqual(sdt1.hour, sdt3.hour)

     # A fixed-offset ISO string normalizes to UTC.
     dt4 = DateTime('2007-10-04T10:00:00+05:00')
     sdt4 = datetime(2007, 10, 4, 5, 0)
     self.assertEqual(dt4.utcdatetime(), sdt4)
     self.assertEqual(dt4.asdatetime(), sdt4.replace(tzinfo=pytz.utc))

     # A named zone gives the same DateTime via either construction path.
     dt5 = DateTime('2007-10-23 10:00:00 US/Eastern')
     tz = pytz.timezone('US/Eastern')
     sdt5 = datetime(2007, 10, 23, 10, 0, tzinfo=tz)
     dt6 = DateTime(sdt5)
     self.assertEqual(dt5.asdatetime(), sdt5)
     self.assertEqual(dt6.asdatetime(), sdt5)
     self.assertEqual(dt5, dt6)
     self.assertEqual(dt5.asdatetime().tzinfo, tz)
     self.assertEqual(dt6.asdatetime().tzinfo, tz)
    def testConversions(self):
        """Round-trip conversions between python datetime and DateTime."""
        # Naive datetimes stay naive; aware ones stay aware.
        naive = datetime.now()  # this is a timezone naive datetime
        dt_naive = DateTime(naive)
        self.assertTrue(dt_naive.timezoneNaive(), (naive, dt_naive))
        aware = datetime(2007, 10, 4, 18, 14, 42, 580, pytz.utc)
        dt_aware = DateTime(aware)
        self.assertFalse(dt_aware.timezoneNaive(), (aware, dt_aware))

        # Converting back recovers the original value.
        self.assertEqual(naive, dt_naive.asdatetime())
        # utcdatetime() returns a timezone naive datetime.
        self.assertEqual(aware.hour, dt_aware.utcdatetime().hour)

        # A fixed-offset ISO string normalizes to UTC.
        dt_offset = DateTime('2007-10-04T10:00:00+05:00')
        utc_equiv = datetime(2007, 10, 4, 5, 0)
        self.assertEqual(dt_offset.utcdatetime(), utc_equiv)
        self.assertEqual(dt_offset.asdatetime(),
                         utc_equiv.replace(tzinfo=pytz.utc))

        # A named zone gives equal DateTimes via either construction path.
        dt_from_string = DateTime('2007-10-23 10:00:00 US/Eastern')
        eastern = pytz.timezone('US/Eastern')
        sdt_eastern = datetime(2007, 10, 23, 10, 0, tzinfo=eastern)
        dt_from_datetime = DateTime(sdt_eastern)
        self.assertEqual(dt_from_string.asdatetime(), sdt_eastern)
        self.assertEqual(dt_from_datetime.asdatetime(), sdt_eastern)
        self.assertEqual(dt_from_string, dt_from_datetime)
        self.assertEqual(dt_from_string.asdatetime().tzinfo, eastern)
        self.assertEqual(dt_from_datetime.asdatetime().tzinfo, eastern)
Esempio n. 13
0
    def createFeedback(self, feedback_txt):
        """Store user feedback as a Document in the 'Feedback Admin' folder.

        Creates the folder on first use, records the feedback text together
        with metadata about the current context page (title, UID, path),
        then redirects to the success URL.

        :param feedback_txt: the feedback body text submitted by the user.
        """
        log = logging.getLogger('createFeedback:')
        folder_conteudo = 'Feedback Admin'
        site = getSite().siproquim2
        id_folder = queryUtility(IIDNormalizer).normalize(folder_conteudo)

        # Create the container folder the first time feedback is submitted.
        if not hasattr(site, id_folder):
            site.invokeFactory('Folder', id=id_folder, title=folder_conteudo)

        folderFeedback = getattr(site, id_folder)

        # Metadata about the page the feedback refers to
        # (titulo=title, caminho=path in Portuguese).
        paginaContext = {
            'titulo': self.context.Title(),
            'uid': self.context.UID(),
            'caminho': '/'.join(self.context.getPhysicalPath()),
        }

        zope_DT = DateTime()  # this is now.
        python_dt = zope_DT.asdatetime()
        zope_DT = DateTime(python_dt)
        data_feedback = zope_DT.strftime('%d/%m/%Y-%H:%M')
        # '%s' yields epoch seconds; combined with the timestamp it keeps
        # generated ids unique.
        data_milisecond = zope_DT.strftime('%s')

        titulo_content = 'Feedback ' + ' - ' + data_feedback + ' - ' + paginaContext[
            'uid']
        id_content = 'feedback ' + data_feedback + '-' + data_milisecond
        # Renamed from `id`, which shadowed the builtin.
        content_id = queryUtility(IIDNormalizer).normalize(id_content)

        _createObjectByType('Document',
                            folderFeedback,
                            content_id,
                            title=titulo_content,
                            description=paginaContext['caminho'],
                            location=paginaContext['uid'],
                            creators='anonimo',
                            text=feedback_txt)

        obj = getattr(folderFeedback, content_id)
        if obj:
            obj.setTitle(titulo_content)
            # BUG FIX: these three calls previously ended with stray
            # trailing commas, turning each statement into a discarded
            # one-element tuple expression.
            obj.setDescription(paginaContext['caminho'])
            obj.setText(feedback_txt)
            obj.setLocation(paginaContext['uid'])
            obj.setCreators('anonimo')
            obj.reindexObject()

        log.info(content_id)
        return self.request.response.redirect(self.url_sucess)
Esempio n. 14
0
def years_voc(self):
    """Vocabulary of enrollment years: from 10 to 3 years before now.

    Returns a SimpleVocabulary with the eight years in ascending order.
    """
    current_year = DateTime().asdatetime().year
    # Inclusive range [current-10, current-3], replacing the manual loop.
    years = list(range(int(current_year) - 10, int(current_year) - 3 + 1))
    return SimpleVocabulary.fromValues(years)
Esempio n. 15
0
def static_modification_date_getter_factory(dt_value, type_=DateTime):
    """Returns a static time replacement for modification date accessors,
    configured with the given datetime value and the indicated type_.
    """
    # Coerce the stored value into the requested date type up front.
    needs_zope_type = isinstance(dt_value, datetime) and type_ is DateTime
    needs_python_type = isinstance(dt_value, DateTime) and type_ is datetime
    if needs_zope_type:
        dt_value = DateTime(dt_value)
    elif needs_python_type:
        dt_value = dt_value.asdatetime()

    def static_modification_date_getter(self, value=None):
        # Ignore any passed value; always report the configured date.
        return dt_value

    return static_modification_date_getter
Esempio n. 16
0
def static_wc_info_factory(dt_value, type_=DateTime):
    """Returns a static time replacement for creation date accessors,
    configured with the given datetime value and the indicated type_ for
    working copy support.
    """
    # Convert between the python and Zope date types as requested.
    if type_ is DateTime and isinstance(dt_value, datetime):
        dt_value = DateTime(dt_value)
    elif type_ is datetime and isinstance(dt_value, DateTime):
        dt_value = dt_value.asdatetime()

    def static_wc_info(self):
        # Closure over the (converted) value: always the same date.
        return dt_value

    return static_wc_info
Esempio n. 17
0
    def authenticateCredentials(self, credentials):
        """
        Check if the user.password_date is older than validity_period.
        If validity_period is 0, skip the check

        PAS plugin hook: always returns None (never authenticates by
        itself); raises Unauthorized and flags the user for a password
        reset when the password has expired.
        """
        login = credentials.get('login')
        if not login:
            return None

        # Drop any cached auth result so the check runs on every request.
        self._invalidatePrincipalCache(login)
        user = api.user.get(username=login)
        if not user:
            return None

        pwexpiry_enabled = api.portal.get_registry_record(
            'plone.pwexpiry_enabled', default=False)
        validity_period = api.portal.get_registry_record(
            'plone.pwexpiry_validity_period', default=0)
        if validity_period == 0 or not pwexpiry_enabled:
            return None

        # Ignore whitelisted
        whitelisted = api.portal.get_registry_record(
            'plone.pwexpiry_whitelisted_users', default=[])
        if whitelisted and user.getId() in whitelisted:
            return None

        # BUG FIX: current_time was previously assigned only inside the
        # "password_date set" branch, so the else branch below raised
        # NameError. Define it before branching.
        current_time = DateTime()
        # '2000/01/01' is a sentinel meaning "password_date never set".
        password_date = user.getProperty('password_date', '2000/01/01')
        if str(password_date) != '2000/01/01':
            since_last_pw_reset = days_since_event(password_date.asdatetime(),
                                                   current_time.asdatetime())
            # Password has expired
            if validity_period - since_last_pw_reset < 0:
                user.setMemberProperties({
                    'reset_password_required': True,
                    'reset_password_time': time.time()
                })
                self.REQUEST.RESPONSE.setHeader('user_expired', user.getId())
                raise Unauthorized
        else:
            # First check for this user: start the expiry clock now.
            user.setMemberProperties({'password_date': current_time})
        return None
    def authenticateCredentials(self, credentials):
        """
        Check if the user.password_date is older than validity_period.

        PAS plugin hook: always returns None (never authenticates by
        itself); raises Unauthorized when the password has expired.
        """
        login = credentials.get('login')
        if not login:
            return None

        # Drop any cached auth result so the check runs on every request.
        self._invalidatePrincipalCache(login)
        user = api.user.get(username=login)
        if not user:
            return None

        registry = getUtility(IRegistry)

        # Checks if validity is enabled, 0 disabled
        validity_period = registry['collective.pwexpiry.validity_period']
        if validity_period == 0:
            return None

        # Ignore Managers
        if user.has_role('Manager'):
            return None

        # Ignore whitelisted
        whitelisted = registry.get('collective.pwexpiry.whitelisted_users')
        if whitelisted and user.getId() in whitelisted:
            return None

        # BUG FIX: current_time was previously assigned only inside the
        # "password_date set" branch, so the else branch below raised
        # NameError. Define it before branching.
        current_time = DateTime()
        # '2000/01/01' is a sentinel meaning "password_date never set".
        password_date = user.getProperty('password_date', '2000/01/01')
        if str(password_date) != '2000/01/01':
            since_last_pw_reset = days_since_event(password_date.asdatetime(),
                                                   current_time.asdatetime())
            # Password has expired
            if validity_period - since_last_pw_reset < 0:
                self.REQUEST.RESPONSE.setHeader('user_expired', user.getId())
                raise Unauthorized
        # If not password_date set, set current
        else:
            user.setMemberProperties({'password_date': current_time})
        return None
Esempio n. 19
0
 def create(self,description,location_uid,date,time,duration,private,member=None):
     """Create a GroupFinderEvent object for the given date/time/duration.

     NOTE(review): the `time` parameter shadows the stdlib module and the
     local `id` shadows the builtin; `fTime` below is computed but never
     used.
     """
     if getToolByName(self.context, 'portal_membership').isAnonymousUser():
         return self.template()

     # Encode the private flag as '1'/'0' for use in the event id.
     if private == 'True':
         private  = '1'
     else:
         private  = '0'

     # Random suffix keeps ids unique for multiple events on one date.
     id = str(random.randint(1, 10000))
     eid = "gf-event-"+date+"-"+id+"-"+private

     # Truncate long descriptions for use as the title.
     if len(description) > 40:
         description = description[0:40] + "..."

     # Time Setup: end = start + duration minutes.
     fTime = time.split(":")
     start = DateTime(date + " " + time + " US/CENTRAL")
     end = DateTime(start)
     tmp = end.asdatetime() + datetime.timedelta(minutes=int(duration))
     end = DateTime(tmp)

     #_createObjectByType bypasses permission security.
     _createObjectByType("GroupFinderEvent", self.context, id=eid, title=description, location=location_uid)
     obj = self.context.get(eid, None)

     obj.setTitle(description)
     obj.setLocation(location_uid)

     if member == None:
         # Self-created event: authenticated user is creator and attendee.
         member = getToolByName(self.context, 'portal_membership').getAuthenticatedMember()
         obj.setCreators(["Confidential",member.getUserName(),member.getUserName()])
         obj.setAttendees(member.getUserName())
     else:
         # Staff-created event: `member` is an email, use its local part.
         name = member.split('@')
         staff = getToolByName(self.context, 'portal_membership').getAuthenticatedMember()
         obj.setCreators(["Staff",name[0],staff.getUserName()])
         obj.setAttendees(name[0])
     obj.setStartDate(start)
     obj.setEndDate(end)
     obj.reindexObject()
Esempio n. 20
0
    def authenticateCredentials(self, credentials):
        """
        Check if the user.password_date is older than validity_period.
        If validity_period is 0, skip the check

        PAS plugin hook: always returns None (never authenticates by
        itself); raises Unauthorized when the password has expired.
        """
        login = credentials.get("login")
        if not login:
            return None

        # Drop any cached auth result so the check runs on every request.
        self._invalidatePrincipalCache(login)
        user = api.user.get(username=login)
        if not user:
            return None

        registry = getUtility(IRegistry)
        validity_period = registry["collective.pwexpiry.validity_period"]
        if validity_period == 0:
            return None

        # Ignore Managers
        if user.has_role("Manager"):
            return None

        # Ignore whitelisted
        whitelisted = api.portal.get_registry_record(
            "collective.pwexpiry.whitelisted_users"
        )
        if whitelisted and user.getId() in whitelisted:
            return None

        # "2000/01/01" is a sentinel meaning "password_date never set".
        password_date = user.getProperty("password_date", "2000/01/01")
        if str(password_date) != "2000/01/01":
            current_time = DateTime()
            since_last_pw_reset = days_since_event(
                password_date.asdatetime(), current_time.asdatetime()
            )
            # Password has expired
            if validity_period - since_last_pw_reset < 0:
                # Expose the expired user id so a challenge plugin can react.
                self.REQUEST.RESPONSE.setHeader("user_expired", user.getId())
                raise Unauthorized
        return None
    def getSeatsPropertiesForLogin(self, login):
        """Return {'maxSeats', 'seatTimeoutInMinutes'} for the given login.

        Values come from the member data tool and are cached per login in
        an OOBTree until the cached entry's 'expireTime' passes. Defaults
        to 1 seat / 5 minutes when no member is found.
        """
        # initialize max_seats at 1
        max_seats = 1
        seat_timeout = 5  # initialize to 5 minutes

        # Lazily create the per-login cache on first use.
        if self.login_member_data_mapping is None:
            self.login_member_data_mapping = OOBTree(
            )  # if this has not been initialized then do it now
            if self.DEBUG:
                print "Initialized the Login Member Data Mapping"

        # if the max_seats has a valid cached value, then use it
        cached_member_data = self.login_member_data_mapping.get(login, None)

        now = DateTime()
        # Use the cache only when it has all three keys and has not expired.
        if cached_member_data and 'expireTime' in cached_member_data and 'maxSeats' in cached_member_data and 'seatTimeoutInMinutes' in cached_member_data and now < cached_member_data[
                'expireTime']:
            max_seats = cached_member_data['maxSeats']
            seat_timeout = cached_member_data['seatTimeoutInMinutes']
        else:
            member = self.getMember(login)
            # get the max_seats property from the member data tool
            if member is not None:
                max_seats = member.getProperty("max_seats")
                seat_timeout = member.getProperty("seat_timeout_in_minutes")
                # cache the max_seats for login; the entry expires
                # seat_timeout minutes from now
                td_seat_timeout = datetime.timedelta(minutes=seat_timeout)
                self.login_member_data_mapping[login] = {
                    'maxSeats': int(max_seats),
                    'seatTimeoutInMinutes': float(seat_timeout),
                    'expireTime': DateTime(now.asdatetime() + td_seat_timeout)
                }

        return {
            'maxSeats': int(max_seats),
            'seatTimeoutInMinutes': float(seat_timeout)
        }
Esempio n. 22
0
    def pwexpiry(self, user):
        """Return True when the user's password has expired, else False.

        Side effects: on expiry, sets reset_password_required and
        reset_password_time on the user; on first check (sentinel date),
        initializes password_date to "now". Whitelisted users and a
        disabled/zero validity period skip the check entirely.
        """
        pwexpiry_enabled = api.portal.get_registry_record('plone.pwexpiry_enabled', default=False)
        validity_period = api.portal.get_registry_record('plone.pwexpiry_validity_period', default=0)
        if pwexpiry_enabled and validity_period > 0:
            whitelist = api.portal.get_registry_record('plone.pwexpiry_whitelisted_users', default=[])
            whitelisted = whitelist and user.getId() in whitelist
            if not whitelisted:
                # NOTE(review): the default here is a *string*, but
                # .strftime()/.asdatetime() below assume a DateTime --
                # presumably the property is always stored as a DateTime;
                # confirm before relying on the default.
                password_date = user.getProperty(
                    'password_date',
                    '2000/01/01'
                )
                current_time = DateTime()
                editableUser = api.user.get(username=user.getId())
                if password_date.strftime('%Y/%m/%d') != '2000/01/01':
                    since_last_pw_reset = days_since_event(
                        password_date.asdatetime(),
                        current_time.asdatetime()
                    )

                    '''
                    depending how you intepret the setting, it might make
                    more sense to check if it's <= 0 instead.
                    Leaving as strictly LT for now.
                    '''
                    if validity_period - since_last_pw_reset < 0:
                        # Password has expired
                        editableUser.setMemberProperties({
                            'reset_password_required': True,
                            'reset_password_time': time.time()
                        })
                        return True
                else:
                    # Sentinel date: record "now" as the initial value.
                    editableUser.setMemberProperties({
                        'password_date': current_time
                    })
        return False
Esempio n. 23
0
def notify_and_expire():
    """
    For each registered user check all the conditions and execute
    the notification action

    Iterates every user in source_users, initializes password_date when
    unset, and otherwise runs every registered IExpirationCheck adapter
    (optionally filtered by the registry's notification_actions list),
    guarding against duplicate notifications.
    """

    portal = api.portal.get()
    registry = getUtility(IRegistry)
    validity_period = registry['collective.pwexpiry.validity_period']

    if validity_period == 0:
        # do not do any notifications, if password expiration has been disabled
        return

    # Optional registry filter restricting which notification adapters run.
    notifications_to_use = set()
    if 'collective.pwexpiry.notification_actions' in registry:
        notifications_to_use = registry[
            'collective.pwexpiry.notification_actions']
    current_time = portal.ZopeTime()
    local_tz = current_time.timezone()
    for user_id in portal.acl_users.source_users.getUserIds():
        user = portal.portal_membership.getMemberById(user_id)
        # '2000/01/01' is a sentinel meaning "never set".
        password_date = DateTime(
            user.getProperty('password_date', '2000/01/01'))
        last_notification_date = DateTime(
            user.getProperty('last_notification_date', '2000/01/01'))
        last_notification_date = last_notification_date.toZone(local_tz)
        if password_date == DateTime('2000/01/01'):
            # The user has not changed the password yet - the current time
            # is set as the initial value
            user.setMemberProperties({'password_date': current_time})
            logger.info('Set new password reset date for user: %s' % user_id)
        else:
            # Counting days difference since the user reset his password
            since_last_pw_reset = days_since_event(password_date.asdatetime(),
                                                   current_time.asdatetime())
            # Counting days difference since the notification has been sent to the user
            since_last_notification = days_since_event(
                last_notification_date.asdatetime(), current_time.asdatetime())
            # Number of days before the user's password expires
            days_to_expire = validity_period - since_last_pw_reset

            # Search for registered notifications and execute them
            notifications = getAdapters((portal, ), IExpirationCheck)
            for notification_name, notification in notifications:
                if notifications_to_use and notification_name not in notifications_to_use:
                    msg = (
                        "Skipping notification %s because it is not in "
                        "registry['collective.pwexpiry.notification_actions']")
                    logger.debug(msg % notification_name)
                    continue
                if notification(days_to_expire):
                    try:
                        # Protection of sending the expired notification email twice
                        pwres_to_notif = days_since_event(
                            password_date.asdatetime(),
                            last_notification_date.asdatetime())
                        if pwres_to_notif > validity_period:
                            logger.warning('Omitting notification for user: \'%s\' ' \
                                           'because the expiration email has already ' \
                                           'been sent once.' % (user_id))
                            break
                        # Protection of sending the notification email twice
                        if since_last_notification < 1:
                            logger.warning('Omitting notification for user: \'%s\' ' \
                                           'because the notification has been already ' \
                                           'sent today.' % (user_id))
                            break

                        # Executing the notification action and updating user's property
                        notification.notification_action(user, days_to_expire)
                        logger.info('Triggered %s action for user: %s' %
                                    (notification_name, user_id))
                        user.setMemberProperties(
                            {'last_notification_date': current_time})
                    except Exception, exc:
                        # Continue with the script even in case of problems
                        logger.error('Error while performing notification: %s ' \
                                  'for user: %s: %s' % (notification_name, user_id, exc))
                        continue
Esempio n. 24
0
class DashboardView(BrowserView):
    """Main dashboard view: renders informative panels and time-evolution
    charts for analyses, analysis requests, worksheets and samples.
    """
    template = ViewPageTemplateFile("templates/dashboard.pt")

    def __init__(self, context, request):
        """Initialize per-request state.

        :param context: content object the view is bound to
        :param request: the current HTTP request
        """
        BrowserView.__init__(self, context, request)
        # Parsed dashboard filter cookie (dict); populated in __call__
        self.dashboard_cookie = None
        # Authenticated member; populated in __call__
        self.member = None

    def __call__(self):
        """Render the dashboard template, or redirect to the frontpage
        when the dashboard is disabled in setup or the user is anonymous.
        """
        frontpage_url = "%s/senaite-frontpage" % self.portal_url
        if not self.context.bika_setup.getDashboardByDefault():
            # Dashboard switched off in bika_setup: show the frontpage
            self.request.response.redirect(frontpage_url)
            return

        membership = getToolByName(self.context, 'portal_membership')
        if membership.isAnonymousUser():
            # Anonymous visitors never get the dashboard
            self.request.response.redirect(frontpage_url)
            return

        self.member = membership.getAuthenticatedMember()
        self._init_date_range()
        self.dashboard_cookie = self.check_dashboard_cookie()
        return self.template()

    def check_dashboard_cookie(self):
        """Return the dashboard filter settings stored in the request
        cookie, creating the cookie with default values when it is
        missing.

        :return: a dictionary of strings mapping section ids to filters
        """
        raw = self.request.get(DASHBOARD_FILTER_COOKIE, None)
        if raw is not None:
            # Cookie already present: decode and normalize to strings
            return get_strings(json.loads(raw))

        # First visit: persist the defaults and return them directly
        defaults = self._create_raw_data()
        self.request.response.setCookie(
            DASHBOARD_FILTER_COOKIE,
            json.dumps(defaults),
            quoted=False,
            path='/')
        return defaults

    def is_filter_selected(self, selection_id, value):
        """Tell whether the filter stored in the dashboard cookie under
        *selection_id* equals *value*.

        :param selection_id: dashboard cookie key
        :param value: value to compare against the stored one
        :return: Boolean
        """
        return value == self.dashboard_cookie.get(selection_id)

    def is_admin_user(self):
        """Tell whether the current user manages the lab.

        :return: True when the user holds the LabManager or Manager role
        """
        current_roles = api.user.get_current().getRoles()
        return any(role in current_roles
                   for role in ('LabManager', 'Manager'))

    def _create_raw_data(self):
        """Build the initial cookie payload: every visible section id is
        mapped to the default 'all' filter.

        :return: dict like {'analyses': 'all', 'worksheets': 'all', ...}
        """
        return dict((section.get('id'), 'all')
                    for section in self.get_sections())

    def _init_date_range(self):
        """ Sets the date range from which the data must be retrieved.
            Sets the values to the class parameters 'date_from',
            'date_to', 'date_range', and self.periodicity
            Calculates the date range according to the value of the
            request's 'p' parameter:
            - 'd' (daily)
            - 'w' (weekly)
            - 'm' (monthly)
            - 'q' (quarterly)
            - 'b' (biannual)
            - 'y' (yearly)
            - 'a' (all-time)

            Also sets self.min_date (lower bound for the time-evolution
            charts) and the catalog range queries self.date_range,
            self.base_date_range and self.min_date_range.
        """
        # By default, weekly
        self.periodicity = self.request.get('p', 'w')
        if (self.periodicity == 'd'):
            # Daily.  NOTE(review): DateTime() carries the current time,
            # so the window runs "now" to "now + 1 day" rather than
            # midnight-to-midnight -- confirm intended.
            self.date_from = DateTime()
            self.date_to = DateTime() + 1
            # For time-evolution data, load last 30 days
            self.min_date = self.date_from - 30
        elif (self.periodicity == 'm'):
            # Monthly: from the 1st to the last day of the current month
            today = datetime.date.today()
            self.date_from = DateTime(today.year, today.month, 1)
            self.date_to = DateTime(today.year, today.month, monthrange(today.year, today.month)[1], 23, 59, 59)
            # For time-evolution data, load last two years
            min_year = today.year - 1 if today.month == 12 else today.year - 2
            min_month = 1 if today.month == 12 else today.month
            self.min_date = DateTime(min_year, min_month, 1)
        elif (self.periodicity == 'q'):
            # Quarterly: 'm' is the first month of the current quarter
            # (relies on Python 2 integer '/' division)
            today = datetime.date.today()
            m = (((today.month-1)/3)*3)+1
            self.date_from = DateTime(today.year, m, 1)
            self.date_to = DateTime(today.year, m+2, monthrange(today.year, m+2)[1], 23, 59, 59)
            # For time-evolution data, load last four years
            min_year = today.year - 4 if today.month == 12 else today.year - 5
            self.min_date = DateTime(min_year, m, 1)
        elif (self.periodicity == 'b'):
            # Biannual: 'm' is the first month of the current semester
            # (relies on Python 2 integer '/' division)
            today = datetime.date.today()
            m = (((today.month-1)/6)*6)+1
            self.date_from = DateTime(today.year, m, 1)
            self.date_to = DateTime(today.year, m+5, monthrange(today.year, m+5)[1], 23, 59, 59)
            # For time-evolution data, load last ten years
            min_year = today.year - 10 if today.month == 12 else today.year - 11
            self.min_date = DateTime(min_year, m, 1)
        elif (self.periodicity == 'y'):
            # Yearly: the whole current calendar year
            today = datetime.date.today()
            self.date_from = DateTime(today.year, 1, 1)
            self.date_to = DateTime(today.year, 12, 31, 23, 59, 59)
            # For time-evolution data, load last 15 years
            min_year = today.year - 15 if today.month == 12 else today.year - 16
            self.min_date = DateTime(min_year, 1, 1)
        elif (self.periodicity == 'a'):
            # All time (the evolution chart still loads only 15 years)
            today = datetime.date.today()
            self.date_from = DateTime('1990-01-01 00:00:00')
            self.date_to = DateTime(today.year, 12, 31, 23, 59, 59)
            # For time-evolution data, load last 15 years
            min_year = today.year - 15 if today.month == 12 else today.year - 16
            self.min_date = DateTime(min_year, 1, 1)
        else:
            # weekly (the default).
            # NOTE(review): isocalendar()'s dow is 1..7 (Mon..Sun) and
            # DateTime() carries the current time, so the window starts
            # 'dow' days ago (the previous Sunday) at the current time
            # -- confirm intended.
            today = datetime.date.today()
            year, weeknum, dow = today.isocalendar()
            self.date_from = DateTime() - dow
            self.date_to = self.date_from + 7
            # For time-evolution data, load last six months
            min_year = today.year if today.month > 6 else today.year - 1
            min_month = today.month - 6 if today.month > 6 else (today.month - 6)+12
            self.min_date = DateTime(min_year, min_month, 1)

        # Catalog range queries derived from the bounds computed above
        self.date_range = {'query': (self.date_from, self.date_to), 'range': 'min:max'}
        self.base_date_range = {'query': (DateTime('1990-01-01 00:00:00'), DateTime()+1), 'range':'min:max'}
        self.min_date_range = {'query': (self.min_date, self.date_to), 'range': 'min:max'}

    def get_sections(self):
        """Return the dashboard sections visible to the current user.

        Every section is a dictionary with the following structure:
            {'id': <section_identifier>,
             'title': <section_title>,
             'panels': <array of panels>}
        """
        user = api.user.get_current()
        builders = (('analyses', self.get_analyses_section),
                    ('analysisrequests', self.get_analysisrequests_section),
                    ('worksheets', self.get_worksheets_section),
                    ('samples', self.get_samples_section))
        return [build() for panel_id, build in builders
                if is_panel_visible_for_user(panel_id, user)]

    def get_filter_options(self):
        """Return the available dashboard filter options.

        :return: a DisplayList with the 'all' and 'mine' choices
        """
        return DisplayList((
            ('all', _('All')),
            ('mine', _('Mine')),
        ))

    def _getStatistics(self, name, description, url, catalog, criterias, total):
        """Build a 'simple-panel' dict for the given catalog search.

        Runs *criterias* against *catalog*, clamps the match count to
        *total* and computes the percentage it represents.

        :param total: denominator used for the percentage/legend
        :return: dict describing one informative dashboard panel
        """
        matches = 0
        ratio = 0
        if total > 0:
            matches = min(len(catalog(criterias)), total)
            ratio = (float(matches) / float(total)) * 100 if matches > 0 else 0
        # One-decimal string representation, reused for the legend
        ratio = '%.1f' % ratio
        return {'type':        'simple-panel',
                'name':        name,
                'class':       'informative',
                'description': description,
                'total':       total,
                'link':        self.portal_url + '/' + url,
                'legend':      _('of') + " " + str(total) + ' (' + ratio + '%)',
                'number':      matches,
                'percentage':  float(ratio)}

    def get_analysisrequests_section(self):
        """ Returns the section dictionary related with Analysis
            Requests, that contains some informative panels (like
            ARs to be verified, ARs to be published, etc.) plus an
            evolution chart.
        """
        out = []
        catalog = getToolByName(self.context, CATALOG_ANALYSIS_REQUEST_LISTING)
        query = {'portal_type': "AnalysisRequest",
                 'cancellation_state': ['active']}
        filtering_allowed = self.context.bika_setup.getAllowDepartmentFiltering()
        if filtering_allowed:
            cookie_dep_uid = self.request.get(FILTER_BY_DEPT_COOKIE_ID, '').split(',')
            query['getDepartmentUIDs'] = {"query": cookie_dep_uid, "operator": "or"}

        # Check if dashboard_cookie contains any values to query
        # elements by
        query = self._update_criteria_with_filters(query, 'analysisrequests')

        # Active Analysis Requests (All): denominator for all panels below
        total = len(catalog(query))

        # Sampling workflow enabled?
        if (self.context.bika_setup.getSamplingWorkflowEnabled()):
            # Analysis Requests awaiting to be sampled or scheduled
            name = _('Analysis Requests to be sampled')
            desc = _("To be sampled")
            purl = 'samples?samples_review_state=to_be_sampled'
            query['review_state'] = ['to_be_sampled', ]
            out.append(self._getStatistics(name, desc, purl, catalog, query, total))

            # Analysis Requests awaiting to be preserved
            name = _('Analysis Requests to be preserved')
            desc = _("To be preserved")
            purl = 'samples?samples_review_state=to_be_preserved'
            query['review_state'] = ['to_be_preserved', ]
            out.append(self._getStatistics(name, desc, purl, catalog, query, total))

            # Analysis Requests scheduled for Sampling
            name = _('Analysis Requests scheduled for sampling')
            desc = _("Sampling scheduled")
            purl = 'samples?samples_review_state=scheduled_sampling'
            query['review_state'] = ['scheduled_sampling', ]
            out.append(self._getStatistics(name, desc, purl, catalog, query, total))

        # Analysis Requests awaiting for reception
        name = _('Analysis Requests to be received')
        desc = _("Reception pending")
        purl = 'analysisrequests?analysisrequests_review_state=sample_due'
        query['review_state'] = ['sample_due', ]
        out.append(self._getStatistics(name, desc, purl, catalog, query, total))

        # Analysis Requests under way
        name = _('Analysis Requests with results pending')
        desc = _("Results pending")
        purl = 'analysisrequests?analysisrequests_review_state=sample_received'
        query['review_state'] = ['attachment_due',
                                 'sample_received',
                                 'assigned']
        out.append(self._getStatistics(name, desc, purl, catalog, query, total))

        # Analysis Requests to be verified
        name = _('Analysis Requests to be verified')
        desc = _("To be verified")
        purl = 'analysisrequests?analysisrequests_review_state=to_be_verified'
        query['review_state'] = ['to_be_verified', ]
        out.append(self._getStatistics(name, desc, purl, catalog, query, total))

        # Analysis Requests verified (to be published)
        name = _('Analysis Requests verified')
        desc = _("Verified")
        purl = 'analysisrequests?analysisrequests_review_state=verified'
        query['review_state'] = ['verified', ]
        out.append(self._getStatistics(name, desc, purl, catalog, query, total))

        # Analysis Requests published
        name = _('Analysis Requests published')
        desc = _("Published")
        purl = 'analysisrequests?analysisrequests_review_state=published'
        query['review_state'] = ['published', ]
        out.append(self._getStatistics(name, desc, purl, catalog, query, total))

        # Analysis Requests to be printed
        if self.context.bika_setup.getPrintingWorkflowEnabled():
            name = _('Analysis Requests to be printed')
            desc = _("To be printed")
            purl = 'analysisrequests?analysisrequests_getPrinted=0'
            query['getPrinted'] = '0'
            query['review_state'] = ['published', ]
            out.append(
                self._getStatistics(name, desc, purl, catalog, query, total))

        # Chart with the evolution of ARs over a period, grouped by
        # periodicity. Remove the panel-specific filters applied above so
        # the chart covers every active AR in the period.
        query.pop('review_state', None)
        # Bug fix: 'getPrinted' was previously left in the query when the
        # printing workflow is enabled, silently restricting the
        # evolution chart to unprinted ARs only.
        query.pop('getPrinted', None)
        query['sort_on'] = 'created'
        query['created'] = self.min_date_range
        outevo = self._fill_dates_evo(catalog, query)
        out.append({'type':         'bar-chart-panel',
                    'name':         _('Evolution of Analysis Requests'),
                    'class':        'informative',
                    'description':  _('Evolution of Analysis Requests'),
                    'data':         json.dumps(outevo),
                    'datacolors':   json.dumps(self.get_colors_palette())})

        return {'id': 'analysisrequests',
                'title': _('Analysis Requests'),
                'panels': out}

    def get_worksheets_section(self):
        """Return the dashboard section for Worksheets: informative
        panels (results pending, to be verified, verified) plus an
        evolution chart.
        """
        catalog = getToolByName(self.context, CATALOG_WORKSHEET_LISTING)
        query = {'portal_type': "Worksheet", }
        if self.context.bika_setup.getAllowDepartmentFiltering():
            dep_uids = self.request.get(FILTER_BY_DEPT_COOKIE_ID, '').split(',')
            query['getDepartmentUIDs'] = {"query": dep_uids, "operator": "or"}

        # Apply the filters stored in the dashboard cookie, if any
        query = self._update_criteria_with_filters(query, 'worksheets')

        # Total of active worksheets: denominator for the panels below
        total = len(catalog(query))

        panels = (
            (_('Results pending'), _('Results pending'),
             'worksheets?list_review_state=open',
             ['open', 'attachment_due']),
            (_('To be verified'), _('To be verified'),
             'worksheets?list_review_state=to_be_verified',
             ['to_be_verified', ]),
            (_('Verified'), _('Verified'),
             'worksheets?list_review_state=verified',
             ['verified', ]),
        )
        out = []
        for name, description, url, states in panels:
            query['review_state'] = states
            out.append(self._getStatistics(
                name, description, url, catalog, query, total))

        # Evolution chart: drop the state filter and group by creation
        del query['review_state']
        query['sort_on'] = 'created'
        query['created'] = self.min_date_range
        evolution = self._fill_dates_evo(catalog, query)
        out.append({'type': 'bar-chart-panel',
                    'name': _('Evolution of Worksheets'),
                    'class': 'informative',
                    'description': _('Evolution of Worksheets'),
                    'data': json.dumps(evolution),
                    'datacolors': json.dumps(self.get_colors_palette())})

        return {'id': 'worksheets',
                'title': _('Worksheets'),
                'panels': out}

    def get_analyses_section(self):
        """Return the dashboard section for Analyses: informative panels
        (assignment pending, results pending, to be verified, verified)
        plus an evolution chart.
        """
        catalog = getToolByName(self.context, CATALOG_ANALYSIS_LISTING)
        query = {'portal_type': "Analysis",
                 'cancellation_state': ['active']}
        if self.context.bika_setup.getAllowDepartmentFiltering():
            dep_uids = self.request.get(FILTER_BY_DEPT_COOKIE_ID, '').split(',')
            # Note the singular 'getDepartmentUID' index for this catalog
            query['getDepartmentUID'] = {"query": dep_uids, "operator": "or"}

        # Apply the filters stored in the dashboard cookie, if any
        query = self._update_criteria_with_filters(query, 'analyses')

        # Total of active analyses: denominator for the panels below
        total = len(catalog(query))

        panels = (
            (_('Assignment pending'), _('Assignment pending'),
             'aggregatedanalyses', ['sample_received', ]),
            (_('Results pending'), _('Results pending'),
             'aggregatedanalyses', ['assigned', 'attachment_due']),
            (_('To be verified'), _('To be verified'),
             'aggregatedanalyses', ['to_be_verified', ]),
            (_('Verified'), _('Verified'),
             'aggregatedanalyses', ['verified', ]),
        )
        out = []
        for name, description, url, states in panels:
            query['review_state'] = states
            out.append(self._getStatistics(
                name, description, url, catalog, query, total))

        # Evolution chart: drop the state filter and group by creation
        del query['review_state']
        query['sort_on'] = 'created'
        query['created'] = self.min_date_range
        evolution = self._fill_dates_evo(catalog, query)
        out.append({'type': 'bar-chart-panel',
                    'name': _('Evolution of Analyses'),
                    'class': 'informative',
                    'description': _('Evolution of Analyses'),
                    'data': json.dumps(evolution),
                    'datacolors': json.dumps(self.get_colors_palette())})
        return {'id': 'analyses',
                'title': _('Analyses'),
                'panels': out}

    def get_samples_section(self):
        """Return the dashboard section for Samples: informative panels
        for each relevant workflow state plus an evolution chart.
        """
        catalog = getToolByName(self.context, 'portal_catalog')
        query = {'portal_type': "Sample",
                 'cancellation_state': ['active']}
        if self.context.bika_setup.getAllowDepartmentFiltering():
            dep_uids = self.request.get(FILTER_BY_DEPT_COOKIE_ID, '').split(',')
            query['getDepartmentUIDs'] = {"query": dep_uids,
                                          "operator": "or"}

        # Apply the filters stored in the dashboard cookie, if any
        query = self._update_criteria_with_filters(query, 'samples')

        # Total of active samples: denominator for the panels below
        total = len(catalog(query))

        panels = []
        if self.context.bika_setup.getSamplingWorkflowEnabled():
            # These states only exist when the sampling workflow is on
            panels.extend([
                (_('Samples to be sampled'), _("To be sampled"),
                 'samples?samples_review_state=to_be_sampled',
                 ['to_be_sampled', ]),
                (_('Samples to be preserved'), _("To be preserved"),
                 'samples?samples_review_state=to_be_preserved',
                 ['to_be_preserved', ]),
                (_('Samples scheduled for sampling'), _("Sampling scheduled"),
                 'samples?samples_review_state=scheduled_sampling',
                 ['scheduled_sampling', ]),
            ])
        panels.extend([
            (_('Samples to be received'), _("Reception pending"),
             'samples?samples_review_state=sample_due',
             ['sample_due', ]),
            (_('Samples received'), _("Samples received"),
             'samples?samples_review_state=sample_received',
             ['sample_received', ]),
            (_('Samples rejected'), _("Samples rejected"),
             'samples?samples_review_state=rejected',
             ['rejected', ]),
        ])

        out = []
        for name, description, url, states in panels:
            query['review_state'] = states
            out.append(self._getStatistics(
                name, description, url, catalog, query, total))

        # Evolution chart: drop the state filter and group by creation
        if 'review_state' in query:
            del query['review_state']
        query['sort_on'] = 'created'
        query['created'] = self.min_date_range
        evolution = self._fill_dates_evo(catalog, query)
        out.append({'type': 'bar-chart-panel',
                    'name': _('Evolution of Samples'),
                    'class': 'informative',
                    'description': _('Evolution of Samples'),
                    'data': json.dumps(evolution),
                    'datacolors': json.dumps(self.get_colors_palette())})

        return {'id': 'samples',
                'title': _('Samples'),
                'panels': out}

    def get_states_map(self, portal_type):
        """Map workflow state ids to the human-readable panel titles used
        by the evolution charts.

        :param portal_type: one of 'Analysis', 'AnalysisRequest',
            'Worksheet' or 'Sample'
        :return: dict of state id -> title, or None for other types
        """
        states_by_type = {
            'Analysis': {
                'to_be_sampled':   _('Sample reception pending'),
                'sample_due':      _('Sample reception pending'),
                'sample_received': _('Assignment pending'),
                'assigned':        _('Results pending'),
                'attachment_due':  _('Results pending'),
                'to_be_verified':  _('To be verified'),
                'rejected':        _('Rejected'),
                'retracted':       _('Retracted'),
                'verified':        _('Verified'),
                'published':       _('Published')},
            'AnalysisRequest': {
                'to_be_sampled':       _('To be sampled'),
                'to_be_preserved':     _('To be preserved'),
                'scheduled_sampling':  _('Sampling scheduled'),
                'sample_due':          _('Reception pending'),
                'rejected':            _('Rejected'),
                'sample_received':     _('Results pending'),
                'assigned':            _('Results pending'),
                'attachment_due':      _('Results pending'),
                'to_be_verified':      _('To be verified'),
                'verified':            _('Verified'),
                'published':           _('Published')},
            'Worksheet': {
                'open':            _('Results pending'),
                'attachment_due':  _('Results pending'),
                'to_be_verified':  _('To be verified'),
                'verified':        _('Verified')},
            'Sample': {
                'to_be_sampled':       _('To be sampled'),
                'to_be_preserved':     _('To be preserved'),
                'scheduled_sampling':  _('Sampling scheduled'),
                'sample_due':          _('Reception pending'),
                'rejected':            _('Rejected'),
                'sample_received':     _('Sample received'), },
        }
        return states_by_type.get(portal_type)

    def get_colors_palette(self):
        """Return the color palette for the evolution charts, keyed both
        by workflow state id and by the translated panel title.
        """
        groups = (
            (('to_be_sampled',), (_('To be sampled'),), '#FA6900'),
            (('to_be_preserved',), (_('To be preserved'),), '#C44D58'),
            (('scheduled_sampling',), (_('Sampling scheduled'),), '#FA6900'),
            (('sample_due',),
             (_('Sample reception pending'), _('Reception pending')),
             '#F38630'),
            (('sample_received',), (_('Assignment pending'),), '#E0E4CC'),
            (('assigned', 'attachment_due', 'open'),
             (_('Results pending'),), '#dcdcdc'),
            (('rejected', 'retracted'),
             (_('Rejected'), _('Retracted')), '#FF6B6B'),
            (('to_be_verified',), (_('To be verified'),), '#A7DBD8'),
            (('verified',), (_('Verified'),), '#69D2E7'),
            (('published',), (_('Published'),), '#83AF9B'),
        )
        palette = {}
        for state_ids, titles, color in groups:
            for key in state_ids + titles:
                palette[key] = color
        return palette

    def _getDateStr(self, period, created):
        """Convert *created* (a Zope DateTime) into the label used to
        group evolution-chart rows for the given periodicity.

        :param period: periodicity code ('y', 'b', 'q', 'm', 'w', 'a';
            anything else yields a daily 'yy-mm-dd' label)
        :param created: the DateTime to convert
        :return: an int (year) or a 'yy-mm[-dd]' string
        """
        if period == 'y':
            created = created.year()
        elif period == 'b':
            # First month of the semester (Python 2 integer division)
            m = (((created.month()-1)/6)*6)+1
            created = '%s-%s' % (str(created.year())[2:], str(m).zfill(2))
        elif period == 'q':
            # First month of the quarter (Python 2 integer division)
            m = (((created.month()-1)/3)*3)+1
            created = '%s-%s' % (str(created.year())[2:], str(m).zfill(2))
        elif period == 'm':
            created = '%s-%s' % (str(created.year())[2:], str(created.month()).zfill(2))
        elif period == 'w':
            # Snap to the start of the week. Removed a dead local that
            # computed '(((day-1)/7)*7)+1' and was never used.
            # NOTE(review): isocalendar()'s dow is 1..7 (Mon..Sun), so
            # subtracting it lands on the previous Sunday, not Monday --
            # confirm this matches _init_date_range's weekly window.
            year, weeknum, dow = created.asdatetime().isocalendar()
            created = created - dow
            created = '%s-%s-%s' % (str(created.year())[2:], str(created.month()).zfill(2), str(created.day()).zfill(2))
        elif period == 'a':
            # All time, but evolution chart grouped by year
            created = created.year()
        else:
            created = '%s-%s-%s' % (str(created.year())[2:], str(created.month()).zfill(2), str(created.day()).zfill(2))
        return created

    def _fill_dates_evo(self, catalog, query):
        """Build the rows for a time-evolution bar chart.

        Searches *catalog* with *query* and groups the resulting brains
        by the period label from _getDateStr and by the panel state title
        from get_states_map. Rows are pre-created for every period in
        [self.min_date, self.date_to) so all segments are displayed.

        :param catalog: catalog tool to search against
        :param query: catalog query; 'portal_type' selects the states map
        :return: list of dicts like {'date': <label>, <title>: <count>}
        """
        outevoidx = {}
        outevo = []
        # Step (in days) between two consecutive pre-created rows,
        # roughly matching the selected periodicity
        days = 1
        if self.periodicity == 'y':
            days = 336
        elif self.periodicity == 'b':
            days = 168
        elif self.periodicity == 'q':
            days = 84
        elif self.periodicity == 'm':
            days = 28
        elif self.periodicity == 'w':
            days = 7
        elif self.periodicity == 'a':
            days = 336

        otherstate = _('Other status')
        statesmap = self.get_states_map(query['portal_type'])
        stats = statesmap.values()
        stats.sort()
        stats.append(otherstate)
        # Per-state totals, used at the end to prune empty series
        statscount = {s:0 for s in stats}
        # Pre-create one row per period so every segment is displayed
        curr = self.min_date.asdatetime()
        end = self.date_to.asdatetime()
        while curr < end:
            currstr = self._getDateStr(self.periodicity, DateTime(curr))
            if currstr not in outevoidx:
                outdict = {'date':currstr}
                for k in stats:
                    outdict[k] = 0
                outevo.append(outdict)
                outevoidx[currstr] = len(outevo)-1
            curr = curr + datetime.timedelta(days=days)
        for brain in catalog(query):
            # These catalogs index 'created' as brain metadata
            if query.get('portal_type', '') in ['AnalysisRequest', 'Analysis']:
                created = brain.created
            # If not, wake up the object to read its creation date
            else:
                created = brain.getObject().created()
            state = brain.review_state
            if state not in statesmap:
                logger.warn("'%s' State for '%s' not available" % (state, query['portal_type']))
            state = statesmap[state] if state in statesmap else otherstate
            created = self._getDateStr(self.periodicity, created)
            if created in outevoidx:
                oidx = outevoidx[created]
                statscount[state] += 1
                if state in outevo[oidx]:
                    outevo[oidx][state] += 1
                else:
                    outevo[oidx][state] = 1
            else:
                # Create new row for a period outside the prefilled range.
                # NOTE(review): this branch neither bumps statscount nor
                # registers the row in outevoidx, so repeated hits on the
                # same period create duplicate rows and the state may be
                # pruned below -- confirm whether this can trigger.
                currow = {'date': created,
                          state: 1 }
                outevo.append(currow)

        # Remove all those states for which there is no data
        rstates = [k for k,v in statscount.items() if v==0]
        for o in outevo:
            for r in rstates:
                if r in o:
                    del o[r]

        return outevo

    def _update_criteria_with_filters(self, query, section_name):
        """Merge the dashboard-cookie filter for *section_name* into the
        search criteria.

        When the stored filter is 'mine', restrict the results to
        objects created by the current member.

        :param query: a dictionary with search criteria
        :param section_name: the dashboard section name
        :return: the (possibly updated) 'query' dictionary
        """
        cookie = self.dashboard_cookie
        if cookie is not None and cookie.get(section_name) == 'mine':
            query['Creator'] = self.member.getId()
        return query

    def get_dashboard_panels_visibility(self, section_name):
        """
        Return a list of pairs as values that represents the
        role-permission view relation for the panel section. Thin
        delegate to get_dashboard_panels_visibility_by_section.

        :param section_name: the panels section id.
        :return: a list of tuples.
        """
        return get_dashboard_panels_visibility_by_section(section_name)
Esempio n. 25
0
class DashboardView(BrowserView):
    """Main dashboard browser view.

    Renders the dashboard template for lab managers; everyone else is
    redirected to the Bika front page.
    """

    template = ViewPageTemplateFile("templates/dashboard.pt")

    def __call__(self):
        """Render the dashboard or redirect to the front page.

        The dashboard is shown only when the user is authenticated, the
        'DashboardByDefault' setup option is enabled, and the user holds
        the 'Manager' or 'LabManager' role. Otherwise the response is a
        redirect to '<portal_url>/bika-frontpage'.
        """
        tofrontpage = True
        mtool = getToolByName(self.context, 'portal_membership')
        if not mtool.isAnonymousUser() and \
           self.context.bika_setup.getDashboardByDefault():
            # Authenticated user: only lab managers get the dashboard.
            # Reuse the membership tool fetched above (the original
            # looked up 'portal_membership' a second time here).
            roles = mtool.getAuthenticatedMember().getRoles()
            tofrontpage = 'Manager' not in roles and 'LabManager' not in roles

        if tofrontpage:
            self.request.response.redirect(self.portal_url + "/bika-frontpage")
        else:
            self._init_date_range()
            return self.template()

    def _init_date_range(self):
        """ Sets the date range from which the data must be retrieved.
            Sets the values to the class parameters 'date_from',
            'date_to', 'date_range', and self.periodicity
            Calculates the date range according to the value of the
            request's 'p' parameter:
            - 'd' (daily)
            - 'w' (weekly)
            - 'm' (monthly)
            - 'q' (quarterly)
            - 'b' (biannual)
            - 'y' (yearly)
            - 'a' (all-time)

            Also sets 'min_date', 'base_date_range' and 'min_date_range'
            (the latter two are catalog range queries).
        """
        # By default, weekly
        self.periodicity = self.request.get('p', 'w')
        if (self.periodicity == 'd'):
            # Daily
            self.date_from = DateTime()
            self.date_to = DateTime() + 1
            # For time-evolution data, load last 30 days
            self.min_date = self.date_from - 30
        elif (self.periodicity == 'm'):
            # Monthly: from the 1st to the last day of the current month
            today = datetime.date.today()
            self.date_from = DateTime(today.year, today.month, 1)
            self.date_to = DateTime(today.year, today.month, monthrange(today.year, today.month)[1], 23, 59, 59)
            # For time-evolution data, load last two years
            min_year = today.year - 1 if today.month == 12 else today.year - 2
            min_month = 1 if today.month == 12 else today.month
            self.min_date = DateTime(min_year, min_month, 1)
        elif (self.periodicity == 'q'):
            # Quarterly
            today = datetime.date.today()
            # NOTE: '/' is integer division here (Python 2); yields the
            # first month of the current quarter (1, 4, 7 or 10)
            m = (((today.month-1)/3)*3)+1
            self.date_from = DateTime(today.year, m, 1)
            self.date_to = DateTime(today.year, m+2, monthrange(today.year, m+2)[1], 23, 59, 59)
            # For time-evolution data, load last four years
            min_year = today.year - 4 if today.month == 12 else today.year - 5
            # NOTE(review): min_date anchors at the quarter's first month
            # (m) rather than January -- confirm this is intended
            self.min_date = DateTime(min_year, m, 1)
        elif (self.periodicity == 'b'):
            # Biannual; first month of the current semester (1 or 7),
            # integer division under Python 2
            today = datetime.date.today()
            m = (((today.month-1)/6)*6)+1
            self.date_from = DateTime(today.year, m, 1)
            self.date_to = DateTime(today.year, m+5, monthrange(today.year, m+5)[1], 23, 59, 59)
            # For time-evolution data, load last ten years
            min_year = today.year - 10 if today.month == 12 else today.year - 11
            self.min_date = DateTime(min_year, m, 1)
        elif (self.periodicity == 'y'):
            # Yearly
            today = datetime.date.today()
            self.date_from = DateTime(today.year, 1, 1)
            self.date_to = DateTime(today.year, 12, 31, 23, 59, 59)
            # For time-evolution data, load last 15 years
            min_year = today.year - 15 if today.month == 12 else today.year - 16
            self.min_date = DateTime(min_year, 1, 1)
        elif (self.periodicity == 'a'):
            # All time
            today = datetime.date.today()
            self.date_from = DateTime('1990-01-01 00:00:00')
            self.date_to = DateTime(today.year, 12, 31, 23, 59, 59)
            # For time-evolution data, load last 15 years
            min_year = today.year - 15 if today.month == 12 else today.year - 16
            self.min_date = DateTime(min_year, 1, 1)
        else:
            # weekly (default): from today minus the ISO weekday number,
            # spanning seven days
            today = datetime.date.today()
            year, weeknum, dow = today.isocalendar()
            self.date_from = DateTime() - dow
            self.date_to = self.date_from + 7
            # For time-evolution data, load last six months
            min_year = today.year if today.month > 6 else today.year - 1
            min_month = today.month - 6 if today.month > 6 else (today.month - 6)+12
            self.min_date = DateTime(min_year, min_month, 1)

        # Catalog range queries: the selected period, an (effectively)
        # unbounded period, and the period used by the evolution charts
        self.date_range = {'query': (self.date_from, self.date_to), 'range': 'min:max'}
        self.base_date_range = {'query': (DateTime('1990-01-01 00:00:00'), DateTime()+1), 'range':'min:max'}
        self.min_date_range = {'query': (self.min_date, self.date_to), 'range': 'min:max'}

    def get_sections(self):
        """ Returns an array with the sections to be displayed.
            Every section is a dictionary with the following structure:
                {'id': <section_identifier>,
                 'title': <section_title>,
                 'panels': <array of panels>}
        """
        # Build each dashboard section in display order
        builders = (self.get_analyses_section,
                    self.get_analysisrequests_section,
                    self.get_worksheets_section)
        return [build() for build in builders]

    def _getStatistics(self, name, description, url, catalog, criterias, total):
        """Build a 'simple-panel' statistics dict for a dashboard section.

        :param name: panel title
        :param description: panel description
        :param url: path, relative to the portal root, the panel links to
        :param catalog: catalog tool used to count the matching objects
        :param criterias: catalog query dict for this statistic
        :param total: total number of active objects (the denominator)
        :return: dict with the panel type, counts, legend and link
        """
        out = {'type':        'simple-panel',
               'name':        name,
               'class':       'informative',
               'description': description,
               'total':       total,
               'link':        self.portal_url + '/' + url}

        results = 0
        ratio = 0
        if total > 0:
            # Clamp to 'total' so a stale index can never report > 100%
            results = min(len(catalog(criterias)), total)
            ratio = (float(results) / float(total)) * 100 if results > 0 else 0
        # One decimal place, e.g. '42.9' (the original built the format
        # string dynamically via str("%%.%sf" % 1), which is just '%.1f')
        ratio = '%.1f' % ratio
        out['legend'] = _('of') + " " + str(total) + ' (' + ratio + '%)'
        out['number'] = results
        return out

    def get_analysisrequests_section(self):
        """ Returns the section dictionary related with Analysis
            Requests, that contains some informative panels (like
            ARs to be verified, ARs to be published, etc.) plus a
            time-evolution bar chart.
        """
        out = []
        catalog = getToolByName(self.context, CATALOG_ANALYSIS_REQUEST_LISTING)
        query = {'portal_type': "AnalysisRequest",
                 'cancellation_state': ['active']}
        filtering_allowed = self.context.bika_setup.getAllowDepartmentFiltering()
        if filtering_allowed:
            # 'filtering_allowed' is known to be True inside this branch
            # (the original re-checked it redundantly on this line)
            cookie_dep_uid = self.request.get('filter_by_department_info', '').split(',')
            query['getDepartmentUIDs'] = {"query": cookie_dep_uid, "operator": "or"}

        # Active Analysis Requests (All)
        total = len(catalog(query))

        # (name, description, listing URL, review states) per panel,
        # rendered in order
        panels = []
        if self.context.bika_setup.getSamplingWorkflowEnabled():
            # Sampling workflow enabled: extra pre-reception panels
            panels += [
                (_('Analysis Requests to be sampled'), _("To be sampled"),
                 'samples?samples_review_state=to_be_sampled',
                 ['to_be_sampled', ]),
                (_('Analysis Requests to be preserved'), _("To be preserved"),
                 'samples?samples_review_state=to_be_preserved',
                 ['to_be_preserved', ]),
                (_('Analysis Requests scheduled for sampling'),
                 _("Sampling scheduled"),
                 'samples?samples_review_state=scheduled_sampling',
                 ['scheduled_sampling', ]),
            ]
        panels += [
            (_('Analysis Requests to be received'), _("Reception pending"),
             'analysisrequests?analysisrequests_review_state=sample_due',
             ['sample_due', ]),
            (_('Analysis Requests with results pending'), _("Results pending"),
             'analysisrequests?analysisrequests_review_state=sample_received',
             ['attachment_due', 'sample_received', 'assigned']),
            (_('Analysis Requests to be verified'), _("To be verified"),
             'analysisrequests?analysisrequests_review_state=to_be_verified',
             ['to_be_verified', ]),
            (_('Analysis Requests verified'), _("Verified"),
             'analysisrequests?analysisrequests_review_state=verified',
             ['verified', ]),
            (_('Analysis Requests published'), _("Published"),
             'analysisrequests?analysisrequests_review_state=published',
             ['published', ]),
        ]
        for name, desc, purl, states in panels:
            query['review_state'] = states
            out.append(self._getStatistics(name, desc, purl, catalog, query, total))

        # Chart with the evolution of ARs over a period, grouped by
        # periodicity
        del query['review_state']
        query['sort_on'] = 'created'
        query['created'] = self.min_date_range
        outevo = self._fill_dates_evo(catalog, query)
        out.append({'type':         'bar-chart-panel',
                    'name':         _('Evolution of Analysis Requests'),
                    'class':        'informative',
                    'description':  _('Evolution of Analysis Requests'),
                    'data':         json.dumps(outevo),
                    'datacolors':   json.dumps(self.get_colors_palette())})

        return {'id': 'analysisrequests',
                'title': _('Analysis Requests'),
                'panels': out}

    def get_worksheets_section(self):
        """ Returns the section dictionary related with Worksheets,
            that contains some informative panels (like
            WS to be verified, WS with results pending, etc.) plus a
            time-evolution bar chart.
        """
        out = []
        bc = getToolByName(self.context, CATALOG_WORKSHEET_LISTING)
        query = {'portal_type': "Worksheet", }
        filtering_allowed = self.context.bika_setup.getAllowDepartmentFiltering()
        if filtering_allowed:
            # 'filtering_allowed' is known to be True inside this branch
            # (the original re-checked it redundantly on this line)
            cookie_dep_uid = self.request.get('filter_by_department_info', '').split(',')
            query['getDepartmentUIDs'] = {"query": cookie_dep_uid, "operator": "or"}

        # Active Worksheets (all)
        total = len(bc(query))

        # (name, description, listing URL, review states), in order
        panels = (
            (_('Results pending'), _('Results pending'),
             'worksheets?list_review_state=open',
             ['open', 'attachment_due']),
            (_('To be verified'), _('To be verified'),
             'worksheets?list_review_state=to_be_verified',
             ['to_be_verified', ]),
            (_('Verified'), _('Verified'),
             'worksheets?list_review_state=verified',
             ['verified', ]),
        )
        for name, desc, purl, states in panels:
            query['review_state'] = states
            out.append(self._getStatistics(name, desc, purl, bc, query, total))

        # Chart with the evolution of WSs over a period, grouped by
        # periodicity
        del query['review_state']
        query['sort_on'] = 'created'
        query['created'] = self.min_date_range
        outevo = self._fill_dates_evo(bc, query)
        out.append({'type':         'bar-chart-panel',
                    'name':         _('Evolution of Worksheets'),
                    'class':        'informative',
                    'description':  _('Evolution of Worksheets'),
                    'data':         json.dumps(outevo),
                    'datacolors':   json.dumps(self.get_colors_palette())})

        return {'id': 'worksheets',
                'title': _('Worksheets'),
                'panels': out}

    def get_analyses_section(self):
        """ Returns the section dictionary related with Analyses,
            that contains some informative panels (analyses pending
            analyses assigned, etc.) plus a time-evolution bar chart.

            sample_registered, not_requested, published, retracted,
            sample_due, sample_received, sample_prep, sampled, to_be_preserved,
            to_be_sampled, , to_be_verified, rejected, verified, to_be_verified,
            assigned
        """
        out = []
        bc = getToolByName(self.context, CATALOG_ANALYSIS_LISTING)
        query = {'portal_type': "Analysis",
                 'cancellation_state': ['active']}
        filtering_allowed = self.context.bika_setup.getAllowDepartmentFiltering()
        if filtering_allowed:
            # 'filtering_allowed' is known to be True inside this branch
            # (the original re-checked it redundantly on this line)
            cookie_dep_uid = self.request.get('filter_by_department_info', '').split(',')
            # NOTE: this catalog uses the singular 'getDepartmentUID'
            # index (the AR/worksheet sections use 'getDepartmentUIDs')
            query['getDepartmentUID'] = {"query": cookie_dep_uid, "operator": "or"}

        # Active Analyses (All)
        total = len(bc(query))

        # (name, description, listing URL, review states), in order
        panels = (
            (_('Assignment pending'), _('Assignment pending'),
             'aggregatedanalyses',
             ['sample_received', ]),
            (_('Results pending'), _('Results pending'),
             'aggregatedanalyses',
             ['assigned', 'attachment_due']),
            (_('To be verified'), _('To be verified'),
             'aggregatedanalyses',
             ['to_be_verified', ]),
            (_('Verified'), _('Verified'),
             'aggregatedanalyses',
             ['verified', ]),
        )
        for name, desc, purl, states in panels:
            query['review_state'] = states
            out.append(self._getStatistics(name, desc, purl, bc, query, total))

        # Chart with the evolution of Analyses over a period, grouped by
        # periodicity
        del query['review_state']
        query['sort_on'] = 'created'
        query['created'] = self.min_date_range
        outevo = self._fill_dates_evo(bc, query)
        out.append({'type':         'bar-chart-panel',
                    'name':         _('Evolution of Analyses'),
                    'class':        'informative',
                    'description':  _('Evolution of Analyses'),
                    'data':         json.dumps(outevo),
                    'datacolors':   json.dumps(self.get_colors_palette())})
        return {'id': 'analyses',
                'title': _('Analyses'),
                'panels': out}

    def get_states_map(self, portal_type):
        """Map workflow state ids to chart display labels for the given
        portal type.

        Falls through to None for portal types without a mapping
        (same as the original implicit return).
        """
        results_pending = _('Results pending')
        to_be_verified = _('To be verified')
        verified = _('Verified')
        maps = {
            'Analysis': {
                'to_be_sampled':   _('Sample reception pending'),
                'sample_due':      _('Sample reception pending'),
                'sample_received': _('Assignment pending'),
                'assigned':        results_pending,
                'attachment_due':  results_pending,
                'to_be_verified':  to_be_verified,
                'rejected':        _('Rejected'),
                'retracted':       _('Retracted'),
                'verified':        verified,
                'published':       _('Published')},
            'AnalysisRequest': {
                'to_be_sampled':       _('To be sampled'),
                'to_be_preserved':     _('To be preserved'),
                'scheduled_sampling':  _('Sampling scheduled'),
                'sample_due':          _('Reception pending'),
                'rejected':            _('Rejected'),
                'sample_received':     results_pending,
                'assigned':            results_pending,
                'attachment_due':      results_pending,
                'to_be_verified':      to_be_verified,
                'verified':            verified,
                'published':           _('Published')},
            'Worksheet': {
                'open':            results_pending,
                'attachment_due':  results_pending,
                'to_be_verified':  to_be_verified,
                'verified':        verified},
        }
        return maps.get(portal_type)

    def get_colors_palette(self):
        """Return the chart color assigned to every state key.

        Both the raw workflow state ids and their translated labels are
        used as chart keys, so both appear in the mapping.
        """
        groups = (
            ('#FA6900', ('to_be_sampled', _('To be sampled'),
                         'scheduled_sampling', _('Sampling scheduled'))),
            ('#C44D58', ('to_be_preserved', _('To be preserved'))),
            ('#F38630', ('sample_due', _('Sample reception pending'),
                         _('Reception pending'))),
            ('#E0E4CC', ('sample_received', _('Assignment pending'))),
            ('#dcdcdc', ('assigned', 'attachment_due', 'open',
                         _('Results pending'))),
            ('#FF6B6B', ('rejected', 'retracted',
                         _('Rejected'), _('Retracted'))),
            ('#A7DBD8', ('to_be_verified', _('To be verified'))),
            ('#69D2E7', ('verified', _('Verified'))),
            ('#83AF9B', ('published', _('Published'))),
        )
        palette = {}
        for color, keys in groups:
            for key in keys:
                palette[key] = color
        return palette

    def _getDateStr(self, period, created):
        """Bucket the DateTime 'created' into a period label.

        :param period: periodicity code ('y', 'b', 'q', 'm', 'w'; 'a'
            behaves like 'y'; anything else means daily)
        :param created: a DateTime-like object exposing year(), month(),
            day() and asdatetime()
        :return: an int year for 'y'/'a', otherwise a short
            'YY-MM' or 'YY-MM-DD' string
        """
        if period in ('y', 'a'):
            # Yearly and all-time evolution charts both group by year
            # (these two branches were duplicated in the original)
            created = created.year()
        elif period == 'b':
            # First month of the semester (NOTE: '/' is Python-2 integer
            # division)
            m = (((created.month() - 1) / 6) * 6) + 1
            created = '%s-%s' % (str(created.year())[2:], str(m).zfill(2))
        elif period == 'q':
            # First month of the quarter
            m = (((created.month() - 1) / 3) * 3) + 1
            created = '%s-%s' % (str(created.year())[2:], str(m).zfill(2))
        elif period == 'm':
            created = '%s-%s' % (str(created.year())[2:],
                                 str(created.month()).zfill(2))
        elif period == 'w':
            # Anchor all days of the same ISO week to a common reference
            # date by subtracting the ISO weekday number. Only the
            # weekday is needed from isocalendar(); the original also
            # computed an unused local 'd' here (removed).
            dow = created.asdatetime().isocalendar()[2]
            created = created - dow
            created = '%s-%s-%s' % (str(created.year())[2:],
                                    str(created.month()).zfill(2),
                                    str(created.day()).zfill(2))
        else:
            # Daily (default)
            created = '%s-%s-%s' % (str(created.year())[2:],
                                    str(created.month()).zfill(2),
                                    str(created.day()).zfill(2))
        return created

    def _fill_dates_evo(self, catalog, query):
        """Build the rows for a time-evolution (bar chart) panel.

        Searches 'catalog' with 'query' and groups the results by review
        state and by the period bucket produced by _getDateStr, using
        self.periodicity. Rows for every bucket between self.min_date
        and self.date_to are pre-created so empty segments still show.

        :param catalog: catalog tool to search
        :param query: catalog query; query['portal_type'] selects the
            state map and decides whether brain metadata can be used
        :return: list of dicts, one per period bucket, each with a
            'date' key plus one counter per state label
        """
        outevoidx = {}  # period label -> index into outevo
        outevo = []
        # Approximate number of days per bucket for the periodicity
        days = 1
        if self.periodicity == 'y':
            days = 336
        elif self.periodicity == 'b':
            days = 168
        elif self.periodicity == 'q':
            days = 84
        elif self.periodicity == 'm':
            days = 28
        elif self.periodicity == 'w':
            days = 7
        elif self.periodicity == 'a':
            days = 336

        otherstate = _('Other status')
        statesmap = self.get_states_map(query['portal_type'])
        # NOTE: Python 2 -- .values() returns a list, sorted in place
        stats = statesmap.values()
        stats.sort()
        stats.append(otherstate)
        statscount = {s:0 for s in stats}
        # Add first all periods, cause we want all segments to be displayed
        curr = self.min_date.asdatetime()
        end = self.date_to.asdatetime()
        while curr < end:
            currstr = self._getDateStr(self.periodicity, DateTime(curr))
            if currstr not in outevoidx:
                outdict = {'date':currstr}
                for k in stats:
                    outdict[k] = 0
                outevo.append(outdict)
                outevoidx[currstr] = len(outevo)-1
            curr = curr + datetime.timedelta(days=days)
        for brain in catalog(query):
            # Check if we can use the brain's metadata directly
            if query.get('portal_type', '') in ['AnalysisRequest', 'Analysis']:
                created = brain.created
            # If not, wake the object to read its creation date
            else:
                created = brain.getObject().created()
            state = brain.review_state
            if state not in statesmap:
                logger.warn("'%s' State for '%s' not available" % (state, query['portal_type']))
            state = statesmap[state] if state in statesmap else otherstate
            created = self._getDateStr(self.periodicity, created)
            if created in outevoidx:
                oidx = outevoidx[created]
                statscount[state] += 1
                if state in outevo[oidx]:
                    outevo[oidx][state] += 1
                else:
                    outevo[oidx][state] = 1
            else:
                # Create new row
                # NOTE(review): statscount is not incremented on this
                # branch (unlike the branch above), so states seen only
                # here may be pruned below -- confirm this is intended
                currow = {'date': created,
                          state: 1 }
                outevo.append(currow)

        # Remove all those states for which there is no data
        rstates = [k for k,v in statscount.items() if v==0]
        for o in outevo:
            for r in rstates:
                if r in o:
                    del o[r]

        return outevo
# Esempio n. 26 (example separator; scrape artifact, not code)
# 0
    def createFeedback(self, feedback_txt):
        """Store *feedback_txt* as a Document inside the 'Feedback Admin'
        folder of the current manual, then redirect to the success URL.

        The container folder is created on first use (excluded from
        navigation). The created Document records the title, UID and
        path of the page the feedback refers to.

        :param feedback_txt: the feedback text submitted by the user
        :return: the response redirect to self.url_sucess
        """
        log = logging.getLogger('createFeedback:')
        folder_conteudo = 'Feedback Admin'

        site = getSite()
        # The manual folder is the first path element below the site root
        id_folder_manual = self.context.getPhysicalPath()[2]
        folder_manual = getattr(site, id_folder_manual)

        id_folder = queryUtility(IIDNormalizer).normalize(folder_conteudo)

        # Create the feedback container on demand, hidden from navigation
        if not hasattr(folder_manual, id_folder):
            folder_manual.invokeFactory('Folder',
                                        id=id_folder,
                                        title=folder_conteudo,
                                        exclude_from_nav=True)
            obj = getattr(folder_manual, id_folder)
            if obj:
                obj.setTitle(folder_conteudo)
                obj.setExcludeFromNav(True)
                obj.setLayout('folder_listing')
                obj.reindexObject()

        folderFeedback = getattr(folder_manual, id_folder)

        # Details of the page the feedback refers to
        paginaContext = {'titulo': self.context.Title(),
                         'uid': self.context.UID(),
                         'caminho': '/'.join(self.context.getPhysicalPath()),
                         }

        zope_DT = DateTime()
        # Round-trip through datetime, kept for parity with the original
        # implementation
        zope_DT = DateTime(zope_DT.asdatetime())
        data_feedback = zope_DT.strftime('%d/%m/%Y-%H:%M')
        data_milisecond = zope_DT.strftime('%s')

        titulo_content = 'Feedback ' + ' - ' + data_feedback + ' - ' + paginaContext['uid']
        id_content = 'feedback ' + data_feedback + '-' + data_milisecond
        # Renamed from 'id' in the original, which shadowed the builtin
        content_id = queryUtility(IIDNormalizer).normalize(id_content)

        _createObjectByType('Document',
                            folderFeedback,
                            content_id,
                            title=titulo_content,
                            description=paginaContext['caminho'],
                            location=paginaContext['uid'],
                            creators='anonimo',
                            text=feedback_txt
                            )

        obj = getattr(folderFeedback, content_id)
        if obj:
            # Re-apply the metadata explicitly and reindex. The original
            # ended three of these calls with stray trailing commas
            # (building throwaway tuples); the calls are unchanged.
            obj.setTitle(titulo_content)
            obj.setDescription(paginaContext['caminho'])
            obj.setText(feedback_txt)
            obj.setLocation(paginaContext['uid'])
            obj.setCreators('anonimo')
            obj.reindexObject()

        log.info(content_id)
        msg = 'Obrigado pelo seu retorno!'
        self.utils.addPortalMessage(msg, type='info')
        return self.request.response.redirect(self.url_sucess)
# Esempio n. 27 (example separator; scrape artifact, not code)
# 0
    def authenticateCredentials(self, credentials):
        """See IAuthenticationPlugin.

        This plugin will actually never authenticate.

        o We expect the credentials to be those returned by
          ILoginPasswordExtractionPlugin.
        """
        request = self.REQUEST
	alsoProvides(request, IDisableCSRFProtection)

        response = request['RESPONSE']
        pas_instance = self._getPAS()

        login = credentials.get('login')
        password = credentials.get('password')

        if None in (login, password, pas_instance) and (
            credentials.get('source') != 'plone.session'):
            return None
        else:
            session_source = self.session

            ticket = credentials.get('cookie')

            if session_source._shared_secret is not None:
                ticket_data = tktauth.validateTicket(
                    session_source._shared_secret, ticket,
                    timeout=session_source.timeout,
                    mod_auth_tkt=session_source.mod_auth_tkt)
            else:
                ticket_data = None
                manager = queryUtility(IKeyManager)
                if manager is None:
                    return None
                for secret in manager[u"_system"]:
                    if secret is None:
                        continue

                    ticket_data = tktauth.validateTicket(secret, ticket,
                        timeout=session_source.timeout,
                        mod_auth_tkt=session_source.mod_auth_tkt)

                    if ticket_data is not None:
                        break

            if ticket_data is None:
                return None

            (digest, userid, tokens, user_data, timestamp) = ticket_data
            pas = self._getPAS()
            info = pas._verifyUser(pas.plugins, user_id=userid)

            if info is None:
                return None

            login = info['login']

        cookie_val = self.getCookie()
        
        # get max seats from member data property or cache and default to 1 if not set
        try:
            max_seats = self.getMaxSeatsForLogin(login)
        except:
            traceback.print_exc()

        # When debugging, print the maxSeats value that was resolved
        if self.DEBUG:
            print "authenticateCredentials():: Max Seats is " + str( max_seats )

        if max_seats == 1:
            if cookie_val:
                # A cookie value is there.  If it's the same as the value
                # in our mapping, it's fine.  Otherwise we'll force a
                # logout.
                existing = self.mapping1.get(login, None)
                
                if self.DEBUG:
                    if existing:
                        print "authenticateCredentials():: cookie_val is " + cookie_val + ", and active tokens are: " + ', '.join( existing['tokens'] )
                
                if existing and cookie_val not in existing['tokens']:
                    # The cookies values differ, we want to logout the
                    # user by calling resetCredentials.  Note that this
                    # will eventually call our own resetCredentials which
                    # will cleanup our own cookie.
                    try:
                        self.resetAllCredentials(request, response)
                        pas_instance.plone_utils.addPortalMessage(_(
                            u"Someone else logged in under your name.  You have been \
                            logged out"), "error")
                    except:
                        traceback.print_exc()
                elif existing is None:
                    # The browser has the cookie but we don't know about
                    # it.  Let's reset our own cookie:
                    self.setCookie('')
    
            else:
                # When no cookie is present, we generate one, store it and
                # set it in the response:
                cookie_val = uuid()
                # do some cleanup in our mappings
                existing = self.mapping1.get(login)
                
                if existing and 'tokens' in existing:
                    try:
                        if existing['tokens'][0] in self.mapping2:
                            del self.mapping2[existing['tokens'][0]]
                    except:
                        pass
    
                try:
                    from_ip = self.get_ip( request )
                except:
                    traceback.print_exc()

                now = DateTime()
                self.mapping1[login] = { 'tokens':[] }
                self.mapping1[login]['tokens'].append( cookie_val )
                self.mapping2[cookie_val] = {'userid': login, 'ip': from_ip, 'startTime': now, 'expireTime': DateTime( now.asdatetime() + self.time_to_persist_cookies )}
                self.setCookie(cookie_val)
        else:
            # Max seats is not 1. Treat this as a floating licenses scenario.
            # Nobody is logged out, but once the max seats threshold is reached,
            # active tokens must expire before new users may log in.
            if cookie_val:
                # When the cookie value is there, try to verify it or activate it if is it not added yet
                self.verifyToken( cookie_val, login, max_seats, request, response )
            else:
                if self.DEBUG:
                    print "authenticateCredentials:: Try to issue a token because there is no cookie value."
                    
                # When no cookie is present, attempt to issue a token and use the cookie to store it
                self.issueToken(login, max_seats, request, response)
                # if max_seats are filled, then force logout
                if self.isLoginAtCapacity(login, max_seats):
                    self.forceLogoutForUser(login, request, response)
    
        return None  # Note that we never return anything useful
class TestSyndication(EEAContentTypeTestCase):
    """ Test-cases for syndication. """

    def afterSetUp(self):
        """ Create one document and one event with fixed, known dates so
        the feed assertions below are deterministic.
        """
        self.setRoles('Manager')
        self.workflow = self.portal.portal_workflow
        self.effective_date = DateTime(year=2008, month=4, day=3)
        self.start_date = DateTime(year=2007, month=1, day=1)

        self.folder.invokeFactory('Document', id='doc')
        document = self.folder.doc
        document.setTitle('Some Document')
        document.setEffectiveDate(self.effective_date)
        document.reindexObject()

        self.folder.invokeFactory('QuickEvent', id='event')
        quick_event = self.folder.event
        quick_event.setTitle('Some Event')
        quick_event.setLocation(location)
        quick_event.setEffectiveDate(self.effective_date)
        quick_event.setStartDate(self.start_date)
        quick_event.reindexObject()

    def getFeedItem(self, context, doc):
        """ Return the IFeedItem adapter for doc/context, falling back to
        a plain BaseItem when no adapter is registered.
        """
        found = queryMultiAdapter((doc, context), IFeedItem)
        return BaseItem(doc, context) if found is None else found

    def testTitle(self):
        """ Feed entry titles mirror the content titles.
        """
        doc_entry = self.getFeedItem(self.folder, self.folder.doc)
        self.assertEquals(doc_entry.title, 'Some Document')

        event_entry = self.getFeedItem(self.folder, self.folder.event)
        self.assertEquals(event_entry.title, 'Some Event')

    def testDate(self):
        """ Documents publish on their effective date; events publish on
        their start date.  Microseconds are trimmed before comparing.
        """
        doc_entry = self.getFeedItem(self.folder, self.folder.doc)
        trimmed = self.effective_date.asdatetime().replace(microsecond=0)
        self.assertEquals(doc_entry.published.utcdatetime(),
                          DateTime(trimmed).utcdatetime())

        event_entry = self.getFeedItem(self.folder, self.folder.event)
        trimmed = self.folder.event.start().asdatetime().replace(microsecond=0)
        self.assertEquals(event_entry.published.utcdatetime(),
                          DateTime(trimmed).utcdatetime())

    def testFolderThumb(self):
        """ A folder containing an image renders an <img> in its feed
        item description (simulates folder-like publications).
        """
        self.folder.invokeFactory(
            'Image', id='img1', image=image, title='Simple Image')
        rss_view = self.folder.restrictedTraverse('@@rss.xml')
        folder_entry = self.getFeedItem(self.folder, self.folder)
        self.failUnless('img' in rss_view.getItemDescription(folder_entry))

    def testHighlightThumb(self):
        """ A Highlight with an image renders an <img> in its feed item
        description.
        """
        highlight = self.folder[self.folder.invokeFactory(
            'Highlight', id='h1', title='Highlight')]
        highlight.setImage(image)
        rss_view = highlight.restrictedTraverse('@@rss.xml')
        highlight_entry = self.getFeedItem(self.folder, highlight)
        self.failUnless('img' in rss_view.getItemDescription(highlight_entry))
Esempio n. 29
0
    def getSeatsPropertiesForLogin(self, login):
        # initialize max_seats at 1
        max_seats = 1
        seat_timeout = 5 # initialize to 5 minutes

        if self.login_member_data_mapping is None:
            self.login_member_data_mapping = OOBTree() # if this has not been initialized then do it now
            if self.DEBUG:
                print "Initialized the Login Member Data Mapping"
  
        # if the max_seats has a valid cached value, then use it
        cached_member_data = self.login_member_data_mapping.get(login, None)
        
        now = DateTime()
        if cached_member_data and 'expireTime' in cached_member_data and 'maxSeats' in cached_member_data and 'seatTimeoutInMinutes' in cached_member_data and now < cached_member_data['expireTime']:
            max_seats = cached_member_data['maxSeats']
            seat_timeout = cached_member_data['seatTimeoutInMinutes']
        else:
            member = self.getMember(login)
            # get the max_seats property from the member data tool
            if member is not None:
                max_seats = member.getProperty("max_seats")
                seat_timeout = member.getProperty("seat_timeout_in_minutes")
                # cache the max_seats for login
                td_seat_timeout = datetime.timedelta(minutes=seat_timeout)
                self.login_member_data_mapping[login] = { 'maxSeats': int( max_seats ), 'seatTimeoutInMinutes': float( seat_timeout ), 'expireTime': DateTime( now.asdatetime() + td_seat_timeout )}

        return { 'maxSeats': int( max_seats ), 'seatTimeoutInMinutes': float( seat_timeout ) }
Esempio n. 30
0
    def index_html(self, REQUEST):
        """Render the user details page for a single uid, or a JSON users
        payload when ``uid`` is a comma-separated list.

        The roles the user held at ``date_for_roles`` (default: today) are
        reconstructed by replaying the LDAP metadata log backwards from
        the current role set.
        """
        uid = REQUEST.form.get('uid')
        date_for_roles = REQUEST.form.get('date_for_roles')

        if "," in uid:
            # Multiple users requested: there is no single user page to
            # prepare, only a JSON list for the template.
            user = None
            roles = None
            multi = json.dumps({'users': uid.split(",")})
        else:
            multi = None
            user, roles = self._prepare_user_page(uid)

        is_auth = _is_authenticated(REQUEST)
        # we can only connect to ldap with bind=True if we have an
        # authenticated user
        agent = self._get_ldap_agent(bind=is_auth)

        user_dn = agent._user_dn(uid)
        log_entries = list(reversed(agent._get_metadata(user_dn)))
        VIEWS = {}
        # BUG FIX: in the multi-uid branch ``roles`` is None and iterating
        # it raised TypeError -- treat None as an empty role list.
        filtered_roles = set([info[0] for info in (roles or [])])   # + owner_roles)
        if date_for_roles:
            filter_date = DateTime(date_for_roles).asdatetime().date()
        else:
            filter_date = DateTime().asdatetime().date()

        # Replay the log (newest first): undo every role change that
        # happened on or after filter_date to recover the older state.
        for entry in log_entries:
            date = DateTime(entry['timestamp']).toZone("CET")
            entry['timestamp'] = date.ISO()
            # Cache one details_* view per action type.
            view = VIEWS.get(entry['action'])
            if not view:
                view = getMultiAdapter((self, self.REQUEST),
                                       name="details_" + entry['action'])
                VIEWS[entry['action']] = view
            entry['view'] = view

            _roles = entry.get('data', {}).get('roles')
            _role = entry.get('data', {}).get('role')
            if date.asdatetime().date() >= filter_date:
                if entry['action'] == 'ENABLE_ACCOUNT':
                    # Robustness: some entries may carry no 'roles' payload.
                    filtered_roles.difference_update(set(_roles or []))
                elif entry['action'] == "DISABLE_ACCOUNT":
                    filtered_roles.update(set(_roles or []))
                elif entry['action'] in ["ADDED_TO_ROLE"]:
                    if _role and _role in filtered_roles:
                        filtered_roles.remove(_role)
                elif entry['action'] in ["REMOVED_FROM_ROLE"]:
                    if _role:
                        filtered_roles.add(_role)

        # Collapse consecutive log entries with the same author and action
        # into one entry carrying a list of data payloads.
        output = []
        for entry in log_entries:
            if output:
                last_entry = output[-1]
                check = ['author', 'action']
                flag = True
                for k in check:
                    if last_entry[k] != entry[k]:
                        flag = False
                        break
                if flag:
                    last_entry['data'].append(entry['data'])
                else:
                    entry['data'] = [entry['data']]
                    output.append(entry)
            else:
                entry['data'] = [entry['data']]
                output.append(entry)

        removed_roles = []
        # BUG FIX: guard against ``user`` being None (multi-uid branch)
        # before calling .get() on it.
        if user and user.get('status') == 'disabled':
            # process log entries to list the roles the user had before
            # being disabled
            for entry in log_entries:
                if entry['action'] == 'DISABLE_ACCOUNT':
                    for role in entry['data'][0]['roles']:
                        try:
                            role_description = agent.role_info(role)[
                                'description']
                        # narrowed from a bare except; best effort -- the
                        # role may have been deleted since the log entry
                        except Exception:
                            role_description = ("This role doesn't exist "
                                                "anymore")
                        removed_roles.append((role, role_description))
                    break

        return self._render_template(
            "zpt/userdetails/index.zpt", context=self,
            filtered_roles=filtered_roles, user=user, roles=roles,
            removed_roles=removed_roles, multi=multi, log_entries=output)
def notify_and_expire():
    """
    For each registered user check all the conditions and execute
    the notification action.

    Users whose password_date is still the sentinel get the current time
    stored as their initial password date; everyone else is run through
    the registered IExpirationCheck notifications.
    """
    portal = api.portal.get()
    registry = getUtility(IRegistry)
    validity_period = registry['collective.pwexpiry.validity_period']
    notifications_to_use = set()
    if 'collective.pwexpiry.notification_actions' in registry:
        notifications_to_use = registry['collective.pwexpiry.notification_actions']
    current_time = portal.ZopeTime()
    local_tz = current_time.timezone()
    for user_id in portal.acl_users.source_users.getUserIds():
        user = portal.portal_membership.getMemberById(user_id)
        password_date = DateTime(user.getProperty('password_date', '2000/01/01'))
        last_notification_date = DateTime(user.getProperty('last_notification_date', '2000/01/01'))
        last_notification_date = last_notification_date.toZone(local_tz)
        # BUG FIX: the original compared str(password_date) -- a string --
        # to a DateTime instance, which is never equal, so the sentinel
        # branch below could never run.  Compare DateTime to DateTime.
        if password_date == DateTime('2000/01/01'):
            # The user has not set the changed the password yet - the current time
            # is set as the initial value
            user.setMemberProperties({'password_date': current_time})
            logger.info('Set new password reset date for user: %s' % user_id)
        else:
            # Counting days difference since the user reset his password
            since_last_pw_reset = days_since_event(password_date.asdatetime(),
                                                   current_time.asdatetime())
            # Counting days difference since the notification has been sent to the user
            since_last_notification = days_since_event(last_notification_date.asdatetime(),
                                                       current_time.asdatetime())
            # Number of days before the user's password expires
            days_to_expire = validity_period - since_last_pw_reset

            # Search for registered notifications and execute them
            notifications = getAdapters((portal,), IExpirationCheck)
            for notification_name, notification in notifications:
                if notifications_to_use and notification_name not in notifications_to_use:
                    msg = ("Skipping notification %s because it is not in "
                           "registry['collective.pwexpiry.notification_actions']")
                    logger.debug(msg % notification_name)
                    continue
                if notification(days_to_expire):
                    try:
                        # Protection of sending the expired notification email twice
                        pwres_to_notif = days_since_event(password_date.asdatetime(),
                                                    last_notification_date.asdatetime())
                        if pwres_to_notif > validity_period:
                            logger.warning('Omitting notification for user: \'%s\' ' \
                                           'because the expiration email has already ' \
                                           'been sent once.'% (user_id))
                            break
                        # Protection of sending the notification email twice
                        if since_last_notification < 1:
                            logger.warning('Omitting notification for user: \'%s\' ' \
                                           'because the notification has been already ' \
                                           'sent today.'% (user_id))
                            break

                        # Executing the notification action and updating user's property
                        notification.notification_action(user, days_to_expire)
                        logger.info('Triggered %s action for user: %s' % (
                            notification_name, user_id)
                        )
                        user.setMemberProperties({'last_notification_date': current_time})
                    # Modernized from the Py2-only "except Exception, exc"
                    # spelling (works on Python 2.6+ and 3).
                    except Exception as exc:
                        # Continue with the script even in case of problems
                        logger.error('Error while performing notification: %s ' \
                                  'for user: %s: %s' % (notification_name, user_id, exc))
                        continue
Esempio n. 32
0
def notify_and_expire():
    """
    For each registered user check all the conditions and execute
    the notification action.

    Skips entirely when password expiration is disabled
    (validity_period == 0) and ignores whitelisted users.
    """
    logger = logging.getLogger("collective.pwexpiry")
    # BUG FIX: log message typo ("notify_an_expire" -> "notify_and_expire")
    logger.info("*" * 8 + "Executing notify_and_expire script" + "*" * 8)

    portal = api.portal.get()
    registry = getUtility(IRegistry)
    validity_period = registry["collective.pwexpiry.validity_period"]

    if validity_period == 0:
        # do not do any notifications, if password expiration has been disabled
        return

    notifications_to_use = set()
    if "collective.pwexpiry.notification_actions" in registry:
        notifications_to_use = registry[
            "collective.pwexpiry.notification_actions"]
    current_time = portal.ZopeTime()
    local_tz = current_time.timezone()

    whitelisted = registry.get("collective.pwexpiry.whitelisted_users")
    for user_id in portal.acl_users.source_users.getUserIds():
        # Ignore whitelisted
        if whitelisted and user_id in whitelisted:
            continue

        user = portal.portal_membership.getMemberById(user_id)
        password_date = DateTime(
            user.getProperty("password_date", "2000/01/01"))
        last_notification_date = DateTime(
            user.getProperty("last_notification_date", "2000/01/01"))
        last_notification_date = last_notification_date.toZone(local_tz)

        if password_date == DateTime("2000/01/01"):
            # The user has not set the changed the password yet -
            # the current time is set as the initial value
            user.setMemberProperties({"password_date": current_time})
            logger.info("Set new password reset date for user: %s" % user_id)
            continue

        # Counting days difference since the user reset his password
        since_last_pw_reset = days_since_event(password_date.asdatetime(),
                                               current_time.asdatetime())
        # Counting days diff since the notification has been sent to the user
        since_last_notification = days_since_event(
            last_notification_date.asdatetime(), current_time.asdatetime())
        # Number of days before the user's password expires
        days_to_expire = validity_period - since_last_pw_reset

        # Search for registered notifications and execute them
        notifications = getAdapters((portal, ), IExpirationCheck)
        for notification_name, notification in notifications:
            if (notifications_to_use
                    and notification_name not in notifications_to_use):
                msg = ("Skipping notification %s because it is not in "
                       "registry['collective.pwexpiry.notification_actions']")
                logger.debug(msg % notification_name)
                continue
            if notification(days_to_expire):
                try:
                    # Protection of sending the
                    # expired notification email twice
                    pwres_to_notif = days_since_event(
                        password_date.asdatetime(),
                        last_notification_date.asdatetime(),
                    )
                    # BUG FIX: the two warning strings below had lost their
                    # '%s' placeholder (censored to '******'), which made
                    # the "..." % (user_id) formatting raise TypeError
                    # ("not all arguments converted").  Restored to match
                    # the sibling implementation of this function.
                    if pwres_to_notif > validity_period:
                        logger.warning(
                            "Omitting notification for user: '%s' "
                            "because the expiration email has already "
                            "been sent once." % (user_id))
                        break
                    # Protection of sending the notification email twice
                    if since_last_notification < 1:
                        logger.warning(
                            "Omitting notification for user: '%s' "
                            "because the notification has been already "
                            "sent today." % (user_id))
                        break

                    # Executing the notification
                    # action and updating user's property
                    notification.notification_action(user, days_to_expire)
                    logger.info("Triggered %s action for user: %s" %
                                (notification_name, user_id))
                    user.setMemberProperties(
                        {"last_notification_date": current_time})
                except Exception:
                    # Continue with the script even in case of problems
                    logger.exception("Error while performing notification: %s "
                                     "for user: %s" %
                                     (notification_name, user_id))
                    continue
    def authenticateCredentials(self, credentials):
        """See IAuthenticationPlugin.

        This plugin will actually never authenticate.

        o We expect the credentials to be those returned by
          ILoginPasswordExtractionPlugin.
        """
        request = self.REQUEST
        alsoProvides(request, IDisableCSRFProtection)

        response = request['RESPONSE']
        pas_instance = self._getPAS()

        login = credentials.get('login')
        password = credentials.get('password')

        if None in (login, password, pas_instance) and (
                credentials.get('source') != 'plone.session'):
            return None
        else:
            session_source = self.session

            ticket = credentials.get('cookie')

            if session_source._shared_secret is not None:
                ticket_data = tktauth.validateTicket(
                    session_source._shared_secret,
                    ticket,
                    timeout=session_source.timeout,
                    mod_auth_tkt=session_source.mod_auth_tkt)
            else:
                ticket_data = None
                manager = queryUtility(IKeyManager)
                if manager is None:
                    return None
                for secret in manager[u"_system"]:
                    if secret is None:
                        continue

                    ticket_data = tktauth.validateTicket(
                        secret,
                        ticket,
                        timeout=session_source.timeout,
                        mod_auth_tkt=session_source.mod_auth_tkt)

                    if ticket_data is not None:
                        break

            if ticket_data is None:
                return None

            (digest, userid, tokens, user_data, timestamp) = ticket_data
            pas = self._getPAS()
            info = pas._verifyUser(pas.plugins, user_id=userid)

            if info is None:
                return None

            login = info['login']

        cookie_val = self.getCookie()

        # get max seats from member data property or cache and default to 1 if not set
        try:
            max_seats = self.getMaxSeatsForLogin(login)
        except:
            traceback.print_exc()

        # When debugging, print the maxSeats value that was resolved
        if self.DEBUG:
            print "authenticateCredentials():: Max Seats is " + str(max_seats)

        if max_seats == 1:
            if cookie_val:
                # A cookie value is there.  If it's the same as the value
                # in our mapping, it's fine.  Otherwise we'll force a
                # logout.
                existing = self.mapping1.get(login, None)

                if self.DEBUG:
                    if existing:
                        print "authenticateCredentials():: cookie_val is " + cookie_val + ", and active tokens are: " + ', '.join(
                            existing['tokens'])

                if existing and cookie_val not in existing['tokens']:
                    # The cookies values differ, we want to logout the
                    # user by calling resetCredentials.  Note that this
                    # will eventually call our own resetCredentials which
                    # will cleanup our own cookie.
                    try:
                        self.resetAllCredentials(request, response)
                        pas_instance.plone_utils.addPortalMessage(
                            _(u"Someone else logged in under your name.  You have been \
                            logged out"), "error")
                    except:
                        traceback.print_exc()
                elif existing is None:
                    # The browser has the cookie but we don't know about
                    # it.  Let's reset our own cookie:
                    self.setCookie('')

            else:
                # When no cookie is present, we generate one, store it and
                # set it in the response:
                cookie_val = uuid()
                # do some cleanup in our mappings
                existing = self.mapping1.get(login)

                if existing and 'tokens' in existing:
                    try:
                        if existing['tokens'][0] in self.mapping2:
                            del self.mapping2[existing['tokens'][0]]
                    except:
                        pass

                try:
                    from_ip = self.get_ip(request)
                except:
                    traceback.print_exc()

                now = DateTime()
                self.mapping1[login] = {'tokens': []}
                self.mapping1[login]['tokens'].append(cookie_val)
                self.mapping2[cookie_val] = {
                    'userid':
                    login,
                    'ip':
                    from_ip,
                    'startTime':
                    now,
                    'expireTime':
                    DateTime(now.asdatetime() + self.time_to_persist_cookies)
                }
                self.setCookie(cookie_val)
        else:
            # Max seats is not 1. Treat this as a floating licenses scenario.
            # Nobody is logged out, but once the max seats threshold is reached,
            # active tokens must expire before new users may log in.
            if cookie_val:
                # When the cookie value is there, try to verify it or activate it if is it not added yet
                self.verifyToken(cookie_val, login, max_seats, request,
                                 response)
            else:
                if self.DEBUG:
                    print "authenticateCredentials:: Try to issue a token because there is no cookie value."

                # When no cookie is present, attempt to issue a token and use the cookie to store it
                self.issueToken(login, max_seats, request, response)
                # if max_seats are filled, then force logout
                if self.isLoginAtCapacity(login, max_seats):
                    self.forceLogoutForUser(login, request, response)

        return None  # Note that we never return anything useful
class TestSyndication(EEAContentTypeTestCase):
    """ Test-cases for syndication: titles, dates and thumbnail markup of
    RSS feed items for documents, events, folders and highlights. """

    def afterSetUp(self):
        """ Create one document and one event with fixed, known dates so
        the assertions below are deterministic.
        """
        self.setRoles('Manager')
        self.workflow = self.portal.portal_workflow
        # Fixed dates used by testDate()
        self.effective_date = DateTime(year=2008, month=4, day=3)
        self.start_date = DateTime(year=2007, month=1, day=1)

        self.folder.invokeFactory('Document', id='doc')
        doc = self.folder.doc
        doc.setTitle('Some Document')
        doc.setEffectiveDate(self.effective_date)
        doc.reindexObject()

        self.folder.invokeFactory('QuickEvent', id='event')
        event = self.folder.event
        event.setTitle('Some Event')
        # 'location' is a module-level fixture value
        event.setLocation(location)
        event.setEffectiveDate(self.effective_date)
        event.setStartDate(self.start_date)
        event.reindexObject()

    def getFeedItem(self, context, doc):
        """ Return the IFeedItem adapter for (doc, context), falling back
        to a plain BaseItem when no adapter is registered.
        """
        adapter = queryMultiAdapter((doc, context), IFeedItem)
        if adapter is None:
            adapter = BaseItem(doc, context)
        return adapter

    def testTitle(self):
        """ Feed entry titles mirror the content titles.
        """
        entry = self.getFeedItem(self.folder, self.folder.doc)
        self.assertEquals(entry.title, 'Some Document')

        entry = self.getFeedItem(self.folder, self.folder.event)
        self.assertEquals(entry.title, 'Some Event')

    def testDate(self):
        """ Documents publish on their effective date; events publish on
        their start date.
        """
        entry = self.getFeedItem(self.folder, self.folder.doc)
        # Trim microseconds -- feed dates carry second resolution only
        ed = self.effective_date.asdatetime().replace(microsecond=0)
        effective_date = DateTime(ed).utcdatetime()
        self.assertEquals(entry.published.utcdatetime(), effective_date)

        entry = self.getFeedItem(self.folder, self.folder.event)
        sd = self.folder.event.start().asdatetime().replace(microsecond=0)
        start_date = DateTime(sd).utcdatetime()
        self.assertEquals(entry.published.utcdatetime(), start_date)

    def testFolderThumb(self):
        """ A folder containing an image renders an <img> tag in its feed
        item description.
        """
        # simulate publications which are folders
        self.folder.invokeFactory(
            'Image', id='img1', image=image, title='Simple Image')
        view = self.folder.restrictedTraverse('@@rss.xml')
        entry = self.getFeedItem(self.folder, self.folder)
        self.failUnless('img' in view.getItemDescription(entry))

    def testHighlightThumb(self):
        """ A Highlight with an image renders an <img> tag in its feed
        item description.
        """
        highlight = self.folder[self.folder.invokeFactory(
            'Highlight', id='h1', title='Highlight')]
        highlight.setImage(image)
        view = highlight.restrictedTraverse('@@rss.xml')
        entry = self.getFeedItem(self.folder, highlight)
        self.failUnless('img' in view.getItemDescription(entry))
Esempio n. 35
0
class DashboardView(BrowserView):
    template = ViewPageTemplateFile("templates/dashboard.pt")

    def __init__(self, context, request):
        # Initialise the base browser view, then reset per-request
        # state; both attributes are populated lazily in __call__.
        BrowserView.__init__(self, context, request)
        self.dashboard_cookie = None  # filter selections, set in __call__
        self.member = None  # authenticated member, set in __call__

    def __call__(self):
        """Render the dashboard, or redirect to the frontpage when the
        dashboard is disabled site-wide or the visitor is anonymous.
        """
        frontpage_url = self.portal_url + "/senaite-frontpage"
        mtool = getToolByName(self.context, 'portal_membership')

        # Dashboard disabled, or anonymous visitor: bounce to the
        # frontpage instead of rendering anything
        if not self.context.bika_setup.getDashboardByDefault() \
                or mtool.isAnonymousUser():
            self.request.response.redirect(frontpage_url)
            return

        self.member = mtool.getAuthenticatedMember()
        self._init_date_range()
        self.dashboard_cookie = self.check_dashboard_cookie()
        return self.template()

    def check_dashboard_cookie(self):
        """Return the dashboard filter selections stored in the cookie.

        When the cookie is not present yet, a new one is written with
        every section defaulting to 'all', and that default mapping is
        returned directly.

        :return: a dictionary of strings
        """
        raw = self.request.get(DASHBOARD_FILTER_COOKIE, None)
        if raw is not None:
            # Cookie already set: decode it, normalising to plain strings
            return get_strings(json.loads(raw))

        # First visit: persist the defaults and hand them back
        defaults = self._create_raw_data()
        self.request.response.setCookie(DASHBOARD_FILTER_COOKIE,
                                        json.dumps(defaults),
                                        quoted=False,
                                        path='/')
        return defaults

    def is_filter_selected(self, selection_id, value):
        """Tell whether the dashboard cookie stores *value* under the
        given *selection_id* key.

        :param selection_id: a string as a dashboard_cookie key.
        :param value: value to compare against the stored selection.
        :return: Boolean.
        """
        return self.dashboard_cookie.get(selection_id) == value

    def is_admin_user(self):
        """Whether the current user holds a managerial role.

        :return: True when the user has the LabManager or Manager role.
        """
        roles = api.user.get_current().getRoles()
        return any(role in roles for role in ("LabManager", "Manager"))

    def _create_raw_data(self):
        """Build the initial cookie payload: every visible section id
        mapped to the 'all' filter.

        :return: A dictionary like:
            {'analyses':'all','analysisrequest':'all','worksheets':'all'}
        """
        return dict((section.get('id'), 'all')
                    for section in self.get_sections())

    def _init_date_range(self):
        """ Sets the date range from which the data must be retrieved.
            Sets the values to the class parameters 'date_from',
            'date_to', 'date_range', and self.periodicity
            Calculates the date range according to the value of the
            request's 'p' parameter:
            - 'd' (daily)
            - 'w' (weekly)
            - 'm' (monthly)
            - 'q' (quarterly)
            - 'b' (biannual)
            - 'y' (yearly)
            - 'a' (all-time)

            Month arithmetic uses floor division ('//') so the result
            stays an int under Python 3 as well (true division would
            yield a float and break DateTime construction).
        """
        # By default, weekly
        self.periodicity = self.request.get('p', 'w')
        if self.periodicity == 'd':
            # Daily
            self.date_from = DateTime()
            self.date_to = DateTime() + 1
            # For time-evolution data, load last 30 days
            self.min_date = self.date_from - 30
        elif self.periodicity == 'm':
            # Monthly
            today = datetime.date.today()
            self.date_from = DateTime(today.year, today.month, 1)
            self.date_to = DateTime(today.year, today.month,
                                    monthrange(today.year, today.month)[1], 23,
                                    59, 59)
            # For time-evolution data, load last two years
            min_year = today.year - 1 if today.month == 12 else today.year - 2
            min_month = 1 if today.month == 12 else today.month
            self.min_date = DateTime(min_year, min_month, 1)
        elif self.periodicity == 'q':
            # Quarterly: m is the first month of the current quarter
            today = datetime.date.today()
            m = (((today.month - 1) // 3) * 3) + 1
            self.date_from = DateTime(today.year, m, 1)
            self.date_to = DateTime(today.year, m + 2,
                                    monthrange(today.year, m + 2)[1], 23, 59,
                                    59)
            # For time-evolution data, load last four years
            min_year = today.year - 4 if today.month == 12 else today.year - 5
            self.min_date = DateTime(min_year, m, 1)
        elif self.periodicity == 'b':
            # Biannual: m is the first month of the current half-year
            today = datetime.date.today()
            m = (((today.month - 1) // 6) * 6) + 1
            self.date_from = DateTime(today.year, m, 1)
            self.date_to = DateTime(today.year, m + 5,
                                    monthrange(today.year, m + 5)[1], 23, 59,
                                    59)
            # For time-evolution data, load last ten years
            min_year = today.year - 10 if today.month == 12 else today.year - 11
            self.min_date = DateTime(min_year, m, 1)
        elif self.periodicity == 'y':
            # Yearly
            today = datetime.date.today()
            self.date_from = DateTime(today.year, 1, 1)
            self.date_to = DateTime(today.year, 12, 31, 23, 59, 59)
            # For time-evolution data, load last 15 years
            min_year = today.year - 15 if today.month == 12 else today.year - 16
            self.min_date = DateTime(min_year, 1, 1)
        elif self.periodicity == 'a':
            # All time
            today = datetime.date.today()
            self.date_from = DateTime('1990-01-01 00:00:00')
            self.date_to = DateTime(today.year, 12, 31, 23, 59, 59)
            # For time-evolution data, load last 15 years
            min_year = today.year - 15 if today.month == 12 else today.year - 16
            self.min_date = DateTime(min_year, 1, 1)
        else:
            # weekly
            today = datetime.date.today()
            year, weeknum, dow = today.isocalendar()
            self.date_from = DateTime() - dow
            self.date_to = self.date_from + 7
            # For time-evolution data, load last six months
            min_year = today.year if today.month > 6 else today.year - 1
            min_month = today.month - 6 if today.month > 6 else (today.month -
                                                                 6) + 12
            self.min_date = DateTime(min_year, min_month, 1)

        # Catalog range queries derived from the values above
        self.date_range = {
            'query': (self.date_from, self.date_to),
            'range': 'min:max'
        }
        self.base_date_range = {
            'query': (DateTime('1990-01-01 00:00:00'), DateTime() + 1),
            'range': 'min:max'
        }
        self.min_date_range = {
            'query': (self.min_date, self.date_to),
            'range': 'min:max'
        }

    def get_sections(self):
        """ Returns an array with the sections to be displayed,
            honouring per-user panel visibility.
            Every section is a dictionary with the following structure:
                {'id': <section_identifier>,
                 'title': <section_title>,
                'panels': <array of panels>}
        """
        user = api.user.get_current()
        builders = (
            ('analyses', self.get_analyses_section),
            ('analysisrequests', self.get_analysisrequests_section),
            ('worksheets', self.get_worksheets_section),
            ('samples', self.get_samples_section),
        )
        return [build() for panel_id, build in builders
                if is_panel_visible_for_user(panel_id, user)]

    def get_filter_options(self):
        """
        Returns the dashboard filter options ('all' / 'mine').
        :return: DisplayList of (id, translated title) pairs
        """
        return DisplayList((
            ('all', _('All')),
            ('mine', _('Mine')),
        ))

    def _getStatistics(self, name, description, url, catalog, criterias,
                       total):
        """Build a 'simple-panel' dict counting how many catalog items
        match *criterias* out of *total*, with a one-decimal percentage.
        """
        matches = 0
        ratio = 0
        if total > 0:
            matches = len(catalog(criterias))
            # Never report more matches than the overall total
            matches = min(matches, total)
            if matches > 0:
                ratio = (float(matches) / float(total)) * 100
        # One decimal place, rendered as a string for the legend
        ratio = "%.1f" % ratio
        return {
            'type': 'simple-panel',
            'name': name,
            'class': 'informative',
            'description': description,
            'total': total,
            'link': self.portal_url + '/' + url,
            'legend': _('of') + " " + str(total) + ' (' + ratio + '%)',
            'number': matches,
            'percentage': float(ratio),
        }

    def get_analysisrequests_section(self):
        """ Returns the section dictionary related with Analysis
            Requests, that contains some informative panels (like
            ARs to be verified, ARs to be published, etc.)

            The same mutable `query` dict is reused for every counter:
            each step overwrites `review_state` before calling
            `_getStatistics`, so statement order matters here.
        """
        out = []
        catalog = getToolByName(self.context, CATALOG_ANALYSIS_REQUEST_LISTING)
        query = {
            'portal_type': "AnalysisRequest",
            'cancellation_state': ['active']
        }
        # Restrict to the departments selected in the department cookie
        filtering_allowed = self.context.bika_setup.getAllowDepartmentFiltering(
        )
        if filtering_allowed:
            cookie_dep_uid = self.request.get(
                FILTER_BY_DEPT_COOKIE_ID,
                '').split(',') if filtering_allowed else ''
            query['getDepartmentUIDs'] = {
                "query": cookie_dep_uid,
                "operator": "or"
            }

        # Check if dashboard_cookie contains any values to query
        # elements by
        query = self._update_criteria_with_filters(query, 'analysisrequests')

        # Active Analysis Requests (All)
        total = len(catalog(query))

        # Sampling workflow enabled?
        if (self.context.bika_setup.getSamplingWorkflowEnabled()):
            # Analysis Requests awaiting to be sampled or scheduled
            name = _('Analysis Requests to be sampled')
            desc = _("To be sampled")
            purl = 'samples?samples_review_state=to_be_sampled'
            query['review_state'] = [
                'to_be_sampled',
            ]
            out.append(
                self._getStatistics(name, desc, purl, catalog, query, total))

            # Analysis Requests awaiting to be preserved
            name = _('Analysis Requests to be preserved')
            desc = _("To be preserved")
            purl = 'samples?samples_review_state=to_be_preserved'
            query['review_state'] = [
                'to_be_preserved',
            ]
            out.append(
                self._getStatistics(name, desc, purl, catalog, query, total))

            # Analysis Requests scheduled for Sampling
            name = _('Analysis Requests scheduled for sampling')
            desc = _("Sampling scheduled")
            purl = 'samples?samples_review_state=scheduled_sampling'
            query['review_state'] = [
                'scheduled_sampling',
            ]
            out.append(
                self._getStatistics(name, desc, purl, catalog, query, total))

        # Analysis Requests awaiting for reception
        name = _('Analysis Requests to be received')
        desc = _("Reception pending")
        purl = 'analysisrequests?analysisrequests_review_state=sample_due'
        query['review_state'] = [
            'sample_due',
        ]
        out.append(self._getStatistics(name, desc, purl, catalog, query,
                                       total))

        # Analysis Requests under way
        name = _('Analysis Requests with results pending')
        desc = _("Results pending")
        purl = 'analysisrequests?analysisrequests_review_state=sample_received'
        query['review_state'] = [
            'attachment_due', 'sample_received', 'assigned'
        ]
        out.append(self._getStatistics(name, desc, purl, catalog, query,
                                       total))

        # Analysis Requests to be verified
        name = _('Analysis Requests to be verified')
        desc = _("To be verified")
        purl = 'analysisrequests?analysisrequests_review_state=to_be_verified'
        query['review_state'] = [
            'to_be_verified',
        ]
        out.append(self._getStatistics(name, desc, purl, catalog, query,
                                       total))

        # Analysis Requests verified (to be published)
        name = _('Analysis Requests verified')
        desc = _("Verified")
        purl = 'analysisrequests?analysisrequests_review_state=verified'
        query['review_state'] = [
            'verified',
        ]
        out.append(self._getStatistics(name, desc, purl, catalog, query,
                                       total))

        # Analysis Requests published
        name = _('Analysis Requests published')
        desc = _("Published")
        purl = 'analysisrequests?analysisrequests_review_state=published'
        query['review_state'] = [
            'published',
        ]
        out.append(self._getStatistics(name, desc, purl, catalog, query,
                                       total))

        # Analysis Requests to be printed
        if self.context.bika_setup.getPrintingWorkflowEnabled():
            name = _('Analysis Requests to be printed')
            desc = _("To be printed")
            purl = 'analysisrequests?analysisrequests_getPrinted=0'
            query['getPrinted'] = '0'
            query['review_state'] = [
                'published',
            ]
            out.append(
                self._getStatistics(name, desc, purl, catalog, query, total))

        # Chart with the evolution of ARs over a period, grouped by
        # periodicity
        # NOTE(review): when the printing workflow is enabled, the
        # 'getPrinted' criterion set above is NOT removed here, so it
        # also constrains this evolution query -- confirm this is
        # intended before relying on the chart numbers.
        if 'review_state' in query:
            del query['review_state']
        query['sort_on'] = 'created'
        query['created'] = self.min_date_range
        outevo = self._fill_dates_evo(catalog, query)
        out.append({
            'type': 'bar-chart-panel',
            'name': _('Evolution of Analysis Requests'),
            'class': 'informative',
            'description': _('Evolution of Analysis Requests'),
            'data': json.dumps(outevo),
            'datacolors': json.dumps(self.get_colors_palette())
        })

        return {
            'id': 'analysisrequests',
            'title': _('Analysis Requests'),
            'panels': out
        }

    def get_worksheets_section(self):
        """ Returns the section dictionary related with Worksheets,
            that contains some informative panels (like
            WS to be verified, WS with results pending, etc.)
        """
        panels = []
        catalog = getToolByName(self.context, CATALOG_WORKSHEET_LISTING)
        query = {
            'portal_type': "Worksheet",
        }
        # Restrict to the departments selected in the department cookie
        if self.context.bika_setup.getAllowDepartmentFiltering():
            dep_uids = self.request.get(FILTER_BY_DEPT_COOKIE_ID,
                                        '').split(',')
            query['getDepartmentUIDs'] = {
                "query": dep_uids,
                "operator": "or"
            }

        # Honour any filters stored in the dashboard cookie
        query = self._update_criteria_with_filters(query, 'worksheets')

        # Active Worksheets (all)
        total = len(catalog(query))

        # One informative counter per workflow stage; for this section
        # the panel name and description are the same string
        counters = (
            (_('Results pending'), 'worksheets?list_review_state=open',
             ['open', 'attachment_due']),
            (_('To be verified'),
             'worksheets?list_review_state=to_be_verified',
             ['to_be_verified']),
            (_('Verified'), 'worksheets?list_review_state=verified',
             ['verified']),
        )
        for label, url, states in counters:
            query['review_state'] = states
            panels.append(
                self._getStatistics(label, label, url, catalog, query, total))

        # Chart with the evolution of WSs over a period, grouped by
        # periodicity
        del query['review_state']
        query['sort_on'] = 'created'
        query['created'] = self.min_date_range
        evolution = self._fill_dates_evo(catalog, query)
        panels.append({
            'type': 'bar-chart-panel',
            'name': _('Evolution of Worksheets'),
            'class': 'informative',
            'description': _('Evolution of Worksheets'),
            'data': json.dumps(evolution),
            'datacolors': json.dumps(self.get_colors_palette())
        })

        return {'id': 'worksheets', 'title': _('Worksheets'), 'panels': panels}

    def get_analyses_section(self):
        """ Returns the section dictionary related with Analyses,
            that contains some informative panels (analyses pending
            analyses assigned, etc.)

            sample_registered, not_requested, published, retracted,
            sample_due, sample_received, sample_prep, sampled, to_be_preserved,
            to_be_sampled, , to_be_verified, rejected, verified, to_be_verified,
            assigned
        """
        panels = []
        catalog = getToolByName(self.context, CATALOG_ANALYSIS_LISTING)
        query = {'portal_type': "Analysis", 'cancellation_state': ['active']}
        # Restrict to the departments selected in the department cookie
        if self.context.bika_setup.getAllowDepartmentFiltering():
            dep_uids = self.request.get(FILTER_BY_DEPT_COOKIE_ID,
                                        '').split(',')
            query['getDepartmentUID'] = {
                "query": dep_uids,
                "operator": "or"
            }

        # Honour any filters stored in the dashboard cookie
        query = self._update_criteria_with_filters(query, 'analyses')

        # Active Analyses (All)
        total = len(catalog(query))

        # One informative counter per workflow stage; for this section
        # the panel name and description are the same string and the
        # link always points to the aggregated analyses listing
        counters = (
            (_('Assignment pending'), ['sample_received']),
            (_('Results pending'), ['assigned', 'attachment_due']),
            (_('To be verified'), ['to_be_verified']),
            (_('Verified'), ['verified']),
        )
        for label, states in counters:
            query['review_state'] = states
            panels.append(self._getStatistics(
                label, label, 'aggregatedanalyses', catalog, query, total))

        # Chart with the evolution of Analyses over a period, grouped by
        # periodicity
        del query['review_state']
        query['sort_on'] = 'created'
        query['created'] = self.min_date_range
        evolution = self._fill_dates_evo(catalog, query)
        panels.append({
            'type': 'bar-chart-panel',
            'name': _('Evolution of Analyses'),
            'class': 'informative',
            'description': _('Evolution of Analyses'),
            'data': json.dumps(evolution),
            'datacolors': json.dumps(self.get_colors_palette())
        })
        return {'id': 'analyses', 'title': _('Analyses'), 'panels': panels}

    def get_samples_section(self):
        """ Returns the section dictionary related with Samples:
            informative counters per workflow state plus an evolution
            chart.
        """
        panels = []
        catalog = getToolByName(self.context, 'portal_catalog')
        query = {'portal_type': "Sample", 'cancellation_state': ['active']}
        # Restrict to the departments selected in the department cookie
        if self.context.bika_setup.getAllowDepartmentFiltering():
            dep_uids = self.request.get(FILTER_BY_DEPT_COOKIE_ID,
                                        '').split(',')
            query['getDepartmentUIDs'] = {
                "query": dep_uids,
                "operator": "or"
            }

        # Honour any filters stored in the dashboard cookie
        query = self._update_criteria_with_filters(query, 'samples')

        # Active Samples (All)
        total = len(catalog(query))

        counters = []
        # Sampling-workflow-only counters come first
        if self.context.bika_setup.getSamplingWorkflowEnabled():
            counters += [
                (_('Samples to be sampled'), _("To be sampled"),
                 'samples?samples_review_state=to_be_sampled',
                 ['to_be_sampled']),
                (_('Samples to be preserved'), _("To be preserved"),
                 'samples?samples_review_state=to_be_preserved',
                 ['to_be_preserved']),
                (_('Samples scheduled for sampling'), _("Sampling scheduled"),
                 'samples?samples_review_state=scheduled_sampling',
                 ['scheduled_sampling']),
            ]
        counters += [
            (_('Samples to be received'), _("Reception pending"),
             'samples?samples_review_state=sample_due',
             ['sample_due']),
            (_('Samples received'), _("Samples received"),
             'samples?samples_review_state=sample_received',
             ['sample_received']),
            (_('Samples rejected'), _("Samples rejected"),
             'samples?samples_review_state=rejected',
             ['rejected']),
        ]
        for name, desc, url, states in counters:
            query['review_state'] = states
            panels.append(
                self._getStatistics(name, desc, url, catalog, query, total))

        # Chart with the evolution of samples over a period, grouped by
        # periodicity
        if 'review_state' in query:
            del query['review_state']
        query['sort_on'] = 'created'
        query['created'] = self.min_date_range
        evolution = self._fill_dates_evo(catalog, query)
        panels.append({
            'type': 'bar-chart-panel',
            'name': _('Evolution of Samples'),
            'class': 'informative',
            'description': _('Evolution of Samples'),
            'data': json.dumps(evolution),
            'datacolors': json.dumps(self.get_colors_palette())
        })

        return {'id': 'samples', 'title': _('Samples'), 'panels': panels}

    def get_states_map(self, portal_type):
        """Map workflow state ids to their display labels for the given
        portal type. Returns None for unknown portal types.
        """
        maps = {
            'Analysis': {
                'to_be_sampled': _('Sample reception pending'),
                'sample_due': _('Sample reception pending'),
                'sample_received': _('Assignment pending'),
                'assigned': _('Results pending'),
                'attachment_due': _('Results pending'),
                'to_be_verified': _('To be verified'),
                'rejected': _('Rejected'),
                'retracted': _('Retracted'),
                'verified': _('Verified'),
                'published': _('Published')
            },
            'AnalysisRequest': {
                'to_be_sampled': _('To be sampled'),
                'to_be_preserved': _('To be preserved'),
                'scheduled_sampling': _('Sampling scheduled'),
                'sample_due': _('Reception pending'),
                'rejected': _('Rejected'),
                'sample_received': _('Results pending'),
                'assigned': _('Results pending'),
                'attachment_due': _('Results pending'),
                'to_be_verified': _('To be verified'),
                'verified': _('Verified'),
                'published': _('Published')
            },
            'Worksheet': {
                'open': _('Results pending'),
                'attachment_due': _('Results pending'),
                'to_be_verified': _('To be verified'),
                'verified': _('Verified')
            },
            'Sample': {
                'to_be_sampled': _('To be sampled'),
                'to_be_preserved': _('To be preserved'),
                'scheduled_sampling': _('Sampling scheduled'),
                'sample_due': _('Reception pending'),
                'rejected': _('Rejected'),
                'sample_received': _('Sample received'),
            },
        }
        return maps.get(portal_type)

    def get_colors_palette(self):
        """Return the chart color for each workflow state.

        Keyed both by raw state id and by the translated state label,
        since chart rows may carry either form.
        """
        return {
            'to_be_sampled': '#FA6900',
            _('To be sampled'): '#FA6900',
            'to_be_preserved': '#C44D58',
            _('To be preserved'): '#C44D58',
            'scheduled_sampling': '#FA6900',
            _('Sampling scheduled'): '#FA6900',
            'sample_due': '#F38630',
            _('Sample reception pending'): '#F38630',
            _('Reception pending'): '#F38630',
            'sample_received': '#E0E4CC',
            _('Assignment pending'): '#E0E4CC',
            'assigned': '#dcdcdc',
            'attachment_due': '#dcdcdc',
            'open': '#dcdcdc',
            _('Results pending'): '#dcdcdc',
            'rejected': '#FF6B6B',
            'retracted': '#FF6B6B',
            _('Rejected'): '#FF6B6B',
            _('Retracted'): '#FF6B6B',
            'to_be_verified': '#A7DBD8',
            _('To be verified'): '#A7DBD8',
            'verified': '#69D2E7',
            _('Verified'): '#69D2E7',
            'published': '#83AF9B',
            _('Published'): '#83AF9B',
        }

    def _getDateStr(self, period, created):
        """Format the DateTime *created* into the bucket label used by
        the evolution charts for the given periodicity *period*:

        - 'y' / 'a': the year as an int
        - 'b' / 'q' / 'm': 'YY-MM', MM being the first month of the period
        - 'w' and default (daily): 'YY-MM-DD'

        Month arithmetic uses floor division ('//') so the result stays
        an int under Python 3 as well.
        """
        if period in ('y', 'a'):
            # Yearly and all-time charts are both grouped by year
            created = created.year()
        elif period == 'b':
            # First month of the half-year
            m = (((created.month() - 1) // 6) * 6) + 1
            created = '%s-%s' % (str(created.year())[2:], str(m).zfill(2))
        elif period == 'q':
            # First month of the quarter
            m = (((created.month() - 1) // 3) * 3) + 1
            created = '%s-%s' % (str(created.year())[2:], str(m).zfill(2))
        elif period == 'm':
            created = '%s-%s' % (str(created.year())[2:], str(
                created.month()).zfill(2))
        elif period == 'w':
            # Shift back by the ISO day-of-week: every day of an ISO
            # week maps to the Sunday preceding that week's Monday
            year, weeknum, dow = created.asdatetime().isocalendar()
            created = created - dow
            created = '%s-%s-%s' % (str(
                created.year())[2:], str(
                    created.month()).zfill(2), str(created.day()).zfill(2))
        else:
            # Daily (default)
            created = '%s-%s-%s' % (str(
                created.year())[2:], str(
                    created.month()).zfill(2), str(created.day()).zfill(2))
        return created

    def _fill_dates_evo(self, catalog, query):
        """Build the rows for an evolution (bar-chart) panel.

        Returns a list of dicts, one per period bucket between
        self.min_date and self.date_to, shaped like
        {'date': <bucket label>, <state label>: <count>, ...}, counting
        the catalog results grouped by review state. State labels with
        no hits at all are stripped from every row at the end.
        """
        outevoidx = {}
        outevo = []
        # Approximate length of one period bucket, in days
        days = 1
        if self.periodicity == 'y':
            days = 336
        elif self.periodicity == 'b':
            days = 168
        elif self.periodicity == 'q':
            days = 84
        elif self.periodicity == 'm':
            days = 28
        elif self.periodicity == 'w':
            days = 7
        elif self.periodicity == 'a':
            days = 336

        otherstate = _('Other status')
        statesmap = self.get_states_map(query['portal_type'])
        # sorted() instead of an in-place .values().sort(): dict views
        # are not sortable in-place under Python 3
        stats = sorted(statesmap.values())
        stats.append(otherstate)
        statscount = {s: 0 for s in stats}
        # Add first all periods, cause we want all segments to be displayed
        curr = self.min_date.asdatetime()
        end = self.date_to.asdatetime()
        while curr < end:
            currstr = self._getDateStr(self.periodicity, DateTime(curr))
            if currstr not in outevoidx:
                outdict = {'date': currstr}
                for k in stats:
                    outdict[k] = 0
                outevo.append(outdict)
                outevoidx[currstr] = len(outevo) - 1
            curr = curr + datetime.timedelta(days=days)
        for brain in catalog(query):
            # Check if we can use the brain
            if query.get('portal_type', '') in ['AnalysisRequest', 'Analysis']:
                created = brain.created
            # If not, wake up the object to get it
            else:
                created = brain.getObject().created()
            state = brain.review_state
            if state not in statesmap:
                logger.warn("'%s' State for '%s' not available" %
                            (state, query['portal_type']))
            state = statesmap[state] if state in statesmap else otherstate
            created = self._getDateStr(self.periodicity, created)
            if created in outevoidx:
                oidx = outevoidx[created]
                statscount[state] += 1
                if state in outevo[oidx]:
                    outevo[oidx][state] += 1
                else:
                    outevo[oidx][state] = 1
            else:
                # Unseen bucket: create the row and register its index
                # so further hits on the same bucket accumulate instead
                # of appending duplicate rows; count the state too, or
                # the cleanup below would strip it right away
                outevo.append({'date': created, state: 1})
                outevoidx[created] = len(outevo) - 1
                statscount[state] += 1

        # Remove all those states for which there is no data
        rstates = [k for k, v in statscount.items() if v == 0]
        for o in outevo:
            for r in rstates:
                if r in o:
                    del o[r]

        return outevo

    def _update_criteria_with_filters(self, query, section_name):
        """
        Augment 'query' with the criteria stored in the dashboard
        cookie for the given section: when the section filter is
        'mine', restrict results to the current member's creations.

        :param query: A dictionary with search criteria.
        :param section_name: The dashboard section name
        :return: The 'query' dictionary
        """
        cookie = self.dashboard_cookie
        if cookie is not None and cookie.get(section_name) == 'mine':
            query['Creator'] = self.member.getId()
        return query

    def get_dashboard_panels_visibility(self, section_name):
        """
        Look up the role-permission view pairs configured for the given
        dashboard panels section.

        :param section_name: the panels section id.
        :return: a list of tuples.
        """
        pairs = get_dashboard_panels_visibility_by_section(section_name)
        return pairs
    def verifyToken(self, token, login, max_seats, request, response):
        """ Activates a token by putting it in the tokens[] array of mapping1[login] if it is not already present. """

        alsoProvides(request, IDisableCSRFProtection)

        isVerified = False  # it is verified if it is already in the active tokens list server-side
        seat_timeout = 5  # default if there is a problem with the member property
        iTokens = 0  # assume no tokens are active until proven otherwise
        existing = self.mapping1.get(login)
        if existing and 'tokens' in existing:
            iTokens = len(existing['tokens'])

            isVerified = token in existing['tokens']

            if self.DEBUG:
                print "authenticateCredentials():: cookie_val is " + token + ", and active tokens are: " + ', '.join(
                    existing['tokens'])
        else:
            self.mapping1[login] = {
                'tokens': []
            }  # initialize tokens array for this login

        if self.DEBUG:
            print "verifyToken:: login = %s, active = %i, max = %i" % (
                login, iTokens, max_seats)

        try:
            # for seats > 1, use member property for cookie timeout value
            seat_timeout = self.getSeatTimeoutInMinutesForLogin(login)
            td_seat_timeout = datetime.timedelta(minutes=seat_timeout)
        except:
            pass

        # if this is the last token to issue,
        # then go ahead and clear stale tokens for this login
        if not isVerified and iTokens >= max_seats - 1:
            self.clearStaleTokens(login)

        try:
            from_ip = self.get_ip(request)
        except:
            traceback.print_exc()

        if isVerified:
            # just extend it
            now = DateTime()
            self.mapping2[token] = {
                'userid': login,
                'ip': from_ip,
                'startTime': now,
                'expireTime': DateTime(now.asdatetime() + td_seat_timeout)
            }

            if self.DEBUG:
                print "verifyToken:: logon= %s, IP= %s, startTime= %s, expireTime= %s" % (
                    self.mapping2.get(token)['userid'], from_ip,
                    self.mapping2.get(token)['startTime'],
                    self.mapping2.get(token)['expireTime'])
        elif iTokens < max_seats:

            now = DateTime()
            # if it already exists, add it
            self.mapping1[login]['tokens'].append(token)
            self.mapping2[token] = {
                'userid': login,
                'ip': from_ip,
                'startTime': now,
                'expireTime': DateTime(now.asdatetime() + td_seat_timeout)
            }

            if self.DEBUG:
                print "verifyToken:: after activate token, active tokens = " + ', '.join(
                    self.mapping1[login]['tokens'])

            # since this was activated, just ensure that the cookie in the browser reflects what is server side
            self.setCookie(token)
        else:
            # cannot issue cookie, so clear in browser-side
            #self.setCookie('')

            # if the token is not able to be issued because of max_seats filled,
            # then force logout, and show the message

            # Logout the
            # user by calling resetCredentials.  Note that this
            # will eventually call our own resetCredentials which
            # will cleanup our own cookie.
            try:
                self.resetAllCredentials(request, response)
                self._getPAS().plone_utils.addPortalMessage(
                    _(u"The maximum number of simultaneous logins for this user has been exceeded.  You have been \
                    logged out."), "error")
            except:
                traceback.print_exc()
# Esempio n. 37 (score: 0)
    def index_html(self, REQUEST):
        """
        Render the user details page for the 'uid' given in the request.

        Builds the audit-log timeline for the user, computes the roles
        held at 'date_for_roles' (default: today), groups consecutive log
        entries by author/action, and, for disabled accounts, lists the
        roles removed when the account was disabled.
        """
        uid = REQUEST.form.get('uid')
        if not uid:
            # a missing uid can only mean this page is called by accident
            return
        date_for_roles = REQUEST.form.get('date_for_roles')

        if "," in uid:
            # multiple uids: render a combined page, no per-user data
            user = None
            roles = None
            multi = json.dumps({'users': uid.split(",")})
        else:
            multi = None
            user, roles = self._prepare_user_page(uid)

        is_auth = _is_authenticated(REQUEST)
        # we can only connect to ldap with bind=True if we have an
        # authenticated user
        agent = self._get_ldap_agent(bind=is_auth)

        user_dn = agent._user_dn(uid)
        log_entries = list(reversed(agent._get_metadata(user_dn)))
        VIEWS = {}
        # 'roles' is None in the multi-uid branch; guard against iterating it
        filtered_roles = set([info[0] for info in roles or []])  # + owner_roles)
        if date_for_roles:
            filter_date = DateTime(date_for_roles).asdatetime().date()
        else:
            filter_date = DateTime().asdatetime().date()

        for entry in log_entries:
            date = DateTime(entry['timestamp']).toZone("CET")
            entry['timestamp'] = date.ISO()
            # cache one details view adapter per action type
            view = VIEWS.get(entry['action'])
            if not view:
                view = getMultiAdapter((self, self.REQUEST),
                                       name="details_" + entry['action'])
                VIEWS[entry['action']] = view
            entry['view'] = view

            _roles = entry.get('data', {}).get('roles')
            _role = entry.get('data', {}).get('role')
            # undo each action logged on/after the filter date so that
            # filtered_roles reflects the roles held on that date
            # (assumes log_entries is ordered newest-first -- TODO confirm)
            if date.asdatetime().date() >= filter_date:
                if entry['action'] == 'ENABLE_ACCOUNT':
                    filtered_roles.difference_update(set(_roles))
                elif entry['action'] == "DISABLE_ACCOUNT":
                    filtered_roles.update(set(_roles))
                elif entry['action'] in ["ADDED_TO_ROLE"]:
                    if _role and _role in filtered_roles:
                        filtered_roles.remove(_role)
                elif entry['action'] in ["REMOVED_FROM_ROLE"]:
                    if _role:
                        filtered_roles.add(_role)

        # group consecutive entries with the same author and action so the
        # template renders them as one row with a list of data payloads
        output = []
        for entry in log_entries:
            if output:
                last_entry = output[-1]
                check = ['author', 'action']
                flag = True
                for k in check:
                    if last_entry[k] != entry[k]:
                        flag = False
                        break
                if flag:
                    last_entry['data'].append(entry['data'])
                else:
                    entry['data'] = [entry['data']]
                    output.append(entry)
            else:
                entry['data'] = [entry['data']]
                output.append(entry)

        removed_roles = []
        # 'user' is None in the multi-uid branch; only a single disabled
        # user gets the removed-roles treatment
        if user and user.get('status') == 'disabled':
            auth_user = self.REQUEST.AUTHENTICATED_USER
            if not bool(auth_user.has_permission(eionet_edit_users, self)):
                raise NotFound("User '%s' does not exist" % uid)
            # process log entries to list the roles the user had before
            # being disabled
            for entry in log_entries:
                if entry['action'] == 'DISABLE_ACCOUNT':
                    for role in entry['data'][0]['roles']:
                        try:
                            role_description = agent.role_info(
                                role)['description']
                        except Exception:
                            # the role may have been deleted since the log
                            # entry was written
                            role_description = ("This role doesn't exist "
                                                "anymore")
                        removed_roles.append((role, role_description))
                    break

        return self._render_template("zpt/userdetails/index.zpt",
                                     context=self,
                                     filtered_roles=filtered_roles,
                                     user=user,
                                     roles=roles,
                                     removed_roles=removed_roles,
                                     multi=multi,
                                     log_entries=output)
# Esempio n. 38 (score: 0)
    def verifyToken(self, token, login, max_seats, request, response):
        """ Activates a token by putting it in the tokens[] array of mapping1[login] if it is not already present. """

	alsoProvides(request, IDisableCSRFProtection)

        isVerified = False # it is verified if it is already in the active tokens list server-side
        seat_timeout = 5 # default if there is a problem with the member property
        iTokens = 0 # assume no tokens are active until proven otherwise
        existing = self.mapping1.get(login)
        if existing and 'tokens' in existing:
            iTokens = len( existing['tokens'] )
            
            isVerified = token in existing['tokens']
            
            if self.DEBUG:
                print "authenticateCredentials():: cookie_val is " + token + ", and active tokens are: " + ', '.join( existing['tokens'] )
        else:
            self.mapping1[login] = { 'tokens':[] } # initialize tokens array for this login

        if self.DEBUG:
            print "verifyToken:: login = %s, active = %i, max = %i" % (login, iTokens, max_seats)
            
        try:
            # for seats > 1, use member property for cookie timeout value
            seat_timeout = self.getSeatTimeoutInMinutesForLogin(login)
            td_seat_timeout = datetime.timedelta(minutes=seat_timeout)
        except:
            pass
        
        # if this is the last token to issue,
        # then go ahead and clear stale tokens for this login
        if not isVerified and iTokens >= max_seats - 1:
            self.clearStaleTokens(login)
        
        try:
            from_ip = self.get_ip(request)
        except:
            traceback.print_exc()

        if isVerified:
            # just extend it
            now = DateTime()
            self.mapping2[token] = {'userid': login, 'ip': from_ip, 'startTime': now, 'expireTime': DateTime( now.asdatetime() + td_seat_timeout )}
            
            if self.DEBUG:
                print "verifyToken:: logon= %s, IP= %s, startTime= %s, expireTime= %s" % ( self.mapping2.get(token)['userid'], from_ip, self.mapping2.get(token)['startTime'], self.mapping2.get(token)['expireTime'] )
        elif iTokens < max_seats:

            now = DateTime()
            # if it already exists, add it
            self.mapping1[login]['tokens'].append( token )
            self.mapping2[token] = {'userid': login, 'ip': from_ip, 'startTime': now, 'expireTime': DateTime( now.asdatetime() + td_seat_timeout )}
            
            if self.DEBUG:
                print "verifyToken:: after activate token, active tokens = " + ', '.join(self.mapping1[login]['tokens'])

            # since this was activated, just ensure that the cookie in the browser reflects what is server side
            self.setCookie( token )
        else:
            # cannot issue cookie, so clear in browser-side
            #self.setCookie('')

            # if the token is not able to be issued because of max_seats filled,
            # then force logout, and show the message


            # Logout the
            # user by calling resetCredentials.  Note that this
            # will eventually call our own resetCredentials which
            # will cleanup our own cookie.
            try:
                self.resetAllCredentials(request, response)
                self._getPAS().plone_utils.addPortalMessage(_(
                    u"The maximum number of simultaneous logins for this user has been exceeded.  You have been \
                    logged out."), "error")
            except:
                traceback.print_exc()