Exemplo n.º 1
0
 def build_url(year):
     """Return the list-view URL carrying a serialized Q filter for *year*."""
     # Restrict to the current user's objects created in the given year.
     combined_filter = Q(user=user.id) & Q(creation_date__year=year)
     serialized = qfilter_serializer.dumps(combined_filter)
     return '{}?q_filter={}'.format(lv_url, serialized)
Exemplo n.º 2
0
            TrafficTicket.objects.bulk_create(ticket_batch)
            ticket_batch = []
            print('{} of {}'.format(i, len(raw_tickets)))


if __name__ == "__main__":

    # Bootstrap the Django environment so the models can be imported below.
    parent_dir = os.path.dirname(
        os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
    os.sys.path.insert(0, parent_dir)
    os.environ.setdefault("DJANGO_SETTINGS_MODULE",
                          "madrid_geodjango.settings")
    django.setup()

    from madrid.models import TrafficTicket, MultasRaw

    # Step 1: load the fines that already contained x, y coordinates.
    # NOTE: the raw data marks "no coordinate" with an 11-space string.
    raw_fines = MultasRaw.objects.filter(~Q(coordenada_x='           '))
    # .count() issues a SQL COUNT(*) instead of fetching every row
    # (len() would materialize the whole queryset just to measure it).
    print('Updating {} fines with existing x, y'.format(raw_fines.count()))
    # update_fines(raw_fines)

    # Step 2: load the fines geolocated by the geocoder.
    raw_fines = MultasRaw.objects.filter(
        Q(coordenada_x='           ') & ~Q(coordenada_y='           ')
        & ~Q(coordenada_y='1a vuelta - nada'))
    print('Updating {} geocoded fines'.format(raw_fines.count()))
    update_fines(raw_fines, geocoder_info=True)

    print('The end')
Exemplo n.º 3
0
 def __init__(self, *args, **kwargs):
     """Initialise the queryset, pre-filtering out recycled records."""
     super().__init__(*args, **kwargs)
     # Only rows that are activated and not soft-deleted are ever fetched.
     not_in_recycle_bin = Q(deleted_at__isnull=True,
                            activated_at__isnull=False)
     self.query.add_q(not_in_recycle_bin)
Exemplo n.º 4
0
def validate_conflict_join_envs(env_ip, equipments):
    """Validate that relating *equipments* to the IP's environment creates
    no VLAN-number or network conflicts.

    :param env_ip: environment object associated with the IP.
    :param equipments: iterable of equipment objects being related.
    :raises IpErrorV3: when a VLAN number clash is detected.
    verify_networks() raises on overlapping network ranges.
    """
    models_env = get_app('ambiente', 'models')
    models_vrf = get_app('api_vrf', 'models')

    # VLAN numbers already present in the IP's environment.
    nums_vlan_rel_ip = [vlan.num_vlan for vlan in env_ip.vlans]

    for equipment in equipments:

        # Equipment already related to the environment needs no validation.
        if equipment.id not in list(env_ip.eqpts):

            # Validate each environment already related to this equipment.
            for env_rel in equipment.environments:
                env_rel_eqpt = env_rel.ambiente

                # When both environments share the same filter the filtered
                # equipment sets are comparable; otherwise compare the full
                # (unfiltered) equipment sets of each environment.
                use_filter = True
                if env_rel_eqpt.filter != env_ip.filter:
                    use_filter = False

                eqpts = env_rel_eqpt.filtered_eqpts \
                    if use_filter else env_rel_eqpt.eqpts

                eqpts_env_ip = env_ip.filtered_eqpts \
                    if use_filter else env_ip.eqpts

                # VLAN numbers of the equipment-side environment; any overlap
                # with the IP environment's VLAN numbers is a hard conflict.
                nums_vlan_rel_eqpt = [
                    vlan.num_vlan for vlan in env_rel_eqpt.vlans
                ]
                vlans_conflict = list(
                    set(nums_vlan_rel_eqpt) & set(nums_vlan_rel_ip))
                if vlans_conflict:
                    msg = 'VLANs {} already registred with same ' \
                        'number in equipments of environment: {}'
                    msg = msg.format(vlans_conflict, env_rel_eqpt.name)
                    log.error(msg)
                    raise models_env.IpErrorV3(msg)

                # Verify whether the networks of the IP's environment would
                # conflict in the new relationship.
                for vlan_rel_ip in env_ip.vlans:
                    # VRFs of one VLAN of the IP's environment.
                    # NOTE(review): this query filters by `eqpts` (the
                    # equipment-side set) while the one below uses
                    # `eqpts_env_ip` — confirm the two are not swapped.
                    vrfs_vlan_rel_ip = models_vrf.Vrf.objects.filter(
                        Q(
                            Q(vrfvlanequipment__equipment__in=eqpts)
                            & Q(vrfvlanequipment__vlan__id=vlan_rel_ip.id))
                        | Q(id=vlan_rel_ip.ambiente.default_vrf_id))

                    # VRFs of one VLAN of the environment related with the
                    # equipment.
                    for vlan_rel_eqpt in env_rel_eqpt.vlans:
                        vrfs_vlan_rel_eqpt = models_vrf.Vrf.objects.filter(
                            Q(
                                Q(vrfvlanequipment__equipment__in=eqpts_env_ip)
                                &
                                Q(vrfvlanequipment__vlan__id=vlan_rel_eqpt.id))
                            | Q(id=vlan_rel_eqpt.ambiente.default_vrf_id))

                        # Networks can only conflict when the two VLANs share
                        # at least one VRF.
                        vrfs_intersect = list(
                            set(vrfs_vlan_rel_ip) & set(vrfs_vlan_rel_eqpt))

                        if vrfs_intersect:

                            # NOTE(review): `vrfs_vlan_rel_eqpt` and
                            # `vrfs_vlan_rel_ip` are querysets here, yet
                            # `networkipv4_set` is an instance-level related
                            # manager — verify this branch is actually
                            # exercised / behaves as intended.
                            netv4 = vrfs_vlan_rel_eqpt\
                                .networkipv4_set.filter()
                            netv6 = vrfs_vlan_rel_eqpt\
                                .networkipv6_set.filter()
                            netv4_eqpt = [
                                IPNetwork(net.networkv4) for net in netv4
                            ]
                            netv6_eqpt = [
                                IPNetwork(net.networkv6) for net in netv6
                            ]

                            netv4 = vrfs_vlan_rel_ip.networkipv4_set.filter()
                            netv6 = vrfs_vlan_rel_ip.networkipv6_set.filter()
                            netv4_ip = [
                                IPNetwork(net.networkv4) for net in netv4
                            ]
                            netv6_ip = [
                                IPNetwork(net.networkv6) for net in netv6
                            ]

                            # Raises when any v4/v6 ranges intersect.
                            verify_networks(netv4_ip, netv4_eqpt)
                            verify_networks(netv6_ip, netv6_eqpt)
Exemplo n.º 5
0
def do_archiviazione(user, end_date):
    """Archive all trips ("viaggi") dated before *end_date*.

    Parent trips and their children are copied in chunks to the 'archive'
    database, driver ranking data ("classifiche") is carried over, the
    source rows are deleted, and old action logs are purged afterwards.
    """
    # if settings.DEBUG:
    # raise Exception("Per archiviare devi uscire dal DEBUG mode.")

    # Only parent trips (padre is NULL) older than the cut-off date.
    filtroViaggi = Q(data__lt=end_date, padre__isnull=True)
    to_archive = Viaggio.objects.select_related(
        "da", "a", "cliente", "conducente", "passeggero").filter(filtroViaggi)
    # Optimization: prefetch only the related models that are needed
    # (the default ordering is dropped via order_by() on the child query).
    logAction("K",
              instance=user,
              description="Archiviazione fino al %s" % end_date,
              user=user)
    logger.debug("Archiviazione fino al %s cominciata" % end_date)

    # Disable operation logging: trips are deleted and drivers are
    # updated below, and none of that should be audit-logged.
    stopLog(Viaggio)
    stopLog(Conducente)
    ricordi = {}  # ricordi[conducente_id] = {ranking_key: value}
    archiviati = 0
    chunkSize = 500
    total = to_archive.count()

    logger.debug(u"Archivio %d viaggi padre." % total)
    # to_archive = to_archive[:1]  # TODO: temp debug 1 viaggio
    while to_archive:
        # Split the parent trips into chunks of chunkSize.
        viaggi_chunk, to_archive = to_archive[:chunkSize], to_archive[
            chunkSize:]
        # One transaction per chunk on each database: default and archive.
        with transaction.atomic():
            with transaction.atomic(using='archive'):
                for viaggiopadre in viaggi_chunk:
                    archivio_viaggiopadre = archiveFromViaggio(viaggiopadre)
                    daRicordareDelViaggio(ricordi, viaggiopadre)
                    archivio_viaggiopadre.save()
                    archiviati += 1

                    # Copy each child trip, re-pointing it at the archived
                    # parent.
                    for figlio in viaggiopadre.viaggio_set.select_related(
                            "da", "a", "cliente", "conducente",
                            "passeggero").order_by().all():
                        archivio_viaggiofiglio = archiveFromViaggio(figlio)
                        archivio_viaggiofiglio.padre = archivio_viaggiopadre
                        daRicordareDelViaggio(ricordi, figlio)
                        archivio_viaggiofiglio.save()
                    # archiviati += 1
                    # Delete the source parent (children presumably removed
                    # by the FK cascade — TODO confirm on_delete behavior).
                    viaggiopadre.delete()

                logger.debug("Modifico le classifiche e commit %d/%d" %
                             (archiviati, total))
                applyRicordi(ricordi)

    logger.debug("fine archiviazione")
    if archiviati:
        vacuum_db()

    # Purge action logs older than the cut-off date as well.
    logDaEliminare = ActionLog.objects.filter(data__lt=end_date)
    contaLog = logDaEliminare.count()
    if contaLog:
        logger.debug("Ora cancello tutti i vecchi LOG.")
        logDaEliminare.delete()
        vacuum_db(using='modellog')

    # Re-enable operation logging.
    startLog(Conducente)
    startLog(Viaggio)

    logger.debug("Archiviazione fino al %s completata." %
                 end_date.strftime("%d/%m/%Y"))
Exemplo n.º 6
0
    def save_assistance_record(self, record):
        """Parse one raw assistance *record* row and upsert an
        EmployeeAssistance entry for the current payroll period.

        Column positions depend on whether the payroll group uses the
        automatic (ATM) or the manual checker. The validate_* helpers
        raise when the employee key, date or times are invalid.
        (Python 2 code: print statements and str.encode on text.)
        """

        # Pick the column layout matching the payroll group's checker type.
        if self.payroll_period.payroll_group.checker_type == PayrollGroup.CHECKER_TYPE_AUTOMATIC:
            employee_key = record[self.ElementPosition.ATM_EMPLOYEE_KEY_COL]
            record_date = record[self.ElementPosition.ATM_DATE_COL]
            entry_time_record = record[self.ElementPosition.ATM_ENTRY_COL]
            exit_time_record = record[self.ElementPosition.ATM_EXIT_COL]
            print "Automatic Assistance Upload"


        else:
            employee_key = record[self.ElementPosition.MANUAL_EMPLOYEE_KEY_COL]
            record_date = record[self.ElementPosition.MANUAL_DATE_COL]
            entry_time_record = record[self.ElementPosition.MANUAL_ENTRY_COL]
            exit_time_record = record[self.ElementPosition.MANUAL_EXIT_COL]
            print "Manual Assistance Upload"

        # Parse HH:MM:SS times, dropping any non-ASCII noise first.
        entry_time_record = datetime.datetime.strptime(entry_time_record.encode('ascii','ignore'), '%H:%M:%S').time()
        exit_time_record = datetime.datetime.strptime(exit_time_record.encode('ascii','ignore'), '%H:%M:%S').time()


        # Obtaining the related employee by their key number. If the given employee does not exist,
        # the system will throw an exception.
        employee_to_save = self.validate_employee_key(employee_key)

        # Validates that the given date is found between the limits of the selected payroll period.
        date_to_save = self.validate_date(record_date, employee_to_save.employee_key)

        # Validates the given format for the entry and exit time.
        entry_time_to_save = self.validate_entry_and_exit_time(entry_time_record, employee_to_save.employee_key)
        exit_time_to_save = self.validate_entry_and_exit_time(exit_time_record, employee_to_save.employee_key)

        employee_was_absent = self.check_if_absent(employee_to_save, entry_time_to_save, exit_time_to_save)

        # To check if the assistance exists before saving it.
        assistance_existed = EmployeeAssistance.objects.filter(
            Q(record_date = date_to_save) &
            Q(employee = employee_to_save) &
            Q(payroll_period=self.payroll_period))


        if len(assistance_existed) > 0:
            # If the assistance already existed, update it in place.
            assistance_obj = assistance_existed[0]
            assistance_obj.entry_time = entry_time_to_save
            assistance_obj.exit_time = exit_time_to_save
            assistance_obj.absence = employee_was_absent

            # Maybe add a message that says that some records were updated.

            assistance_obj.save()

        else:
            # No previous record: create a fresh assistance entry.
            assistance_obj = EmployeeAssistance(
                entry_time=entry_time_to_save,
                exit_time=exit_time_to_save,
                record_date = date_to_save,
                employee = employee_to_save,
                payroll_period=self.payroll_period,
                absence=employee_was_absent
            )



            assistance_obj.save()
    def check_instance_pools(self):
        """Record and aggregate uptime statistics for all enabled pools.

        For each enabled pool: track the minimum observed count of
        pending/running instances within the current time delta, fold
        detailed entries older than the detailed-stats window into daily
        accumulated entries, and expire accumulated entries beyond the
        retention limit.
        """
        instance_pools = InstancePool.objects.all()

        # Process all instance pools
        for pool in instance_pools:
            if not pool.isEnabled:
                continue

            current_delta = timezone.datetime.now() - timezone.timedelta(
                seconds=stats_delta_secs)
            entries = PoolUptimeDetailedEntry.objects.filter(
                pool=pool, created__gte=current_delta)

            # We should never have more than one entry per time-delta
            assert (len(entries) < 2)

            if entries:
                current_delta_entry = entries[0]
            else:
                # Create a new entry
                current_delta_entry = PoolUptimeDetailedEntry()
                current_delta_entry.pool = pool

            current_delta_entry.target = pool.config.flatten().size

            # Instances currently pending or running for this pool.
            actual = len(
                Instance.objects.filter(pool=pool).filter(
                    Q(status_code=INSTANCE_STATE['pending'])
                    | Q(status_code=INSTANCE_STATE['running'])))
            # Keep the minimum observed count within this delta.
            # Fix: identity comparison with None ("is None", not "== None").
            if current_delta_entry.actual is None or actual < current_delta_entry.actual:
                current_delta_entry.actual = actual

            # This will only save if necessary, i.e. if the entry already existed and the values
            # have not changed, this will not cause I/O on the database with Django >=1.5
            current_delta_entry.save()

            # Now check if we need to aggregate some of the detail entries we have
            entries = PoolUptimeDetailedEntry.objects.filter(
                pool=pool).order_by('created')

            # Fix: floor division keeps n an int so it can be used as a
            # slice bound; true division yields a float on Python 3 and
            # entries[0:n] would raise TypeError.
            n = len(entries) - (stats_total_detailed * 60 *
                                60) // stats_delta_secs
            if n > 0:
                # We need to aggregate some entries
                entriesAggr = entries[0:n]

                for entry in entriesAggr:
                    # Figure out if we have an aggregated entry already with the same date
                    created = entry.created.date()
                    day_entries = PoolUptimeAccumulatedEntry.objects.filter(
                        pool=pool).filter(created__year=created.year,
                                          created__month=created.month,
                                          created__day=created.day)

                    # We should never have more than one entry per day
                    assert (len(day_entries) < 2)

                    if day_entries:
                        day_entry = day_entries[0]
                    else:
                        day_entry = PoolUptimeAccumulatedEntry()
                        day_entry.pool = pool
                        day_entry.created = entry.created
                        day_entry.uptime_percentage = 0.0

                    entry_percentage = 100
                    if entry.target > 0:
                        entry_percentage = (float(entry.actual) /
                                            entry.target) * 100

                    # Running average: fold this entry's percentage into
                    # the day's accumulated mean.
                    new_uptime_percentage = (
                        (float(day_entry.uptime_percentage) *
                         day_entry.accumulated_count) +
                        entry_percentage) / (day_entry.accumulated_count + 1)

                    day_entry.uptime_percentage = new_uptime_percentage
                    day_entry.accumulated_count = day_entry.accumulated_count + 1
                    day_entry.save()

                    # We can now delete our entry
                    entry.delete()

            # Finally check if we need to expire some accumulated entries
            entries = PoolUptimeAccumulatedEntry.objects.filter(
                pool=pool).order_by('created')

            n = len(entries) - stats_total_accumulated
            if n > 0:
                for entry in entries[0:n]:
                    entry.delete()
Exemplo n.º 8
0
 def to_python(self, value):
     """Build an icontains Q for this cell's field, or an empty Q."""
     if not value:
         return Q()
     lookup = '{}__icontains'.format(self.cell.value)
     return Q(**{lookup: value})
Exemplo n.º 9
0
 def _build_q_from_operations(self, operations):
     """Combine (operation, number) pairs into a single AND-ed Q.

     Each pair becomes a '<field>__<op>' keyword lookup on this cell's
     field; passing them all to one Q() ANDs them together.
     """
     lookups = {}
     for op, number in operations:
         lookups['{}__{}'.format(self.cell.value, op)] = number
     return Q(**lookups)
 def date_clashes(self, start, end):
     """Return True when any event starts or ends within [start, end].

     Bug fix: the original OR-ed the range tuple into the first Q's
     keyword value — `Q(start__range=(start, end) | Q(...))` — which
     raises TypeError (tuple | Q is unsupported). Each range lookup
     must be its own Q, OR-ed at the Q level.
     """
     return CalendarEvent.objects.filter(
         Q(start__range=(start, end))
         | Q(end__range=(start, end))).exists()
Exemplo n.º 11
0
 def to_python(self, value):
     """Ignore the cell value entirely: always produce an empty filter."""
     empty_filter = Q()
     return empty_filter
Exemplo n.º 12
0
    def favorites(self):
        """Return the user's faves annotated with navigation metadata.

        For every fave this sets: link_start (last read position),
        link_href, link_title, current (latest-activity marker), fresh
        (whether there is unseen activity) and remove (unfave URL).
        (Python 2 code: iteritems/has_key/__unicode__.)
        """
        faves = Fave.objects.get_for_user(self.user)
        post_ct = ContentType.objects.get_for_model(Post).pk
        tag_ct = ContentType.objects.get_for_model(Tag).pk
        #        userprofile_ct = ContentType.objects.get_for_model(UserProfile).pk
        userpost_ct = ContentType.objects.get_for_model(User).pk

        # Faved object ids, grouped by content type.
        favorites_list = {
            post_ct: [],
            tag_ct: [],
            #           userprofile_ct:[],
            userpost_ct: [],
        }
        # 'read' interaction types for the content types above.
        itypes = InteractionType.objects.filter(
            name='read',
            content_type__in=favorites_list).values('content_type_id', 'pk')
        ctypes = dict((i["pk"], i["content_type_id"]) for i in itypes)
        for f in faves:
            favorites_list[f.content_type_id].append(f.object_id)
        fave_list = dict(
            (i["pk"], favorites_list[i["content_type_id"]]) for i in itypes)
        # OR together one (interaction_type AND object_ids) clause per type.
        q = Q()
        for itype, f in fave_list.iteritems():
            q = q | (Q(interaction_type=itype) & Q(object_id__in=f))

        # Latest 'read' interaction per (content type, object id).
        last_interactions = {}
        for li in self.interactions.filter(q):
            try:
                ct = ctypes[li.interaction_type_id]
                if not last_interactions.has_key(ct):
                    last_interactions[ct] = {}
                last_interactions[ct][li.object_id] = li
            except KeyError:
                pass

        # Fetch the faved objects with their latest-activity marker and a
        # display type, keyed by primary key per content type.
        faved_objects = {}
        faved_objects[post_ct] = dict((o.pk, {
            'last': o.last_reply,
            'obj': o,
            'type': 'post'
        }) for o in Post.objects.filter(pk__in=favorites_list[post_ct]))
        faved_objects[tag_ct] = dict((o.pk, {
            'last': o.timestamp,
            'obj': o,
            'type': 'tag'
        }) for o in Tag.objects.filter(pk__in=favorites_list[tag_ct]))
        #        faved_objects[userprofile_ct] = dict((o.pk, {'last':o.last_reply, 'obj':o, 'type':'userprofile'}) for o in UserProfile.objects.filter(pk__in = favorites_list[userprofile_ct]))
        faved_objects[userpost_ct] = dict((o.pk, {
            'last': o.get_profile().last_post_id,
            'obj': o.get_profile(),
            'type': 'user'
        }) for o in User.objects.filter(pk__in=favorites_list[userpost_ct]))

        # Title prefix per content type (HTML entity for posts).
        link_prefix = {
            post_ct: '&para;',
            tag_ct: '.',
            userpost_ct: '@',
            #           userpost_ct:'by: ',
        }

        # Annotate each fave in place with its navigation metadata.
        for f in faves:
            ct = f.content_type_id
            oid = f.object_id
            fo = faved_objects[ct][oid]
            obj = fo['obj']
            try:  #TODO: Should we link to the last interaction or to the starting point?
                f.link_start = int(
                    last_interactions[ct][oid].value.split(';')[0])
            except KeyError:  #This shouldn't actually ever happen, because there _should_ be an interaction of sorts.
                f.link_start = 0

            f.link_href = obj.get_absolute_url()
            f.link_title = link_prefix[ct] + getattr(obj, 'title',
                                                     obj.__unicode__())
            f.current = fo['last']
            # 'fresh' when the last-read marker is behind the latest activity.
            f.fresh = fo.get('op', operator.lt)(f.link_start, f.current)
            f.remove = reverse('unfave_object',
                               kwargs={
                                   'fave_type_slug': 'star',
                                   'content_type_id': ct,
                                   'object_id': oid
                               })
        return faves
Exemplo n.º 13
0
def manage_users(request):
    """Admin view to list, search, create and update user accounts.

    Handles three request flavours:
    - AJAX POST with table == 'users': returns a JSON map of matching
      non-pending accounts keyed by join date.
    - Regular POST: updates an existing user (preserving their password
      hash) or registers a new one via AdminUserForm.
    - Plain GET: renders the management page with pending/active users.

    Raises PermissionDenied unless the caller has 'can_view_user'.
    """
    if not request.user.has_perms(['can_view_user']):
        raise PermissionDenied('permission denied')

    message = ''
    admin_form = AdminUserForm()

    if request.is_ajax():
        table = request.POST.get('table', False)
        search = request.POST.get('search', False)
        if table and table == 'users':
            # Search non-pending accounts by account id or name.
            accounts = User.objects.filter(
                ~Q(user_status='pending'),
                Q(account_id__icontains=search)
                | Q(first_name__icontains=search)
                | Q(last_name__icontains=search))
            data = {}
            if accounts:
                for account in accounts:
                    d = {
                        'account_id':
                        account.account_id,
                        'first_name':
                        account.first_name,
                        'last_name':
                        account.last_name,
                        'address':
                        account.address,
                        'email':
                        account.email,
                        'contact':
                        account.contact,
                        'username':
                        account.username,
                        'user_type':
                        account.user_type,
                        'is_superuser':
                        account.is_superuser,
                        'user_status':
                        account.user_status,
                        'date_joined':
                        date.strftime(account.date_joined,
                                      "%b. %d, %Y, %I:%M %P"),
                    }
                    data[str(account.date_joined)] = d
                return JsonResponse(data)
            else:
                # No matches: respond with the empty mapping.
                return JsonResponse(data)

    if request.POST:
        admin_form = AdminUserForm(request.POST, request.FILES)
        try:
            my_user = User.objects.get(account_id=request.POST['account_id'])
        except User.DoesNotExist:
            # Falsy sentinel: the branches below treat '' as "new user".
            my_user = ''

        if my_user:
            # Existing user: drop their permissions first unless the
            # submission keeps them a superuser.
            perms = get_corrected_permissions(my_user)
            if not request.POST.get('is_super_user', False):
                for perm in perms:
                    my_user.user_permissions.remove(perm)

            # Re-bind the form to the existing instance for an update.
            admin_form = AdminUserForm(request.POST,
                                       request.FILES,
                                       instance=my_user)
            # Keep the stored hash: the form must not overwrite passwords.
            current_password = my_user.password

            if admin_form.is_valid():
                updataed_model = admin_form.save()
                updataed_model.password = current_password
                updataed_model.save()

                admin_form = AdminUserForm()
                message = 'user Information has been updated'

        elif admin_form.is_valid():
            # No matching account: register a brand-new user.
            admin_form.save()
            admin_form = AdminUserForm()
            message = 'New user has been registered and is active'

    old_users_list = User.objects.filter(~Q(user_status='pending'))
    new_users_list = User.objects.filter(Q(user_status='pending'))

    return render(
        request, 'users/user_form_admin.html', {
            'title': "User Manager",
            'search_table': 'users',
            'to_print': 'Members List',
            'what_to_print': 'users',
            'form': admin_form,
            'new_users': new_users_list,
            'old_users': old_users_list,
            'message': message,
        })
Exemplo n.º 14
0
    def get(self, request):
        """Build the dashboard payload for the authenticated user.

        Returns profile-completeness info, running projects, unpaid
        invoices, upcoming progress events, recent progress reports and
        unread activity notifications. 401 when not logged in.
        """
        user = self.get_object(request)
        if user is None:
            return Response(
                {'status': 'Unauthorized', 'message': 'You are not logged in'},
                status=status.HTTP_401_UNAUTHORIZED
            )

        # Profile fields required for a complete profile; which extra
        # fields apply depends on the user's role / tax location.
        user_fields = ['first_name', 'last_name', 'avatar_url']
        profile_fields = ['skills', 'bio', 'country', 'city', 'street', 'plot_number', 'postal_code']
        optional_fields = ['skills', 'bio', 'avatar_url', 'phone_number', 'tel_number']

        if request.user.is_developer or request.user.is_project_manager:
            profile_fields.extend(['id_document', 'phone_number'])
        elif user.is_project_owner and user.tax_location == 'europe':
            profile_fields.extend(['name', 'vat_number', 'tel_number'])

        missing_required = []
        missing_optional = []

        # Check user fields on the user object and profile fields on the
        # company (project owners) or profile object.
        for group in [
            [request.user, user_fields],
            [request.user.is_project_owner and request.user.company or request.user.profile, profile_fields]
        ]:
            for field in group[1]:
                if not getattr(group[0], field, None):
                    if field in optional_fields:
                        missing_optional.append(field)
                    else:
                        missing_required.append(field)

        # Non-archived projects where the user is involved in any role.
        running_projects = Project.objects.filter(
            Q(user=request.user) |
            Q(pm=request.user) |
            Q(owner=request.user) |
            (
                Q(participation__user=request.user) &
                Q(participation__status__in=[STATUS_INITIAL, STATUS_ACCEPTED])
            ), archived=False
        ).distinct()

        # Up to 5 unpaid invoices issued up to the end of today (UTC).
        today_end = datetime.datetime.utcnow().replace(hour=23, minute=59, second=59, microsecond=999999)
        unpaid_invoices = Invoice.objects.filter(
            user=request.user, paid=False, issued_at__lte=today_end
        ).order_by('issued_at')[:5]

        # Next 4 progress events relevant to the user's role that they
        # have not yet reported on (due within the last 24h or later).
        upcoming_progress_events = ProgressEvent.objects.filter(
            (
                Q(project__pm=request.user) &
                Q(type__in=[PROGRESS_EVENT_PM, PROGRESS_EVENT_MILESTONE])
            ) |
            (
                Q(project__participation__user=request.user) &
                Q(project__participation__status=STATUS_ACCEPTED) &
                Q(type__in=[PROGRESS_EVENT_DEVELOPER, PROGRESS_EVENT_MILESTONE])
            ),
            ~Q(progressreport__user=request.user),
            due_at__gt=datetime.datetime.utcnow() - relativedelta(hours=24)
        ).order_by('due_at').distinct()[:4]

        # Latest 4 developer progress reports on the user's projects.
        progress_reports = ProgressReport.objects.filter(
            Q(event__project__user=request.user) |
            Q(event__project__pm=request.user) |
            Q(event__project__owner=request.user),
            user__type=USER_TYPE_DEVELOPER,
            event__type__in=[PROGRESS_EVENT_DEVELOPER, PROGRESS_EVENT_MILESTONE]
        ).distinct()[:4]

        # Activity stream entries for the running projects, excluding ones
        # the user has already marked read.
        raw_activities = Action.objects.filter(
            ~Q(id__in=[int(item.notification_id) for item in
                       NotificationReadLog.objects.filter(user=user, type=NOTIFICATION_TYPE_ACTIVITY)]) &
            (
                Q(projects__in=running_projects) | Q(progress_events__project__in=running_projects)
            ),
            action_object_content_type__in=[
                ContentType.objects.get_for_model(model) for model in [Document, Participation, Invoice, FieldChangeLog]
            ],
        )

        invoice_type = ContentType.objects.get_for_model(Invoice)

        # Clients only see invoice activities once the invoice is due.
        cleaned_activities = []
        is_client = not (user.is_admin or user.is_project_manager or user.is_developer)
        for activity in raw_activities:
            if (not is_client) or activity.action_object_content_type != invoice_type or activity.action_object.is_due:
                cleaned_activities.append(activity)
        cleaned_activities = cleaned_activities[:15]

        # Profile notifications the user has already dismissed.
        cleared_notifications = [
            item.notification_id for item in NotificationReadLog.objects.filter(
                user=user, type=NOTIFICATION_TYPE_PROFILE
            )
        ]

        return Response(
            {
                'profile': dict(
                    required=missing_required,
                    optional=missing_optional,
                    cleared=cleared_notifications
                ),
                'projects': [dict(
                    id=project.id,
                    title=project.title
                ) for project in running_projects],
                'invoices': [dict(
                    id=invoice.id,
                    title=invoice.title,
                    full_title=invoice.full_title,
                    issued_at=invoice.issued_at,
                    due_at=invoice.due_at,
                    is_overdue=invoice.is_overdue,
                    project=dict(
                        id=invoice.project.id,
                        title=invoice.project.title
                    )
                ) for invoice in unpaid_invoices],
                'events': [dict(
                    id=event.id,
                    title=event.title,
                    type=event.type,
                    due_at=event.due_at,
                    project=dict(
                        id=event.project.id,
                        title=event.project.title
                    )
                ) for event in upcoming_progress_events],
                'reports': [dict(
                    id=report.id,
                    created_at=report.created_at,
                    status=report.status,
                    percentage=report.percentage,
                    user=dict(
                        id=report.user.id,
                        username=report.user.username,
                        display_name=report.user.display_name
                    ),
                    event=dict(
                        id=report.event.id,
                        title=report.event.title,
                        type=report.event.type,
                        due_at=report.event.due_at,
                    ),
                    project=dict(
                        id=report.event.project.id,
                        title=report.event.project.title
                    ),
                ) for report in progress_reports],
                'activities': ActivitySerializer(instance=cleaned_activities, many=True,
                                                 context=dict(request=request)).data
            },
            status=status.HTTP_200_OK
        )
Exemplo n.º 15
0
def access(
    request,
    wid: Optional[int],
    select_related: Optional[Union[str, List]] = None,
    prefetch_related: Optional[Union[str, List]] = None,
) -> Optional[Workflow]:
    """Verify that the workflow stored in the request can be accessed.

    :param request: HTTP request object

    :param wid: ID of the workflow that is being requested

    :param select_related: Field to add as select_related query filter

    :param prefetch_related: Field to add as prefetch_related query filter

    :return: Workflow object or raise exception with message
    """
    # Lock the workflow object while deciding if it is accessible or not to
    # avoid race conditions.
    sid = request.session.get('ontask_workflow_id')
    if wid is None and sid is None:
        # No key was given and none was found in the session (anomaly)
        return None

    if wid is None:
        # No WID provided, but the session contains one, carry on
        # with this one
        wid = sid
    elif sid != wid:
        # Session pointed at a different workflow: release that lock
        # before switching to the requested one.
        Workflow.unlock_workflow_by_id(sid)

    with cache.lock('ONTASK_WORKFLOW_{0}'.format(wid)):

        # Step 1: Get the workflow that is being accessed (must be owned by
        # or shared with the requesting user)
        workflow = Workflow.objects.filter(id=wid).filter(
            Q(user=request.user) | Q(shared__id=request.user.id), )

        if not workflow:
            return None

        # Apply select and prefetch if given
        if select_related:
            if isinstance(select_related, list):
                workflow = workflow.select_related(*select_related)
            else:
                workflow = workflow.select_related(select_related)
        if prefetch_related:
            if isinstance(prefetch_related, list):
                workflow = workflow.prefetch_related(*prefetch_related)
            else:
                workflow = workflow.prefetch_related(prefetch_related)

        # Now get the unique element from the query set
        workflow = workflow.first()

        # Step 2: If the workflow is locked by this user session, return
        # correct result (the session_key may be None if using the API)
        if request.session.session_key == workflow.session_key:
            # Update nrows. Async execution of a plugin may have modified it
            store_workflow_nrows_in_session(request, workflow)
            return workflow

        # Step 3: If the workflow is unlocked, LOCK and return
        if not workflow.session_key:
            # Workflow is unlocked. Proceed to lock
            return wf_lock_and_update(request, workflow, create_session=True)

        # Step 4: The workflow is locked by a session different from this one.
        # See if the session locking it is still valid
        session = Session.objects.filter(
            session_key=workflow.session_key, ).first()
        if not session:
            # The session stored as locking the
            # workflow is no longer in the session table, so the user can
            # access the workflow
            return wf_lock_and_update(request, workflow, create_session=True)

        # Get the owner of the session locking the workflow
        user_id = session.get_decoded().get('_auth_user_id')
        if not user_id:
            # Session has no user_id, so proceed to lock the workflow
            return wf_lock_and_update(request, workflow)

        owner = get_user_model().objects.get(id=user_id)

        # Step 5: The workflow is locked by a session that is valid. See
        # if the session locking happens to be from the same user (a
        # previous session that has not been properly closed, or an API
        # call from the same user)
        if owner == request.user:
            return wf_lock_and_update(request, workflow)

        # Step 6: The workflow is locked by an existing session. See if the
        # session is valid
        if session.expire_date >= timezone.now():
            raise OnTaskException(
                _('The workflow is being modified by user {0}').format(
                    owner.email), )

        # The workflow is locked by a session that has expired. Take the
        # workflow and lock it with the current session.
        return wf_lock_and_update(request, workflow)
Exemplo n.º 16
0
 def _get_q_for_choice(self, choice_value):
     """Build an exact-match Q for this cell's field against the choice."""
     lookup = {self.cell.value: choice_value}
     return Q(**lookup)
Exemplo n.º 17
0
 def get_slides(cls, lang=None):
     """Return active slides, optionally restricted to *lang*.

     When a language code is given, slides tagged with that code are
     returned together with language-neutral (NULL dil_kodu) ones.
     """
     slides = cls.objects.filter(active=True).select_related()
     if lang:
         slides = slides.filter(Q(dil_kodu=lang) | Q(dil_kodu__isnull=True))
     return slides
Exemplo n.º 18
0
 def _get_q_for_null_choice(self):
     """Build a Q matching rows where this cell's field is NULL."""
     null_lookup = '{}__isnull'.format(self.cell.value)
     return Q(**{null_lookup: True})
Exemplo n.º 19
0
def merge_contacts(c1, c2):
    """Merge contact *c1* into *c2*, then delete *c1*.

    Moves contact details, user accounts, transactions (orders, bills,
    payments), expenses, trade accounts and the default location from
    ``c1`` to ``c2``.  Trade-account balances are summed into ``c2``'s
    matching account when one exists.
    """
    details = ContactDetail.objects.filter(owner=c1)
    for d in details:
        d.owner = c2
        d.save()

    uas = UserAccount.objects.filter(company=c1)
    for ua in uas:
        ua.company = c2
        ua.save()

    # company account: c2 keeps its own, drop c1's to avoid a duplicate
    try:
        ca = CompanyAccount.objects.get(contact=c1)
        ca.delete()
    except CompanyAccount.DoesNotExist:
        pass

    def move(clazz):
        # Reassign supplier/customer references on transaction rows.
        txns = clazz.objects.filter(Q(supplier=c1) | Q(customer=c1))
        for t in txns:
            if t.supplier == c1:
                t.supplier = c2
                t.save()
            elif t.customer == c1:
                t.customer = c2
                t.save()

    # orders
    move(Order)
    # bill
    move(Bill)
    # payment
    move(Payment)
    # expense

    es = Expense.objects.filter(Q(owner=c1) | Q(contact=c1))
    for e in es:
        if e.owner == c1:
            e.owner = c2
            e.save()
        elif e.contact == c1:
            e.contact = c2
            e.save()

    # trade account
    # copy over debt and credit
    tas1 = TradeAccount.objects.filter(Q(supplier=c1) | Q(customer=c1))
    for ta1 in tas1:
        if ta1.supplier == c1:
            try:
                ta2 = TradeAccount.objects.get(supplier=c2,
                                               customer=ta1.customer)
                ta2.debt += ta1.debt
                ta2.credit += ta1.credit
                ta2.save()
                ta1.delete()
            except TradeAccount.DoesNotExist:
                ta1.supplier = c2
                ta1.save()
        elif ta1.customer == c1:
            try:
                ta2 = TradeAccount.objects.get(customer=c2,
                                               supplier=ta1.supplier)
                ta2.debt += ta1.debt
                ta2.credit += ta1.credit
                ta2.save()
                ta1.delete()
            except TradeAccount.DoesNotExist:
                ta1.customer = c2
                ta1.save()

    # location
    # merge the 'Default' locations when both contacts have one, else move c1's
    try:
        l1 = c1.locations.get(name=Location.DEFAULT)
    except Location.DoesNotExist:
        l1 = None
    try:
        l2 = c2.locations.get(name=Location.DEFAULT)
    except Location.DoesNotExist:  # fix: bare except hid unrelated errors
        l2 = None

    if l1 and l2:
        merge_locations(l1, l2)
    elif l1:
        l1.owner = c2
        l1.save()  # fix: the reassignment was never persisted
    c1.delete()
Exemplo n.º 20
0
 def to_python(self, value):
     """Turn a truthy *value* into a Q over the header filter search field;
     delegate falsy values to the parent implementation."""
     if not value:
         return super().to_python(value=value)
     lookup = '{}__header_filter_search_field__icontains'.format(self.cell.value)
     return Q(**{lookup: value})
 def get_all_users_stats_except(self):
     """Return UserStatistics for every user except ``self.username``.

     Raises Http404 when the user does not exist or no statistics match.
     """
     this_user = get_object_or_404(User, username=self.username)
     # fix: removed leftover debug print of the result list
     return get_list_or_404(UserStatistics, ~Q(user=this_user))
Exemplo n.º 22
0
    def get_context_data(self, **kwargs):
        """Assemble the dashboard context.

        Adds weekly time totals for the current user, per-user weekly
        totals for admins, team/project listings and role flags
        (``team_admin`` / ``org_admin``) plus the user's timesheets.
        """
        user = User.objects.get(username=self.request.user.username)
        pk = user.id
        context = super(DashBoard, self).get_context_data(**kwargs)
        cursor = connection.cursor()
        # Weekly time spent by the current user, grouped by ISO week
        # (PostgreSQL-specific EXTRACT/date_trunc SQL).
        cursor.execute("SELECT EXTRACT(WEEK FROM tw.date) AS regweek,date_trunc('week',tw.date),SUM(tw.time_spent) FROM timetracking_timesheetweekdata tw INNER JOIN timetracking_timesheetdata td ON tw.timesheet_id = td.id  where td.user_id = %s  GROUP BY regweek,date_trunc('week',tw.date) ORDER BY regweek ",[pk])
        results = cursor.fetchall()
        total_time = []
        for result in results:
            # result: (week number, week start timestamp, summed time_spent)
            start_date = result[1]
            end_date = start_date+timedelta(days = 6)
            start_date = start_date.strftime('%m/%d/%Y')
            end_date = end_date.strftime('%m/%d/%Y')
            time_list = {'week': '', 'time': '', 'start_date': '', 'end_date': ''}
            # Week numbers are shifted to be zero-based for the template.
            time_list['week'] = int(result[0]) - 1
            time_list['start_date'] = start_date
            time_list['end_date'] = end_date
            time_list['time'] = int(result[2])
            total_time.append(time_list)

        context['results'] = total_time
        # Same weekly aggregation, but for every user (admin views).
        cursor.execute("SELECT td.user_id,EXTRACT(WEEK FROM tw.date) AS regweek,date_trunc('week',tw.date),SUM(tw.time_spent) FROM timetracking_timesheetweekdata tw INNER JOIN timetracking_timesheetdata td ON tw.timesheet_id = td.id  GROUP BY td.user_id,regweek,date_trunc('week',tw.date) ORDER BY regweek ")
        users_week_time = cursor.fetchall()
        total_time_user = []

        for result in users_week_time:
            # result: (user_id, week number, week start timestamp, summed time)
            start_date = result[2]
            end_date = start_date+timedelta(days=6)
            start_date = start_date.strftime('%m/%d/%Y')
            end_date = end_date.strftime('%m/%d/%Y')
            time_list = {'id': '', 'week': '', 'time': '', 'start_date': '', 'end_date': ''}
            time_list['id'] = int(result[0])
            time_list['week'] = int(result[1]) - 1
            time_list['start_date'] = start_date
            time_list['end_date'] = end_date
            time_list['time'] = int(result[3])
            total_time_user.append(time_list)

        user = User.objects.get(username= self.request.user.username)
        # Projects where the user is an admin, and the non-admin members of those.
        project = Project.objects.filter(projectassignment__assigned_to=user, projectassignment__admin_flag=True)
        team_members = User.objects.filter(projectassignment__project=project, projectassignment__admin_flag=False)
        context['timesheet_flag'] = ""
        # Queryset truthiness triggers a DB query; flag stays "" when empty.
        if TimeSheetData.objects.filter(user=self.request.user):
            context['timesheet_flag'] = True
        context['team_admin'] = ""
        context['org_admin'] = ""
        context['project_assigned'] = ProjectAssignment.objects.filter(assigned_to=self.request.user.id)
        if ProjectAssignment.objects.filter(assigned_to=user.id, admin_flag=True):
            context['team_admin'] = "True"
            context['users_week_time'] = total_time_user
            context['team_members'] = team_members
            context['members'] = User.objects.filter(projectassignment__project=project,
                                                     projectassignment__admin_flag=False).distinct()
            context['projects'] = project
        # user_type == 1 apparently marks organisation admins — TODO confirm
        # against the User model; org admins see all members and projects.
        if user.user_type == 1:
            context['org_admin'] = "True"
            context['users_week_time'] = total_time_user
            context['all_members'] = User.objects.filter(~Q(id=self.request.user.id))
            context['members'] = User.objects.filter(~Q(id=self.request.user.id))
            context['projects'] = Project.objects.all()
        if TimeSheetData.objects.filter(user=user):
            timesheet = TimeSheetData.objects.filter(user=user)
            context['timesheet'] = timesheet
            context['weekdata'] = TimeSheetWeekData.objects.filter(timesheet__user=user).order_by('date')
        return context
Exemplo n.º 23
0
 def get_extra_filter(self):
     """AND together one Q per entry in ``self.extra_params_values``.

     Returns an empty ``Q()`` (matches everything) when there are no
     extra params; the previous ``reduce`` without an initializer raised
     TypeError on an empty mapping.
     """
     conditions = [Q(**{field: value})
                   for field, value in self.extra_params_values.items()]
     # Q() is a neutral element for &, so non-empty results are unchanged.
     return reduce(operator.and_, conditions, Q())
Exemplo n.º 24
0
 def filter_search(self, queryset, name, value):
     """Keep rows whose unit title, legacy name or own title contains *value*."""
     condition = Q(archival_unit__title__icontains=value)
     condition |= Q(archival_unit_legacy_name__icontains=value)
     condition |= Q(title__icontains=value)
     return queryset.filter(condition)
Exemplo n.º 25
0
def articleSearch(request):
    """Render articles whose title or content contains the ``search`` GET param.

    A missing ``search`` parameter now defaults to '' (matches every
    article) instead of passing the literal None into the icontains lookup.
    """
    search = request.GET.get('search', '')
    articles = Article.objects.filter(
        Q(title__icontains=search) | Q(content__icontains=search))
    context = {'articles': articles, 'search': search}
    return render(request, 'articleSearch.html', context)
Exemplo n.º 26
0
 def __and__(self, other):
     """Combine two conditional expressions with logical AND via Q objects.

     Raises NotImplementedError when either operand is not conditional,
     directing callers to the bitwise helpers instead.
     """
     self_is_conditional = getattr(self, 'conditional', False)
     other_is_conditional = getattr(other, 'conditional', False)
     if not (self_is_conditional and other_is_conditional):
         raise NotImplementedError(
             "Use .bitand() and .bitor() for bitwise logical operations."
         )
     return Q(self) & Q(other)
Exemplo n.º 27
0
 def filter_list_queryset(self, request, queryset, view):
     """Limit *queryset* to rows where the requesting user is sender or recipient."""
     requester = request.user
     involves_requester = Q(from_user=requester) | Q(to__user=requester)
     return queryset.filter(involves_requester)
Exemplo n.º 28
0
def get_visible_pages(request, pages, site=None):
    """Return the pks of the pages in *pages* visible to ``request.user``.

    A many-pages-at-once version of ``Page.has_view_permission``: first
    gathers every view restriction affecting any of the pages, then
    decides visibility per page from the CMS_PUBLIC_FOR setting, global
    permissions and per-page permission membership.
    ``pages`` is expected to contain published pages.
    """
    is_setting_public_all = settings.CMS_PUBLIC_FOR == 'all'
    is_setting_public_staff = settings.CMS_PUBLIC_FOR == 'staff'
    is_auth_user = request.user.is_authenticated()

    visible_page_ids = []
    # page pk -> list of PagePermission rows that restrict it
    restricted_pages = defaultdict(list)
    pages_perms_q = Q()

    for page in pages:
        # taken from for_page as multiple at once version: a permission
        # applies to the page itself, or to it as a descendant/child of
        # the page the permission was granted on (mptt level arithmetic)
        page_q = Q(page__tree_id=page.tree_id) & (
            Q(page=page)
            | (Q(page__level__lt=page.level) &
               (Q(grant_on=ACCESS_DESCENDANTS)
                | Q(grant_on=ACCESS_PAGE_AND_DESCENDANTS)))
            | (Q(page__level=page.level - 1) &
               (Q(grant_on=ACCESS_CHILDREN)
                | Q(grant_on=ACCESS_PAGE_AND_CHILDREN))))
        pages_perms_q |= page_q

    # only view permissions are relevant here
    pages_perms_q &= Q(can_view=True)
    page_permissions = PagePermission.objects.filter(
        pages_perms_q).select_related('page', 'group__users')

    for perm in page_permissions:
        # collect the pages that are affected by permissions
        if perm is not None and perm not in restricted_pages[perm.page.pk]:
            # affective restricted pages gathering
            # using mptt functions
            # add the page with the perm itself
            if perm.grant_on in [
                    ACCESS_PAGE, ACCESS_PAGE_AND_CHILDREN,
                    ACCESS_PAGE_AND_DESCENDANTS
            ]:
                restricted_pages[perm.page.pk].append(perm)
            # add children
            if perm.grant_on in [ACCESS_CHILDREN, ACCESS_PAGE_AND_CHILDREN]:
                child_ids = perm.page.get_children().values_list('id',
                                                                 flat=True)
                for id in child_ids:
                    restricted_pages[id].append(perm)
            # add descendants
            elif perm.grant_on in [
                    ACCESS_DESCENDANTS, ACCESS_PAGE_AND_DESCENDANTS
            ]:
                child_ids = perm.page.get_descendants().values_list('id',
                                                                    flat=True)
                for id in child_ids:
                    restricted_pages[id].append(perm)
    # anonymous user, public-for-all and no restriction at all:
    # everything is visible, short-circuit
    if (not is_auth_user and is_setting_public_all and not restricted_pages):
        return [page.pk for page in pages]

    if site is None:
        site = current_site(request)

    # authenticated user and global permission
    if is_auth_user:
        global_page_perm_q = Q(
            Q(user=request.user)
            | Q(group__user=request.user)) & Q(can_view=True) & Q(
                Q(sites__in=[site.pk]) | Q(sites__isnull=True))
        global_view_perms = GlobalPagePermission.objects.filter(
            global_page_perm_q).exists()

        #no page perms edgcase - all visible
        if ((is_setting_public_all or
             (is_setting_public_staff and request.user.is_staff))
                and not restricted_pages and not global_view_perms):
            return [page.pk for page in pages]
        #no page perms edgcase - none visible
        elif (is_setting_public_staff and not request.user.is_staff
              and not restricted_pages and not global_view_perms):
            return []

    def has_global_perm():
        # memoised check of the generic 'cms.view_page' permission
        # (cache: -1 unknown, 0 no, 1 yes)
        if has_global_perm.cache < 0:
            has_global_perm.cache = 1 if request.user.has_perm(
                'cms.view_page') else 0
        return bool(has_global_perm.cache)

    has_global_perm.cache = -1

    def has_permission_membership(page):
        """
        PagePermission user group membership tests
        """
        user_pk = request.user.pk
        page_pk = page.pk
        has_perm = False
        for perm in restricted_pages[page_pk]:
            if perm.user_id == user_pk:
                has_perm = True
            if not perm.group_id:
                continue
            # NOTE: one query per group permission — potentially chatty
            group_user_ids = perm.group.user_set.values_list('pk', flat=True)
            if user_pk in group_user_ids:
                has_perm = True
        return has_perm

    for page in pages:
        to_add = False
        # default to false, showing a restricted page is bad
        # explicitly check all the conditions
        # of settings and permissions
        is_restricted = page.pk in restricted_pages
        # restricted_pages contains as key any page.pk that is
        # affected by a permission grant_on
        if is_auth_user:
            # a global permission was given to the request's user
            if global_view_perms:
                to_add = True
            # setting based handling of unrestricted pages
            elif not is_restricted and (is_setting_public_all or
                                        (is_setting_public_staff
                                         and request.user.is_staff)):
                # authenticated user, no restriction and public for all
                # or
                # authenticated staff user, no restriction and public for staff
                to_add = True
            # check group and user memberships to restricted pages
            elif is_restricted and has_permission_membership(page):
                to_add = True
            elif has_global_perm():
                to_add = True
        # anonymous user, no restriction
        elif not is_restricted and is_setting_public_all:
            to_add = True
        # store it
        if to_add:
            visible_page_ids.append(page.pk)
    return visible_page_ids
Exemplo n.º 29
0
 def __init__(self, *args, **kwargs):
     """Initialise the queryset and pin it to recycle-bin rows only."""
     super().__init__(*args, **kwargs)
     # Binned rows carry a deletion timestamp; require it to be set.
     in_recycle_bin = Q(deleted_at__isnull=False)
     self.query.add_q(in_recycle_bin)
Exemplo n.º 30
0
    def posicao_em(self, data_hora):
        """Return the player's ladder position at the given date/time.

        Starts from the player's most recent validated challenge (or, if
        none, the latest ladder snapshot), then applies position shifts
        from removals, other players' challenges and decay events up to
        *data_hora*.  Returns 0 for new entrants.
        """
        # Determine the Season covering the given date/time
        season = Season.objects.filter(
            data_inicio__lte=data_hora.date()).order_by('-data_inicio')[0]

        # Find the player's latest ladder removal; if one exists within the
        # season, the position is computed from its date onward
        if RemocaoJogador.objects.filter(
                jogador=self,
                data__lt=data_hora,
                data__gte=season.data_hora_inicio).exists():
            data_inicial = RemocaoJogador.objects.filter(
                jogador=self).order_by('-data')[0].data
            desafios_validados_considerando_remocao = DesafioLadder.validados.na_season(
                season).filter(data_hora__gt=data_inicial)
        else:
            data_inicial = None
            desafios_validados_considerando_remocao = DesafioLadder.validados.na_season(
                season)

        # Find the most recent validated challenge the player took part in
        desafio_mais_recente = None

        if desafios_validados_considerando_remocao.filter(
                data_hora__lt=data_hora).filter(
                    Q(desafiante=self) | Q(desafiado=self)).exists():
            # Annotate the position resulting from each challenge:
            # winning challenger takes the defender's spot, losing
            # challenger keeps his own; defender drops one spot on a loss
            desafio_mais_recente = desafios_validados_considerando_remocao.filter(data_hora__lt=data_hora) \
                .filter(Q(desafiante=self) | Q(desafiado=self)).annotate(
                    posicao=Case(When(Q(desafiante=self) & Q(score_desafiante__gt=F('score_desafiado')), then=F('posicao_desafiado')),
                                 When(Q(desafiante=self) & Q(score_desafiante__lt=F('score_desafiado')), then=F('posicao_desafiante')),
                                 When(Q(desafiado=self) & Q(score_desafiante__gt=F('score_desafiado')), then=F('posicao_desafiado') + 1),
                                 When(Q(desafiado=self) & Q(score_desafiante__lt=F('score_desafiado')), then=F('posicao_desafiado')))) \
                                 .order_by('-data_hora')[0]

        if desafio_mais_recente:
            #             print(f'Desafio mais recente de {self}: {desafio_mais_recente.id} com {desafio_mais_recente.desafiante} VS {desafio_mais_recente.desafiado}')
            posicao = desafio_mais_recente.posicao

            # Apply position shifts caused by removals of other players
            posicao += (ResultadoRemocaoJogador.objects.filter(remocao__data__range=[desafio_mais_recente.data_hora, data_hora], jogador=self) \
                .exclude(remocao__data=data_hora).aggregate(alteracao_total=Sum('alteracao_posicao'))['alteracao_total'] or 0)

            #             print(self, ResultadoDesafioLadder.objects.filter(jogador=self, desafio_ladder__data_hora__range=[desafio_mais_recente.data_hora, data_hora]) \
            #                 .exclude(desafio_ladder=desafio_mais_recente).exclude(desafio_ladder__data_hora=data_hora) \
            #                 .aggregate(alteracao_total=Sum('alteracao_posicao'))['alteracao_total'] or 0)

            # Apply position shifts caused by other players' challenges
            posicao += (ResultadoDesafioLadder.objects.filter(jogador=self, desafio_ladder__data_hora__range=[desafio_mais_recente.data_hora, data_hora]) \
                .filter(desafio_ladder__admin_validador__isnull=False, desafio_ladder__cancelamentodesafioladder__isnull=True) \
                .exclude(desafio_ladder=desafio_mais_recente).exclude(desafio_ladder__data_hora=data_hora) \
                .aggregate(alteracao_total=Sum('alteracao_posicao'))['alteracao_total'] or 0)

            # Apply position shifts caused by other players' decay events
            posicao += (ResultadoDecaimentoJogador.objects.filter(jogador=self, decaimento__data__range=[desafio_mais_recente.data_hora, data_hora]) \
                .exclude(decaimento__data=data_hora) \
                .aggregate(alteracao_total=Sum('alteracao_posicao'))['alteracao_total'] or 0)

            return posicao

        # No recorded challenges: fall back to the latest ladder snapshot
        # (previous month's history, else the initial ladder)
        mes, ano = mes_ano_ant(data_hora.month, data_hora.year)

        if HistoricoLadder.objects.filter(ano=ano, mes=mes).exists():
            # If the removal happened in the current month, skip the lookup
            # and treat the player as a new entrant
            if data_inicial and data_inicial.month == data_hora.month and data_inicial.year == data_hora.year:
                return 0
            ultima_ladder = HistoricoLadder.objects.filter(ano=ano, mes=mes)
        else:
            # If there was a removal, skip the initial-ladder lookup
            if data_inicial:
                return 0
            ultima_ladder = InicioLadder.objects.all()

        if ultima_ladder.filter(jogador=self).exists():
            posicao = ultima_ladder.get(jogador=self).posicao

            #             print(self, ResultadoDesafioLadder.objects.filter(jogador=self, desafio_ladder__data_hora__lt=data_hora) \
            #                 .aggregate(alteracao_total=Sum('alteracao_posicao'))['alteracao_total'] or 0)

            # Apply position shifts caused by other players' changes
            if HistoricoLadder.objects.filter(ano=ano, mes=mes).exists():
                # Removals
                posicao += (ResultadoRemocaoJogador.objects.filter(remocao__data__lt=data_hora, jogador=self) \
                            .filter(remocao__data__month=data_hora.month, remocao__data__year=data_hora.year)
                            .aggregate(alteracao_total=Sum('alteracao_posicao'))['alteracao_total'] or 0)
                #                 posicao -= RemocaoJogador.objects.filter(data__lt=data_hora, posicao_jogador__lt=posicao) \
                #                     .filter(data__month=data_hora.month, data__year=data_hora.year).count()

                # Challenges
                posicao += (ResultadoDesafioLadder.objects.filter(jogador=self, desafio_ladder__data_hora__lt=data_hora) \
                    .filter(desafio_ladder__data_hora__month=data_hora.month, desafio_ladder__data_hora__year=data_hora.year,
                            desafio_ladder__admin_validador__isnull=False, desafio_ladder__cancelamentodesafioladder__isnull=True) \
                    .aggregate(alteracao_total=Sum('alteracao_posicao'))['alteracao_total'] or 0)

                # Decays
                posicao += (ResultadoDecaimentoJogador.objects.filter(jogador=self, decaimento__data__lt=data_hora) \
                    .filter(decaimento__data__month=data_hora.month, decaimento__data__year=data_hora.year)
                    .aggregate(alteracao_total=Sum('alteracao_posicao'))['alteracao_total'] or 0)
            else:
                # Removals
                #                 posicao -= RemocaoJogador.objects.filter(data__lt=data_hora, posicao_jogador__lt=posicao).count()
                posicao += (ResultadoRemocaoJogador.objects.filter(remocao__data__lt=data_hora, remocao__data__gte=season.data_hora_inicio,
                                                                   jogador=self).aggregate(alteracao_total= \
                                                                                           Sum('alteracao_posicao'))['alteracao_total'] or 0)

                # Challenges
                posicao += (ResultadoDesafioLadder.objects.filter(jogador=self, desafio_ladder__data_hora__lt=data_hora,
                            desafio_ladder__data_hora__gte=season.data_hora_inicio, desafio_ladder__admin_validador__isnull=False,
                            desafio_ladder__cancelamentodesafioladder__isnull=True) \
                    .aggregate(alteracao_total=Sum('alteracao_posicao'))['alteracao_total'] or 0)

                # Decays
                posicao += (ResultadoDecaimentoJogador.objects.filter(jogador=self, decaimento__data__lt=data_hora,
                                                                      decaimento__data__gte=season.data_hora_inicio) \
                    .aggregate(alteracao_total=Sum('alteracao_posicao'))['alteracao_total'] or 0)

            return posicao

        # New entrants start at position 0
        return 0