Example #1
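    # Ban.save() guard clauses: reject self-bans, bans created by non-staff
    # users, and bans that target staff, before the row is written.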
    def save(self, *args, **kwargs):
        if self.receiver == self.creator:
            raise Error("Self-banning is prohibited")
        if not self.creator.is_staff:
            raise Error("Bans can only be created by admins")
        if self.receiver.is_staff:
            raise Error("Admins cannot be banned")

        super(Ban, self).save(*args, **kwargs)
Example #2
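# Pick a write target for a model: the first Databases entry whose row count
# is still below settings.DATABASE_MAX_ROWS.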
def select_write_db(model_name):
    db_list = Databases.objects.all().filter(model_name=model_name)
    if db_list.count() == 0:
        raise Error(f"Database for {model_name} model dosen't exist")

    for db in db_list:
        if db.count < settings.DATABASE_MAX_ROWS:
            return str(db)
    raise Error("Database full")
Example #3
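# Variant of select_write_db that returns the db object itself rather than
# its string name.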
def select_write_db(model_name):
    db_list = Databases.objects.all().filter(model_name=model_name)

    if db_list.count() == 0:
        raise Error(
            f"No database for {model_name} Model, please add it from admin")

    for db in db_list:
        if db.count < settings.DATABASE_MAX_ROWS:
            return db
    raise Error("Database full")
Example #4
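    # Attachment.save() guard: allow at most five attachments per post.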
    def save(self, *args, **kwargs):
        attachments_count = Attachment.objects.all().filter(
            post=self.post).count()

        if attachments_count >= 5:
            raise Error("Maximum 5 attachments per post")

        super(Attachment, self).save(*args, **kwargs)
Example #5
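    # Simulate a database failure by making the mocked cursor's execute()
    # raise Error, then assert the health endpoint responds with HTTP 500.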
    def test_status_health_failed(self, mocked_connection):
        mocked_cursor = mock.MagicMock()
        mocked_cursor.execute.side_effect = Error()
        mocked_connection.cursor.return_value.__enter__.return_value = mocked_cursor

        response = self.client.get('/status/health')

        self.assertEqual(response.status_code, 500)
        self.assertEqual(response.content, b'Database connectivity failed')
Example #6
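    # Look up the stored Microsoft OAuth service and its app token, failing
    # fast if either is missing.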
    def __init__(self):

        service = OAuthService.objects.filter(name='microsoft').first()
        if service is None:
            raise Error('No Microsoft app to manage AD')

        self.token = service.provider.retrieve_app_token()
        if self.token is None:
            raise AttributeError('There should be a user with an O365 token')
Example #7
    def test_health_not_ok(self, mocked_connection):
        mocked_cursor = mock.MagicMock()
        mocked_cursor.execute.side_effect = Error()
        mocked_connection.cursor.return_value.__enter__.return_value = mocked_cursor

        url = reverse('health')
        response = self.client.get(url)

        self.assertEqual(response.status_code,
                         status.HTTP_500_INTERNAL_SERVER_ERROR)
Example #8
    def test_luy_to_duplicate_with_error(self, mock_method):
        mock_method.side_effect = Mock(side_effect=Error("test error"))

        luy_with_error = LearningUnitYearFactory(
            learning_unit=self.learning_unit,
            academic_year=self.academic_years[-2],
        )
        self.assertEqual(LearningUnitYear.objects.count(), 1)

        result, errors = LearningUnitAutomaticPostponementToN6().postpone()
        self.assertEqual(errors, [luy_with_error.learning_unit])
        self.assertEqual(len(result), 0)
Example #9
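    # Load categories and sub-categories from a JSON fixture and persist
    # them, wrapping any save failure in Error.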
    def fill_categories(self):
        """
        Fill the categories from the JSON fixture into the database.
        """
        with open("static/json/categories.json", "r", encoding="utf8") as file:
            categories_list = json.load(file)
        for category_dict in categories_list:
            category = Category(name=category_dict["category"])
            try:
                category.save()
            except Exception as exc:
                raise Error(
                    "Unable to save the category {}".format(category.name)) from exc
            for sub_category_name in category_dict["sub-category"]:
                sub_category = Category(
                    name=sub_category_name, parent_category=category)
                try:
                    sub_category.save()
                except Exception as exc:
                    raise Error(
                        "Unable to save the sub-category {}".format(sub_category.name)) from exc
Example #10
    def test_egy_to_duplicate_with_error(self, mock_method):
        mock_method.side_effect = Mock(side_effect=Error("test error"))

        egy_with_error = EducationGroupYearFactory(
            education_group=self.education_group,
            academic_year=self.academic_years[-2],
        )
        self.assertEqual(EducationGroupYear.objects.count(), 1)

        result, errors = EducationGroupAutomaticPostponementToN6().postpone()
        self.assertTrue(mock_method.called)
        self.assertEqual(errors, [egy_with_error.education_group])
        self.assertEqual(len(result), 0)
Example #11
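# Build an INSERT INTO ... SELECT statement that maps each destination column
# to its best-matching source column, falling back to NULL.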
def trans_table(src_table, dest_table, extra=''):
    src_fields = desc_table(src_table)
    dest_fields = desc_table(dest_table)
    if not src_fields:
        raise Error('src_table %s does not exist' % src_table)
    m = {}
    for f in dest_fields:
        bmf = get_best_match_field(f, src_fields)
        m[f] = bmf or ['null']
    dfs = [f[0] for f in m.keys()]
    sfs = [f[0] for f in m.values()]
    return "insert into %s (%s) select %s from %s %s;\n" % (
        dest_table, ','.join(dfs), ','.join(sfs), src_table, extra or "")
Example #12
def db_list_for_read(model_name):
    db_list = Databases.objects.all().filter(
        model_name=model_name).exclude(count=0)

    if db_list.count() == 0:
        # Let the Error propagate so the caller sees which model is missing
        # a database, instead of silently returning None.
        raise Error(
            f"No database for {model_name} Model, please add it from admin"
        )

    return db_list
Example #13
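    # Appears to be a patched copy of Django's SQLCompiler.as_sql() that adds
    # support for a custom query.inner_join attribute (see the INNER JOIN
    # branch below).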
    def as_sql(self, with_limits=True, with_col_aliases=False):
        """
        Create the SQL for this query. Return the SQL string and list of
        parameters.

        If 'with_limits' is False, any limit/offset information is not included
        in the query.
        """
        refcounts_before = self.query.alias_refcount.copy()
        try:
            extra_select, order_by, group_by = self.pre_sql_setup()
            for_update_part = None
            # Is a LIMIT/OFFSET clause needed?
            with_limit_offset = with_limits and (
                self.query.high_mark is not None or self.query.low_mark)
            combinator = self.query.combinator
            features = self.connection.features
            if combinator:
                if not getattr(features,
                               'supports_select_{}'.format(combinator)):
                    raise NotSupportedError(
                        '{} is not supported on this database backend.'.format(
                            combinator))
                result, params = self.get_combinator_sql(
                    combinator, self.query.combinator_all)
            else:
                distinct_fields, distinct_params = self.get_distinct()
                # This must come after 'select', 'ordering', and 'distinct'
                # (see docstring of get_from_clause() for details).
                from_, f_params = self.get_from_clause()
                where, w_params = self.compile(
                    self.where) if self.where is not None else ("", [])
                having, h_params = self.compile(
                    self.having) if self.having is not None else ("", [])
                result = ['SELECT']
                params = []

                if self.query.distinct:
                    distinct_result, distinct_params = self.connection.ops.distinct_sql(
                        distinct_fields,
                        distinct_params,
                    )
                    result += distinct_result
                    params += distinct_params

                out_cols = []
                col_idx = 1
                for _, (s_sql, s_params), alias in self.select + extra_select:
                    if alias:
                        s_sql = '%s AS %s' % (
                            s_sql, self.connection.ops.quote_name(alias))
                    elif with_col_aliases:
                        s_sql = '%s AS %s' % (s_sql, 'Col%d' % col_idx)
                        col_idx += 1
                    params.extend(s_params)
                    out_cols.append(s_sql)

                result += [', '.join(out_cols), 'FROM', *from_]
                params.extend(f_params)

                if self.query.select_for_update and self.connection.features.has_select_for_update:
                    if self.connection.get_autocommit():
                        raise TransactionManagementError(
                            'select_for_update cannot be used outside of a transaction.'
                        )

                    if with_limit_offset and not self.connection.features.supports_select_for_update_with_limit:
                        raise NotSupportedError(
                            'LIMIT/OFFSET is not supported with '
                            'select_for_update on this database backend.')
                    nowait = self.query.select_for_update_nowait
                    skip_locked = self.query.select_for_update_skip_locked
                    of = self.query.select_for_update_of
                    # If it's a NOWAIT/SKIP LOCKED/OF query but the backend
                    # doesn't support it, raise NotSupportedError to prevent a
                    # possible deadlock.
                    if nowait and not self.connection.features.has_select_for_update_nowait:
                        raise NotSupportedError(
                            'NOWAIT is not supported on this database backend.'
                        )
                    elif skip_locked and not self.connection.features.has_select_for_update_skip_locked:
                        raise NotSupportedError(
                            'SKIP LOCKED is not supported on this database backend.'
                        )
                    elif of and not self.connection.features.has_select_for_update_of:
                        raise NotSupportedError(
                            'FOR UPDATE OF is not supported on this database backend.'
                        )
                    for_update_part = self.connection.ops.for_update_sql(
                        nowait=nowait,
                        skip_locked=skip_locked,
                        of=self.get_select_for_update_of_arguments(),
                    )

                if for_update_part and self.connection.features.for_update_after_from:
                    result.append(for_update_part)

                if where:
                    result.append('WHERE %s' % where)
                    params.extend(w_params)

                grouping = []
                for g_sql, g_params in group_by:
                    grouping.append(g_sql)
                    params.extend(g_params)
                if grouping:
                    if distinct_fields:
                        raise NotImplementedError(
                            'annotate() + distinct(fields) is not implemented.'
                        )
                    order_by = order_by or self.connection.ops.force_no_ordering(
                    )
                    result.append('GROUP BY %s' % ', '.join(grouping))
                    if self._meta_ordering:
                        # When the deprecation ends, replace with:
                        # order_by = None
                        warnings.warn(
                            "%s QuerySet won't use Meta.ordering in Django 3.1. "
                            "Add .order_by('%s') to retain the current query."
                            % (self.query.model.__name__, "', '".join(
                                self._meta_ordering)),
                            RemovedInDjango31Warning,
                            stacklevel=4,
                        )
                if having:
                    result.append('HAVING %s' % having)
                    params.extend(h_params)

                if self.query.inner_join:
                    query, first_table_rows, second_table_rows = self.query.inner_join
                    if len(first_table_rows) != len(second_table_rows):
                        raise Error(
                            'Number of columns in both tuples must be equal')
                    subquery_table_name = 'inner_join_table'
                    join_columns = []
                    for i in range(len(first_table_rows)):
                        join_columns.append('{}.{}={}.{}'.format(
                            self.query.model._meta.db_table,
                            first_table_rows[i], subquery_table_name,
                            second_table_rows[i]))
                    # Combine the conditions with AND so that composite keys
                    # produce valid SQL.
                    join_columns = ' AND '.join(join_columns)
                    query_part = 'INNER JOIN ({}) AS {} ON {}'.format(
                        query, subquery_table_name, join_columns)
                    result.append(query_part)

            if self.query.explain_query:
                result.insert(
                    0,
                    self.connection.ops.explain_query_prefix(
                        self.query.explain_format,
                        **self.query.explain_options))

            if order_by:
                ordering = []
                for _, (o_sql, o_params, _) in order_by:
                    ordering.append(o_sql)
                    params.extend(o_params)
                result.append('ORDER BY %s' % ', '.join(ordering))

            if with_limit_offset:
                result.append(
                    self.connection.ops.limit_offset_sql(
                        self.query.low_mark, self.query.high_mark))

            if for_update_part and not self.connection.features.for_update_after_from:
                result.append(for_update_part)

            if self.query.subquery and extra_select:
                # If the query is used as a subquery, the extra selects would
                # result in more columns than the left-hand side expression is
                # expecting. This can happen when a subquery uses a combination
                # of order_by() and distinct(), forcing the ordering expressions
                # to be selected as well. Wrap the query in another subquery
                # to exclude extraneous selects.
                sub_selects = []
                sub_params = []
                for index, (select, _, alias) in enumerate(self.select,
                                                           start=1):
                    if not alias and with_col_aliases:
                        alias = 'col%d' % index
                    if alias:
                        sub_selects.append("%s.%s" % (
                            self.connection.ops.quote_name('subquery'),
                            self.connection.ops.quote_name(alias),
                        ))
                    else:
                        select_clone = select.relabeled_clone(
                            {select.alias: 'subquery'})
                        subselect, subparams = select_clone.as_sql(
                            self, self.connection)
                        sub_selects.append(subselect)
                        sub_params.extend(subparams)
                return 'SELECT %s FROM (%s) subquery' % (
                    ', '.join(sub_selects),
                    ' '.join(result),
                ), tuple(sub_params + params)
            return ' '.join(result), tuple(params)
        finally:
            # Finally do cleanup - get rid of the joins we created above.
            self.query.reset_refcounts(refcounts_before)
Example #14
def select_read_db(model_name):
    db_list = Databases.objects.all().filter(model_name=model_name)
    for db in db_list:
        if db.count < settings.DATABASE_MAX_ROWS:
            return db
    raise Error("Database full")
Example #15
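# Sharded read selection: when sharding is enabled, return the name of a read
# replica (here hard-coded as model_name + '_1').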
def select_read_db(model_name=None, is_sharded=True):
    if model_name:
        db_list = Databases.objects.all().filter(model_name=model_name)

        if db_list.count() == 0:
            raise Error(
                f"No database for {model_name} Model, please add it from admin"
            )

        if not is_sharded:
            return None

        # here select replica
        return model_name + '_1'
    return None


###########################

# def create_forward_many_to_many_manager(superclass, rel, reverse, using_db=None):
#     """
#     Create a manager for the either side of a many-to-many relation.

#     This manager subclasses another manager, generally the default manager of
#     the related model, and adds behaviors specific to many-to-many relations.
#     """

#     class ManyRelatedManager(superclass):
#         def __init__(self, instance=None):
#             super().__init__()

#             self.instance = instance
#             self.using_db = using_db

#             if not reverse:
#                 self.model = rel.model
#                 self.query_field_name = rel.field.related_query_name()
#                 self.prefetch_cache_name = rel.field.name
#                 self.source_field_name = rel.field.m2m_field_name()
#                 self.target_field_name = rel.field.m2m_reverse_field_name()
#                 self.symmetrical = rel.symmetrical
#             else:
#                 self.model = rel.related_model
#                 self.query_field_name = rel.field.name
#                 self.prefetch_cache_name = rel.field.related_query_name()
#                 self.source_field_name = rel.field.m2m_reverse_field_name()
#                 self.target_field_name = rel.field.m2m_field_name()
#                 self.symmetrical = False

#             self.through = rel.through
#             self.reverse = reverse

#             self.source_field = self.through._meta.get_field(self.source_field_name)
#             self.target_field = self.through._meta.get_field(self.target_field_name)

#             self.core_filters = {}
#             self.pk_field_names = {}
#             for lh_field, rh_field in self.source_field.related_fields:
#                 core_filter_key = '%s__%s' % (self.query_field_name, rh_field.name)
#                 self.core_filters[core_filter_key] = getattr(instance, rh_field.attname)
#                 self.pk_field_names[lh_field.name] = rh_field.name

#             self.related_val = self.source_field.get_foreign_related_value(instance)
#             if None in self.related_val:
#                 raise ValueError('"%r" needs to have a value for field "%s" before '
#                                  'this many-to-many relationship can be used.' %
#                                  (instance, self.pk_field_names[self.source_field_name]))
#             # Even if this relation is not to pk, we require still pk value.
#             # The wish is that the instance has been already saved to DB,
#             # although having a pk value isn't a guarantee of that.
#             if instance.pk is None:
#                 raise ValueError("%r instance needs to have a primary key value before "
#                                  "a many-to-many relationship can be used." %
#                                  instance.__class__.__name__)

#         def __call__(self, *, manager):
#             manager = getattr(self.model, manager)
#             manager_class = create_forward_many_to_many_manager(manager.__class__, rel, reverse)
#             return manager_class(instance=self.instance)
#         do_not_call_in_templates = True

#         def _build_remove_filters(self, removed_vals):
#             filters = Q(**{self.source_field_name: self.related_val})
#             # No need to add a subquery condition if removed_vals is a QuerySet without
#             # filters.
#             removed_vals_filters = (not isinstance(removed_vals, QuerySet) or
#                                     removed_vals._has_filters())
#             if removed_vals_filters:
#                 filters &= Q(**{'%s__in' % self.target_field_name: removed_vals})
#             if self.symmetrical:
#                 symmetrical_filters = Q(**{self.target_field_name: self.related_val})
#                 if removed_vals_filters:
#                     symmetrical_filters &= Q(
#                         **{'%s__in' % self.source_field_name: removed_vals})
#                 filters |= symmetrical_filters
#             return filters

#         def _apply_rel_filters(self, queryset):
#             """
#             Filter the queryset for the instance this manager is bound to.
#             """
#             queryset._add_hints(instance=self.instance)
#             #if self._db:
#                 #queryset = queryset.using(self._db)
#             queryset = queryset.using(self.using_db)
#             return queryset._next_is_sticky().filter(**self.core_filters)

#         def _remove_prefetched_objects(self):
#             try:
#                 self.instance._prefetched_objects_cache.pop(self.prefetch_cache_name)
#             except (AttributeError, KeyError):
#                 pass  # nothing to clear from cache

#         def get_queryset(self):

#             try:
#                 return self.instance._prefetched_objects_cache[self.prefetch_cache_name]
#             except (AttributeError, KeyError):
#                 queryset = super().get_queryset().using("product_1")
#                 return self._apply_rel_filters(queryset)

#         def get_prefetch_queryset(self, instances, queryset=None):
#             if queryset is None:
#                 queryset = super().get_queryset().using("product_1")

#             queryset._add_hints(instance=instances[0])
#             queryset = queryset.using(self.using_db)#queryset._db or self._db)

#             query = {'%s__in' % self.query_field_name: instances}
#             queryset = queryset._next_is_sticky().filter(**query)

#             # M2M: need to annotate the query in order to get the primary model
#             # that the secondary model was actually related to. We know that
#             # there will already be a join on the join table, so we can just add
#             # the select.

#             # For non-autocreated 'through' models, can't assume we are
#             # dealing with PK values.
#             fk = self.through._meta.get_field(self.source_field_name)
#             join_table = fk.model._meta.db_table
#             connection = connections[self.using_db]#queryset.db]
#             qn = connection.ops.quote_name
#             queryset = queryset.extra(select={
#                 '_prefetch_related_val_%s' % f.attname:
#                 '%s.%s' % (qn(join_table), qn(f.column)) for f in fk.local_related_fields})
#             return (
#                 queryset,
#                 lambda result: tuple(
#                     getattr(result, '_prefetch_related_val_%s' % f.attname)
#                     for f in fk.local_related_fields
#                 ),
#                 lambda inst: tuple(
#                     f.get_db_prep_value(getattr(inst, f.attname), connection)
#                     for f in fk.foreign_related_fields
#                 ),
#                 False,
#                 self.prefetch_cache_name,
#                 False,
#             )

#         def add(self, *objs, through_defaults=None):
#             self._remove_prefetched_objects()
#             #db = router.db_for_write(self.through, instance=self.instance)
#             db = self.using_db
#             with transaction.atomic(using=db, savepoint=False):
#                 self._add_items(
#                     self.source_field_name, self.target_field_name, *objs,
#                     through_defaults=through_defaults,
#                 )
#                 # If this is a symmetrical m2m relation to self, add the mirror
#                 # entry in the m2m table.
#                 if self.symmetrical:
#                     self._add_items(
#                         self.target_field_name,
#                         self.source_field_name,
#                         *objs,
#                         through_defaults=through_defaults,
#                     )
#         add.alters_data = True

#         def remove(self, *objs):
#             self._remove_prefetched_objects()
#             self._remove_items(self.source_field_name, self.target_field_name, *objs)
#         remove.alters_data = True

#         def clear(self):
#             #db = router.db_for_write(self.through, instance=self.instance)
#             db = self.using_db
#             with transaction.atomic(using=db, savepoint=False):
#                 signals.m2m_changed.send(
#                     sender=self.through, action="pre_clear",
#                     instance=self.instance, reverse=self.reverse,
#                     model=self.model, pk_set=None, using=db,
#                 )
#                 self._remove_prefetched_objects()
#                 filters = self._build_remove_filters(super().get_queryset().using(db))
#                 self.through._default_manager.using(db).filter(filters).delete()

#                 signals.m2m_changed.send(
#                     sender=self.through, action="post_clear",
#                     instance=self.instance, reverse=self.reverse,
#                     model=self.model, pk_set=None, using=db,
#                 )
#         clear.alters_data = True

#         def set(self, objs, *, clear=False, through_defaults=None):
#             # Force evaluation of `objs` in case it's a queryset whose value
#             # could be affected by `manager.clear()`. Refs #19816.
#             objs = tuple(objs)

#             #db = router.db_for_write(self.through, instance=self.instance)
#             db = self.using_db
#             with transaction.atomic(using=db, savepoint=False):
#                 if clear:
#                     self.clear()
#                     self.add(*objs, through_defaults=through_defaults)
#                 else:
#                     old_ids = set(self.using(db).values_list(self.target_field.target_field.attname, flat=True))

#                     new_objs = []
#                     for obj in objs:
#                         fk_val = (
#                             self.target_field.get_foreign_related_value(obj)[0]
#                             if isinstance(obj, self.model) else obj
#                         )
#                         if fk_val in old_ids:
#                             old_ids.remove(fk_val)
#                         else:
#                             new_objs.append(obj)

#                     self.remove(*old_ids)
#                     self.add(*new_objs, through_defaults=through_defaults)
#         set.alters_data = True

#         def create(self, *, through_defaults=None, **kwargs):
#             #db = router.db_for_write(self.instance.__class__, instance=self.instance)
#             db = self.using_db
#             new_obj = super(ManyRelatedManager, self.db_manager(db)).create(**kwargs)
#             self.add(new_obj, through_defaults=through_defaults)
#             return new_obj
#         create.alters_data = True

#         def get_or_create(self, *, through_defaults=None, **kwargs):
#             #db = router.db_for_write(self.instance.__class__, instance=self.instance)
#             db = self.using_db
#             obj, created = super(ManyRelatedManager, self.db_manager(db)).get_or_create(**kwargs)
#             # We only need to add() if created because if we got an object back
#             # from get() then the relationship already exists.
#             if created:
#                 self.add(obj, through_defaults=through_defaults)
#             return obj, created
#         get_or_create.alters_data = True

#         def update_or_create(self, *, through_defaults=None, **kwargs):
#             #db = router.db_for_write(self.instance.__class__, instance=self.instance)
#             db = self.using_db
#             obj, created = super(ManyRelatedManager, self.db_manager(db)).update_or_create(**kwargs)
#             # We only need to add() if created because if we got an object back
#             # from get() then the relationship already exists.
#             if created:
#                 self.add(obj, through_defaults=through_defaults)
#             return obj, created
#         update_or_create.alters_data = True

#         def _get_target_ids(self, target_field_name, objs):
#             """
#             Return the set of ids of `objs` that the target field references.
#             """
#             from django.db.models import Model
#             target_ids = set()
#             target_field = self.through._meta.get_field(target_field_name)
#             for obj in objs:
#                 if isinstance(obj, self.model):
#                     if not router.allow_relation(obj, self.instance):
#                         raise ValueError(
#                             'Cannot add "%r": instance is on database "%s", '
#                             'value is on database "%s"' %
#                             (obj, self.instance._state.db, obj._state.db)
#                         )
#                     target_id = target_field.get_foreign_related_value(obj)[0]
#                     if target_id is None:
#                         raise ValueError(
#                             'Cannot add "%r": the value for field "%s" is None' %
#                             (obj, target_field_name)
#                         )
#                     target_ids.add(target_id)
#                 elif isinstance(obj, Model):
#                     raise TypeError(
#                         "'%s' instance expected, got %r" %
#                         (self.model._meta.object_name, obj)
#                     )
#                 else:
#                     target_ids.add(obj)
#             return target_ids

#         def _get_missing_target_ids(self, source_field_name, target_field_name, db, target_ids):
#             """
#             Return the subset of ids of `objs` that aren't already assigned to
#             this relationship.
#             """
#             vals = self.through._default_manager.using(db).values_list(
#                 target_field_name, flat=True
#             ).filter(**{
#                 source_field_name: self.related_val[0],
#                 '%s__in' % target_field_name: target_ids,
#             })
#             return target_ids.difference(vals)

#         def _get_add_plan(self, db, source_field_name):
#             """
#             Return a boolean triple of the way the add should be performed.

#             The first element is whether or not bulk_create(ignore_conflicts)
#             can be used, the second whether or not signals must be sent, and
#             the third element is whether or not the immediate bulk insertion
#             with conflicts ignored can be performed.
#             """
#             # Conflicts can be ignored when the intermediary model is
#             # auto-created as the only possible collision is on the
#             # (source_id, target_id) tuple. The same assertion doesn't hold for
#             # user-defined intermediary models as they could have other fields
#             # causing conflicts which must be surfaced.
#             can_ignore_conflicts = (
#                 connections[db].features.supports_ignore_conflicts and
#                 self.through._meta.auto_created is not False
#             )
#             # Don't send the signal when inserting duplicate data row
#             # for symmetrical reverse entries.
#             must_send_signals = (self.reverse or source_field_name == self.source_field_name) and (
#                 signals.m2m_changed.has_listeners(self.through)
#             )
#             # Fast addition through bulk insertion can only be performed
#             # if no m2m_changed listeners are connected for self.through
#             # as they require the added set of ids to be provided via
#             # pk_set.
#             return can_ignore_conflicts, must_send_signals, (can_ignore_conflicts and not must_send_signals)

#         def _add_items(self, source_field_name, target_field_name, *objs, through_defaults=None):
#             # source_field_name: the PK fieldname in join table for the source object
#             # target_field_name: the PK fieldname in join table for the target object
#             # *objs - objects to add. Either object instances, or primary keys of object instances.
#             through_defaults = through_defaults or {}

#             # If there aren't any objects, there is nothing to do.
#             if objs:
#                 target_ids = self._get_target_ids(target_field_name, objs)
#                 #db = router.db_for_write(self.through, instance=self.instance)
#                 db = self.using_db
#                 can_ignore_conflicts, must_send_signals, can_fast_add = self._get_add_plan(db, source_field_name)
#                 if can_fast_add:
#                     self.through._default_manager.using(db).bulk_create([
#                         self.through(**{
#                             '%s_id' % source_field_name: self.related_val[0],
#                             '%s_id' % target_field_name: target_id,
#                         })
#                         for target_id in target_ids
#                     ], ignore_conflicts=True)
#                     return

#                 missing_target_ids = self._get_missing_target_ids(
#                     source_field_name, target_field_name, db, target_ids
#                 )
#                 with transaction.atomic(using=db, savepoint=False):
#                     if must_send_signals:
#                         signals.m2m_changed.send(
#                             sender=self.through, action='pre_add',
#                             instance=self.instance, reverse=self.reverse,
#                             model=self.model, pk_set=missing_target_ids, using=db,
#                         )

#                     # Add the ones that aren't there already.
#                     self.through._default_manager.using(db).bulk_create([
#                         self.through(**through_defaults, **{
#                             '%s_id' % source_field_name: self.related_val[0],
#                             '%s_id' % target_field_name: target_id,
#                         })
#                         for target_id in missing_target_ids
#                     ], ignore_conflicts=can_ignore_conflicts)

#                     if must_send_signals:
#                         signals.m2m_changed.send(
#                             sender=self.through, action='post_add',
#                             instance=self.instance, reverse=self.reverse,
#                             model=self.model, pk_set=missing_target_ids, using=db,
#                         )

#         def _remove_items(self, source_field_name, target_field_name, *objs):
#             # source_field_name: the PK colname in join table for the source object
#             # target_field_name: the PK colname in join table for the target object
#             # *objs - objects to remove. Either object instances, or primary
#             # keys of object instances.
#             if not objs:
#                 return

#             # Check that all the objects are of the right type
#             old_ids = set()
#             for obj in objs:
#                 if isinstance(obj, self.model):
#                     fk_val = self.target_field.get_foreign_related_value(obj)[0]
#                     old_ids.add(fk_val)
#                 else:
#                     old_ids.add(obj)

#             #db = router.db_for_write(self.through, instance=self.instance)
#             db = self.using_db
#             with transaction.atomic(using=db, savepoint=False):
#                 # Send a signal to the other end if need be.
#                 signals.m2m_changed.send(
#                     sender=self.through, action="pre_remove",
#                     instance=self.instance, reverse=self.reverse,
#                     model=self.model, pk_set=old_ids, using=db,
#                 )
#                 target_model_qs = super().get_queryset()
#                 if target_model_qs._has_filters():
#                     old_vals = target_model_qs.using(db).filter(**{
#                         '%s__in' % self.target_field.target_field.attname: old_ids})
#                 else:
#                     old_vals = old_ids
#                 filters = self._build_remove_filters(old_vals)
#                 self.through._default_manager.using(db).filter(filters).delete()

#                 signals.m2m_changed.send(
#                     sender=self.through, action="post_remove",
#                     instance=self.instance, reverse=self.reverse,
#                     model=self.model, pk_set=old_ids, using=db,
#                 )

#     return ManyRelatedManager
Example #16
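    # Prevent users from following themselves at the model level.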
    def save(self, *args, **kwargs):
        if self.follower_user == self.following_user:
            raise Error(
                "Attempted to create a follow object where follower_user == following_user"
            )
        super(Follower, self).save(*args, **kwargs)
Example #17
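    # Test helper, presumably assigned as a mocked cursor's execute
    # side effect to force a database Error.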
    def failing_execute_mock(query):
        raise Error("Mock Exception")
Example #18
    def generate_service(
        self,
        filename_or_pattern,
        variable_name,
        unique=True,
        has_colormap=True,
        model_set=None,
        model_id_lookup=None,
    ):
        """
        Creates an ncdjango service from a geotiff (stack).
        :param filename_or_pattern: A local filename or glob pattern.
        :param variable_name: The variable name of the data to assign to the ncdjango service.
        :param unique: Indicate whether to use a UniqueValuesRenderer or a StretchedRenderer
        :param has_colormap: Indicate whether the service has a colormap or not.
        :param model_set: The Queryset to filter over.
        :param model_id_lookup: A unique identifier used for specific datasets.
        :return: One (1) ncdjango service.
        """

        has_time = '*' in filename_or_pattern  # pattern if true, filename otherwise

        # Construct relative path for new netcdf file, relative to NC_ROOT. Used as 'data_path' in ncdjango.Service
        nc_rel_path = os.path.join(self.scenario.project.library.name,
                                   self.scenario.project.name,
                                   'Scenario-' + str(self.scenario.sid))
        if has_time:
            nc_rel_path = os.path.join(nc_rel_path, 'output',
                                       variable_name + '.nc')
        else:
            nc_rel_path = os.path.join(
                nc_rel_path, filename_or_pattern.replace('tif', 'nc'))

        # Absolute path where we want the new netcdf to live.
        nc_full_path = os.path.join(NC_ROOT, nc_rel_path)
        if not os.path.exists(os.path.dirname(nc_full_path)):
            os.makedirs(os.path.dirname(nc_full_path))

        variable_names = []

        # No patterns, so create a simple input raster
        if not has_time:
            self.convert_to_netcdf(
                os.path.join(self.scenario.input_directory,
                             filename_or_pattern), nc_full_path, variable_name)

        # Time series output pattern, convert to timeseries netcdf
        else:
            ctype = filename_or_pattern[:-4].split('-')[2:][0]
            assert ctype == CTYPE_HASH[variable_name]  # sanity check

            # collect all information to make valid patterns
            iterations = []
            timesteps = []
            ssim_ids = []

            glob_pattern = glob.glob(
                os.path.join(self.scenario.output_directory,
                             filename_or_pattern))
            glob_pattern.sort()
            for f in glob_pattern:

                it, ts, *ctype_id = f[:-4].split(os.sep)[-1].split('-')

                if it not in iterations:
                    iterations.append(it)  # E.g. ['It0001','It0002', ...]

                if ts not in timesteps:  # E.g. ['Ts0000','Ts0001', ...]
                    timesteps.append(ts)

                if len(ctype_id) > 1:
                    ssim_id = int(ctype_id[1])
                    if ssim_id not in ssim_ids:
                        ssim_ids.append(
                            ssim_id)  # E.g. ['tg', '93'], ['ta', '234'], etc.

                assert ctype == ctype_id[
                    0]  # pattern matching is way off (not sure this would even happen)

            # ssim_ids are internal, have to match with our system
            # service variables are <variable_name>-<inner_id>-<iteration>
            if len(ssim_ids):
                filename_patterns = []
                ssim_result = ssim_query(
                    'select * from ' + SSIM_TABLE[variable_name],
                    self.scenario.project.library)

                # Create valid hash map
                ssim_hash = {}
                for ssim_id in ssim_ids:
                    inner_id = None
                    for row in ssim_result:
                        # match primary key id and project id, and only create filename_patterns if a match is found
                        if ssim_id == row[0] and str(
                                self.scenario.project.pid) == str(row[1]):
                            name = row[2]
                            inner_id = INNER_TABLE[
                                variable_name].objects.filter(
                                    name__exact=name,
                                    project=self.scenario.project).first().id
                            break
                    if inner_id:
                        ssim_hash[ssim_id] = inner_id

                # Now build proper filename_patterns
                for it in iterations:
                    for ssim_id in ssim_ids:
                        filename_patterns.append(
                            os.path.join(
                                self.scenario.output_directory,
                                '{}-Ts*-{}-{}.tif'.format(it, ctype, ssim_id)))

                for pattern in filename_patterns:
                    pattern_id = ssim_hash[int(
                        pattern.split(os.sep)[-1].split('-')[-1][:-4])]
                    iteration_num = int(
                        pattern.split(os.sep)[-1].split('-')[0][2:])
                    iteration_var_name = '{variable_name}-{id}-{iteration}'.format(
                        variable_name=variable_name,
                        id=pattern_id,
                        iteration=iteration_num)
                    variable_names.append(iteration_var_name)
                    iteration_nc_file = os.path.join(
                        self.scenario.output_directory,
                        iteration_var_name + '.nc')
                    self.convert_to_netcdf(pattern, iteration_nc_file,
                                           iteration_var_name)

                merge_nc_pattern = os.path.join(self.scenario.output_directory,
                                                variable_name + '-*-*.nc')

            # no ids
            # service variables are <variable_name>-<iteration>
            else:
                filename_patterns = [
                    os.path.join(self.scenario.output_directory,
                                 '{}-Ts*-{}.tif'.format(it, ctype))
                    for it in iterations
                ]

                merge_nc_pattern = os.path.join(self.scenario.output_directory,
                                                variable_name + '-*.nc')

                for pattern in filename_patterns:
                    iteration_num = int(
                        pattern.split(os.sep)[-1].split('-')[0][2:])
                    iteration_var_name = '{variable_name}-{iteration}'.format(
                        variable_name=variable_name, iteration=iteration_num)
                    variable_names.append(iteration_var_name)
                    iteration_nc_file = os.path.join(
                        self.scenario.output_directory,
                        iteration_var_name + '.nc')
                    self.convert_to_netcdf(pattern, iteration_nc_file,
                                           iteration_var_name)

            self.merge_netcdf(merge_nc_pattern, nc_full_path)

        info = describe(nc_full_path)
        grid = info['variables'][variable_names[0] if len(variable_names) else
                                 variable_name]['spatial_grid']['extent']
        extent = BBox((grid['xmin'], grid['ymin'], grid['xmax'], grid['ymax']),
                      projection=pyproj.Proj(grid['proj4']))
        steps_per_variable = None
        t = None
        t_start = None
        t_end = None
        dimensions = list(info['dimensions'].keys())
        if 'x' in dimensions:
            x, y = ('x', 'y')
        else:
            x, y = ('lon', 'lat')
        if has_time:
            t = 'time'
            steps_per_variable = info['dimensions'][t]['length']
            t_start = datetime.datetime(2000, 1, 1)
            t_end = t_start + datetime.timedelta(1) * steps_per_variable

        try:
            service = Service.objects.create(name=uuid.uuid4(),
                                             data_path=nc_rel_path,
                                             projection=grid['proj4'],
                                             full_extent=extent,
                                             initial_extent=extent)

            if has_time and len(variable_names) and steps_per_variable:

                # Set required time fields
                service.supports_time = True
                service.time_start = t_start
                service.time_end = t_end
                service.time_interval = 1
                service.time_interval_units = 'days'
                service.calendar = 'standard'
                service.save()

            if unique:
                model = model_set or getattr(self.scenario.project,
                                             variable_name)
                unique_id_lookup = model_id_lookup or NAME_HASH[
                    variable_name] + '_id'
                try:
                    if has_colormap:
                        queryset = model.values_list('color', unique_id_lookup,
                                                     'name')
                        renderer = self.generate_unique_renderer(queryset)
                    else:
                        queryset = model.values_list(unique_id_lookup, 'name')
                        renderer = self.generate_unique_renderer(
                            queryset, randomize_colors=True)
                except Exception as exc:
                    raise AssertionError(
                        CREATE_RENDERER_ERROR_MSG.format(vname=variable_name)
                    ) from exc
            else:
                renderer = self.generate_stretched_renderer(info)

            if has_time and len(variable_names):

                for name in variable_names:
                    Variable.objects.create(service=service,
                                            index=variable_names.index(name),
                                            variable=name,
                                            projection=grid['proj4'],
                                            x_dimension=x,
                                            y_dimension=y,
                                            name=name,
                                            renderer=renderer,
                                            full_extent=extent,
                                            supports_time=True,
                                            time_dimension=t,
                                            time_start=t_start,
                                            time_end=t_end,
                                            time_steps=steps_per_variable)

            else:
                Variable.objects.create(service=service,
                                        index=0,
                                        variable=variable_name,
                                        projection=grid['proj4'],
                                        x_dimension=x,
                                        y_dimension=y,
                                        name=variable_name,
                                        renderer=renderer,
                                        full_extent=extent)

            return service

        except Exception as exc:
            raise Error(
                CREATE_SERVICE_ERROR_MSG.format(variable_name,
                                                self.scenario.sid)) from exc
Example #19
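# Django view that reads card and buyer details from a POST request, starts
# an iyzipay 3-D Secure payment, and returns the 3DS HTML content as JSON.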
def pay_init(request):
    response_data = {}
    if request.method == 'POST':
        try:
            ip_address = get_client_ip(request)

            name = request.POST.get("name")
            surname = request.POST.get("surname")
            email = request.POST.get("email")
            country = request.POST.get("country")
            city = request.POST.get("city")
            full_address = request.POST.get("full_address")
            credit_amount = request.POST.get("credit_amount")
            card_name = request.POST.get("card_name")
            card_number = request.POST.get("card_number").replace(" ", "")
            card_number = str(card_number)
            expiration_date = request.POST.get("expiration_date")
            expiration_date = expiration_date.split("/")
            expiration_month = expiration_date[0]
            expiration_year = expiration_date[1]
            security_code = request.POST.get("security_code")

            payment_card = {
                'cardHolderName': card_name,
                'cardNumber': card_number,
                'expireMonth': expiration_month,
                'expireYear': '20'+expiration_year,
                'cvc': security_code,
                'registerCard': '0'
            }

            buyer = {
                'id': 'BY789',
                'name': name,
                'surname': surname,
                # 'gsmNumber': '+905350000000',
                'email': email,
                'identityNumber': '74300864791',
                # 'lastLoginDate': '2015-10-05 12:43:35',
                # 'registrationDate': '2013-04-21 15:12:09',
                'registrationAddress': full_address,
                'ip': ip_address,
                'city': city,
                'country': country,
                # 'zipCode': '34732'
            }

            address = {
                'contactName': name + ' ' + surname,
                'city': city,
                'country': country,
                'address': full_address,
            }

            # default value for request
            basket_items = [
                {
                    'id': '1',
                    'name': 'KrediEkleme',
                    'category1': 'Kredi',
                    'itemType': 'VIRTUAL',
                    'price': '1'
                }
            ]

            request_iyzico = {
                'locale': 'tr',
                'conversationId': '123456789',
                'price': '1',
                'paidPrice': credit_amount,
                'currency': 'TRY',
                'installment': '1',
                'basketId': 'B67832',
                'paymentChannel': 'WEB',
                'paymentGroup': 'PRODUCT',
                'paymentCard': payment_card,
                "callbackUrl": "/",
                'buyer': buyer,
                'shippingAddress': address,
                'billingAddress': address,
                'basketItems': basket_items
            }

            threeds_initialize = iyzipay.ThreedsInitialize().create(request_iyzico, settings.IYZICO_OPTIONS)

            raw_response = threeds_initialize.read().decode('utf-8')
            status_content = json.loads(raw_response)
            response_data['veri'] = status_content["threeDSHtmlContent"]
            if status_content["status"] == "failure":
                raise Error('The payment could not be initialized. Error = {}'.format(status_content["errorMessage"]))
        except Error as e:
            # TODO: Log here
            response_data['error'] = True
            response_data['result'] = e.args[0]

        return HttpResponse(
            json.dumps(response_data),
            content_type="application/json"
        )
    else:
        context = {
            'test': 'test',
            'page_info_title': _('Payment'),
        }
        return render(request, 'base_3ds.html', context)
Example #20
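    # Verification codes only make sense for inactive users; refuse to create
    # one for an account that is already active.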
    def save(self, *args, **kwargs):
        if self.user.is_active:
            raise Error(
                "Attempted to create a verification code for active user")

        return super(VerificationCode, self).save(*args, **kwargs)