Example 1
0
    def handleRepoWithCounts(self, dbrepo, hgrepo, dbcount, hgcount):
        """Sync missing changeset links from an hg repository into the db.

        Compares the changeset counts: if the database already records at
        least as many changesets as hg, nothing is done.  Otherwise the
        missing repository/changeset links are bulk-inserted into the M2M
        through table with a raw ``executemany`` for speed.

        :param dbrepo: database Repository model instance
        :param hgrepo: mercurial repository object
        :param dbcount: number of changesets recorded in the database
        :param hgcount: number of changesets present in hg
        """
        if dbcount >= hgcount:
            # nothing to be done
            self.verbose("%s\tin good shape" % dbrepo.name)
            return
        missing = hgcount - dbcount
        cnt = 0
        through = dbrepo.changesets.through
        using = router.db_for_write(dbrepo.__class__, instance=dbrepo)
        connection = connections[using]
        # Build the parameterized INSERT for the through table once up
        # front; it is reused with executemany for every chunk below.
        ins = InsertQuery(through)
        ins.insert_values([(through.repository.field, None),
                           (through.changeset.field, None)])
        comp = ins.get_compiler(using)
        comp.return_id = False  # plain INSERT; no need to fetch the new id
        sqlinsert, _params = comp.as_sql()

        self.verbose("%s\t%d missing" % (dbrepo.name, missing))
        for revisions in self.chunk(self.nodes(hgrepo)):
            self.progress()
            with transaction.commit_on_success(using=using):
                # Changesets known to the db but not yet linked to dbrepo.
                cs = Changeset.objects.filter(revision__in=revisions)
                cs = cs.exclude(repositories=dbrepo)
                csids = list(cs.values_list('id', flat=True))
                if not csids:
                    continue
                vals = [(dbrepo.id, csid) for csid in csids]
                connection.cursor().executemany(sqlinsert, vals)
                # Raw cursor writes bypass the ORM, so flag the transaction
                # as dirty by hand to make sure it actually commits.
                transaction.set_dirty(using)
                cnt += len(csids)
        self.normal("%s\tadded %d changesets" % (dbrepo.name, cnt))
        return
Example 2
0
    def make_batch(N, jt_pos=0):
        """Create ``N`` jobs modeled on an existing JobTemplate.

        Picks the job template at ``jt_pos`` (wrapping around ``jt_count``),
        copies its concrete field values as defaults, bulk-creates the
        ``UnifiedJob`` parent rows, and then inserts the matching ``Job``
        child rows with a single raw INSERT for speed.

        :param N: number of jobs to create
        :param jt_pos: starting index into the job template list
        :return: tuple of (last created job, next template index)
        """
        jt = None
        while not jt:
            try:
                jt = JobTemplate.objects.all()[jt_pos % jt_count]
            except IndexError as e:
                # seems to happen every now and then due to some race condition
                print('Warning: IndexError on {} JT, error: {}'.format(
                    jt_pos % jt_count, e))
            jt_pos += 1
        # Copy every concrete, truthy template field that also exists on
        # Job, so the created jobs mirror the template's configuration.
        jt_defaults = dict((f.attname, getattr(jt, f.attname))
                           for f in JobTemplate._meta.get_fields()
                           if f.concrete and f.attname in job_field_names
                           and getattr(jt, f.attname))
        jt_defaults['job_template_id'] = jt.pk
        jt_defaults[
            'unified_job_template_id'] = jt.pk  # populated by save method

        jobs = [
            Job(status=STATUS_OPTIONS[i % len(STATUS_OPTIONS)],
                started=now() - time_delta,
                created=now() - time_delta,
                modified=now() - time_delta,
                finished=now() - time_delta,
                elapsed=0.,
                **jt_defaults) for i in range(N)
        ]
        ujs = UnifiedJob.objects.bulk_create(jobs)
        # Use a distinct name for the InsertQuery so the generated SQL
        # string does not shadow the query object (the original rebound
        # ``query`` to the SQL text mid-function).
        insert_query = InsertQuery(Job)
        insert_query.insert_values(fields, ujs)
        sql, params = insert_query.sql_with_params()[0]
        with connection.cursor() as cursor:
            cursor.execute(sql, params)
        return ujs[-1], jt_pos
Example 3
0
    def handleRepoWithCounts(self, dbrepo, hgrepo, dbcount, hgcount):
        """Sync missing changeset links from an hg repository into the db.

        Compares the changeset counts: if the database already records at
        least as many changesets as hg, nothing is done.  Otherwise the
        missing repository/changeset links are bulk-inserted into the M2M
        through table with a raw ``executemany`` for speed.

        :param dbrepo: database Repository model instance
        :param hgrepo: mercurial repository object
        :param dbcount: number of changesets recorded in the database
        :param hgcount: number of changesets present in hg
        """
        if dbcount >= hgcount:
            # nothing to be done
            self.verbose("%s\tin good shape" % dbrepo.name)
            return
        missing = hgcount - dbcount
        cnt = 0
        through = dbrepo.changesets.through
        using = router.db_for_write(dbrepo.__class__, instance=dbrepo)
        connection = connections[using]
        # Build the parameterized INSERT for the through table once up
        # front; it is reused with executemany for every chunk below.
        ins = InsertQuery(through)
        ins.insert_values([(through.repository.field, None),
                           (through.changeset.field, None)])
        comp = ins.get_compiler(using)
        comp.return_id = False  # plain INSERT; no need to fetch the new id
        sqlinsert, _params = comp.as_sql()

        self.verbose("%s\t%d missing" % (dbrepo.name, missing))
        for revisions in self.chunk(self.nodes(hgrepo)):
            self.progress()
            with transaction.commit_on_success(using=using):
                # Changesets known to the db but not yet linked to dbrepo.
                cs = Changeset.objects.filter(revision__in=revisions)
                cs = cs.exclude(repositories=dbrepo)
                csids = list(cs.values_list('id', flat=True))
                if not csids:
                    continue
                vals = [(dbrepo.id, csid) for csid in csids]
                connection.cursor().executemany(sqlinsert, vals)
                # Raw cursor writes bypass the ORM, so flag the transaction
                # as dirty by hand to make sure it actually commits.
                transaction.set_dirty(using)
                cnt += len(csids)
        self.normal("%s\tadded %d changesets" % (dbrepo.name, cnt))
        return
Example 4
0
 def test_sql_insert_compiler_return_id_attribute(self):
     """
     Regression test for #14019: SQLInsertCompiler.as_sql() failure
     """
     target_db = router.db_for_write(Party)
     insert = InsertQuery(Party)
     insert.insert_values([Party._meta.fields[0]], [], raw=False)
     # compiling used to raise AttributeError before the accompanying fix
     insert.get_compiler(using=target_db).as_sql()
Example 5
0
def test_fallback_serialization(val):
    """
    Verify that the non-GIS fallback point field really serializes into the
    semicolon-separated format all the way down at the SQL level.
    """
    instance = ModelUsingFallback(f=val)
    insert = InsertQuery(ModelUsingFallback)
    insert.insert_values(ModelUsingFallback._meta.get_fields(), [instance])
    compiler = insert.get_compiler(using="default")
    assert isinstance(compiler, SQLInsertCompiler)
    compiler.return_id = True  # prevent bulk
    _sql, params = compiler.as_sql()[0]
    assert "60.0;22.0" in params
Example 6
0
def test_fallback_serialization(val):
    """
    Check at the SQL level that the non-GIS fallback point field is
    serialized using the semicolon-separated representation.
    """
    model_cls = ModelUsingFallback
    insert = InsertQuery(model_cls)
    insert.insert_values(model_cls._meta.get_fields(), [model_cls(f=val)])
    compiler = insert.get_compiler(using="default")
    assert isinstance(compiler, SQLInsertCompiler)
    compiler.return_id = True  # prevent bulk
    params = compiler.as_sql()[0][1]
    assert "60.0;22.0" in params
Example 7
0
 def make_batch(N, **extra):
     """Bulk-create ``N`` canceled jobs and return the last one.

     Creates the ``UnifiedJob`` parent rows via ``bulk_create`` and then
     inserts the matching ``Job`` child rows with one raw INSERT for speed.

     :param N: number of jobs to create
     :param extra: additional field values passed through to ``Job``
     :return: the last created job
     """
     jobs = [
         Job(status='canceled',
             created=now(),
             modified=now(),
             elapsed=0.,
             **extra) for _ in range(N)
     ]
     ujs = UnifiedJob.objects.bulk_create(jobs)
     # Use a distinct name for the InsertQuery so the generated SQL string
     # does not shadow the query object (the original rebound ``query`` to
     # the SQL text mid-function).
     insert_query = InsertQuery(Job)
     insert_query.insert_values(fields, ujs)
     sql, params = insert_query.sql_with_params()[0]
     with connection.cursor() as cursor:
         cursor.execute(sql, params)
     return ujs[-1]
Example 8
0
 def save(self, model):
     """Write an INSERT statement for *model* to the output stream.

     Adapted from the django query compiler (private api).

     :param model: model instance to render as a SQL INSERT
     """
     # Clusters without a primary comment are deliberately skipped.
     if isinstance(model, Cluster) and model.primary_comment is None:
         return
     query = InsertQuery(model)
     meta = query.model._meta
     # Pair each local field with its db-ready value for this instance.
     values = [
         (f, f.get_db_prep_save(f.pre_save(model, True), connection=self.connection)) for f in meta.local_fields
     ]
     query.insert_values(values)
     # insert_values() populates query.columns / query.params (old private
     # Django API); render them into one literal INSERT statement.
     result = [
         "INSERT INTO %s" % self.quote(meta.db_table),
         "(%s)" % ", ".join([self.quote(c) for c in query.columns]),
         "VALUES (%s)" % ", ".join([self.escape(p) for p in query.params]),
     ]
     self.out(" ".join(result))
     self.out(";\n")
Example 9
0
 def test_sql_insert_compiler_return_id_attribute(self):
     """
     Regression test for #14019: SQLInsertCompiler.as_sql() failure
     """
     db = router.db_for_write(Party)
     query = InsertQuery(Party)
     # a single field with an empty values list exercised the buggy path
     query.insert_values([Party._meta.fields[0]], [], raw=False)
     # this line will raise an AttributeError without the accompanying fix
     query.get_compiler(using=db).as_sql()
 def get_query(cls,
               field,
               with_table=False,
               for_update=True):  # type: (Field, bool, bool) -> Query
     """
     Build the django query used for the current SQL generation step.
     :param field: Field for which to get query
     :param with_table: If flag is set, column name in sql will be prefixed by table name
     :param for_update: If flag is set, update query is generated. Otherwise - insert query
     :return: Query instance
     """
     # alias_cols only exists on Query from django 3.1 onwards
     if django.VERSION >= (3, 1):
         kwargs = {'alias_cols': with_table}
     else:
         kwargs = {}
     query_cls = UpdateQuery if for_update else InsertQuery
     return query_cls(field.model, **kwargs)