def test_batch_delete_if_exists_success(self):
        """
        Tests that batch deletes with if_exists work, and throw a proper LWTException when they are not applied

        @since 3.1
        @jira_ticket PYTHON-432
        @expected_result Deletes will be performed if the rows exist, otherwise an LWTException is thrown

        @test_category object_mapper
        """

        id = uuid4()

        m = TestIfExistsModel.create(id=id, count=8, text='123456789')

        with BatchQuery() as b:
            m.batch(b).if_exists().delete()

        q = TestIfExistsModel.objects(id=id)
        self.assertEqual(len(q), 0)

        with self.assertRaises(LWTException) as assertion:
            with BatchQuery() as b:
                m = TestIfExistsModel(id=uuid4(), count=42)  # Doesn't exist
                m.batch(b).if_exists().delete()

        self.assertEqual(assertion.exception.existing, {
            '[applied]': False,
        })
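For reference, a minimal sketch of the model the if_exists tests above assume; the actual definition lives in the driver's test suite and may differ.

from uuid import uuid4
from cassandra.cqlengine import columns
from cassandra.cqlengine.models import Model

class TestIfExistsModel(Model):
    # primary key defaults to a random UUID so create() works without an id
    id = columns.UUID(primary_key=True, default=uuid4)
    count = columns.Integer()
    text = columns.Text(required=False)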
    def test_batch_query_connection_override(self):
        """
        Test that we cannot override a BatchQuery connection per model

        @since 3.7
        @jira_ticket PYTHON-613
        @expected_result Proper exceptions should be raised

        @test_category object_mapper
        """

        with self.assertRaises(CQLEngineException):
            with BatchQuery(connection='cluster') as b:
                TestModel.batch(b).using(connection='test').save()

        with self.assertRaises(CQLEngineException):
            with BatchQuery(connection='cluster') as b:
                TestModel.using(connection='test').batch(b).save()

        with ContextQuery(TestModel, AnotherTestModel,
                          connection='cluster') as (tm, atm):
            obj1 = tm.objects.get(partition=1, cluster=1)
            obj1.__connection__ = None

        with self.assertRaises(CQLEngineException):
            with BatchQuery(connection='cluster') as b:
                obj1.using(connection='test').batch(b).save()

        with self.assertRaises(CQLEngineException):
            with BatchQuery(connection='cluster') as b:
                obj1.batch(b).using(connection='test').save()
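The connection-override tests above assume that named connections ('cluster', 'test') have already been registered; a hedged setup sketch with placeholder hosts:

from cassandra.cqlengine import connection

# register the named connections referenced by the tests (hosts are placeholders)
connection.register_connection('cluster', hosts=['127.0.0.1'])
connection.register_connection('cluster2', hosts=['127.0.0.2'])
connection.register_connection('test', hosts=['127.0.0.3'])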
Example #3
    def test_batch_insert_if_not_exists(self):
        """ tests that batch insertion with if_not_exists work as expected """

        id = uuid4()

        with BatchQuery() as b:
            TestIfNotExistsModel.batch(b).if_not_exists().create(
                id=id, count=8, text='123456789')

        b = BatchQuery()
        TestIfNotExistsModel.batch(b).if_not_exists().create(
            id=id, count=9, text='111111111111')
        with self.assertRaises(LWTException) as assertion:
            b.execute()

        self.assertEqual(assertion.exception.existing, {
            'count': 8,
            'id': id,
            'text': '123456789',
            '[applied]': False,
        })

        q = TestIfNotExistsModel.objects(id=id)
        self.assertEqual(len(q), 1)

        tm = q.first()
        self.assertEqual(tm.count, 8)
        self.assertEqual(tm.text, '123456789')
Example #4
    def test_cqlengine_batch_type(self):
        """
        Tests the different types of :class:`cassandra.cqlengine.query.BatchType`

        @since 3.13
        @jira_ticket PYTHON-88
        @expected_result batch query succeeds and the results are correctly read

        @test_category query
        """
        with BatchQuery(batch_type=cqlengine_BatchType.Unlogged) as b:
            TestMultiKeyModel.batch(b).create(partition=1, cluster=1)
            TestMultiKeyModel.batch(b).create(partition=1, cluster=2)

        obj = TestMultiKeyModel.objects(partition=1)
        self.assertEqual(2, len(obj))

        with BatchQuery(batch_type=cqlengine_BatchType.Counter) as b:
            CounterBatchQueryModel.batch(b).create(k=1, v=1)
            CounterBatchQueryModel.batch(b).create(k=1, v=2)
            CounterBatchQueryModel.batch(b).create(k=1, v=10)

        obj = CounterBatchQueryModel.objects(k=1)
        self.assertEqual(1, len(obj))
        self.assertEqual(obj[0].v, 13)
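For reference, a minimal sketch of the counter model the second batch above assumes (counter batches may only contain counter updates); the actual definition may differ.

from cassandra.cqlengine import columns
from cassandra.cqlengine.models import Model

class CounterBatchQueryModel(Model):
    k = columns.Integer(primary_key=True)
    v = columns.Counter()  # counter column: increments accumulate, so 1 + 2 + 10 == 13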
Example #5
def import_data():

    connection.setup(['127.0.0.1'], "geonames", protocol_version=3)
    fieldnames = [col for col in Geoname().__dict__['_values']]

    with open("geonames/allCountries.txt", encoding='utf8',
              newline='') as csvfile:

        # Create the DictReader
        csv.register_dialect('geoname', delimiter='\t', quoting=csv.QUOTE_NONE)
        reader = csv.DictReader(csvfile,
                                dialect='geoname',
                                fieldnames=fieldnames)

        # Batch ingestion
        count = 0
        batch = BatchQuery()
        for row in reader:
            new_row = clean_row(row)
            Geoname.batch(batch).create(**new_row)
            count += 1
            if not count % 1000:
                batch.execute()
                batch = BatchQuery()
                logger.info('Imported: {}'.format(count))

        batch.execute()
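An alternative hedged sketch of the same chunked-ingest loop, using BatchQuery as a context manager so each chunk is executed on exit; import_in_chunks and chunk_size are illustrative names, not part of the original code.

import itertools

def import_in_chunks(rows, chunk_size=1000):
    # iterate lazily so very large files are never fully loaded into memory
    it = iter(rows)
    while True:
        chunk = list(itertools.islice(it, chunk_size))
        if not chunk:
            break
        # the context manager executes the batch when the block exits
        with BatchQuery() as batch:
            for row in chunk:
                Geoname.batch(batch).create(**clean_row(row))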
Example #6
    def test_batch_query_different_connection(self):
        """Test BatchQuery with Models that have a different connection"""

        # Testing on a model class
        TestModel.__connection__ = 'cluster'
        AnotherTestModel.__connection__ = 'cluster2'

        with self.assertRaises(CQLEngineException):
            with BatchQuery() as b:
                TestModel.objects.batch(b).create(partition=1, cluster=1)
                AnotherTestModel.objects.batch(b).create(partition=1, cluster=1)

        TestModel.__connection__ = None
        AnotherTestModel.__connection__ = None

        with BatchQuery(connection='cluster') as b:
            TestModel.objects.batch(b).create(partition=1, cluster=1)
            AnotherTestModel.objects.batch(b).create(partition=1, cluster=1)

        # Testing on a model instance
        with ContextQuery(TestModel, AnotherTestModel, connection='cluster') as (tm, atm):
            obj1 = tm.objects.get(partition=1, cluster=1)
            obj2 = atm.objects.get(partition=1, cluster=1)

            obj1.__connection__ = 'cluster'
            obj2.__connection__ = 'cluster2'

            obj1.count = 4
            obj2.count = 4

        with self.assertRaises(CQLEngineException):
            with BatchQuery() as b:
                obj1.batch(b).save()
                obj2.batch(b).save()
    def test_batch_update_if_exists_success(self):
        """
        Tests that batch updates with if_exists work as expected

        @since 3.1
        @jira_ticket PYTHON-432
        @expected_result Updates will be performed if the row exists, otherwise an LWTException is thrown

        @test_category object_mapper
        """

        id = uuid4()

        m = TestIfExistsModel.create(id=id, count=8, text='123456789')

        with BatchQuery() as b:
            m.text = '111111111'
            m.batch(b).if_exists().update()

        with self.assertRaises(LWTException) as assertion:
            with BatchQuery() as b:
                m = TestIfExistsModel(id=uuid4(), count=42)  # Doesn't exist
                m.batch(b).if_exists().update()

        self.assertEqual(assertion.exception.existing, {
            '[applied]': False,
        })

        q = TestIfExistsModel.objects(id=id)
        self.assertEqual(len(q), 1)

        tm = q.first()
        self.assertEqual(tm.count, 8)
        self.assertEqual(tm.text, '111111111')
    def test_basic_batch_query(self):
        """
        Test Batch queries with connections explicitly set

        @since 3.7
        @jira_ticket PYTHON-613
        @expected_result queries should execute appropriately

        @test_category object_mapper
        """

        # No connection with a QuerySet (default is a fake one)
        with self.assertRaises(NoHostAvailable):
            with BatchQuery() as b:
                TestModel.objects.batch(b).create(partition=1, cluster=1)

        # Explicit connection with a QuerySet
        with BatchQuery(connection='cluster') as b:
            TestModel.objects.batch(b).create(partition=1, cluster=1)

        # Get an object from the DB
        with ContextQuery(TestModel, connection='cluster') as tm:
            obj = tm.objects.get(partition=1, cluster=1)
            obj.__connection__ = None

        # No connection with a model (default is a fake one)
        with self.assertRaises(NoHostAvailable):
            with BatchQuery() as b:
                obj.count = 2
                obj.batch(b).save()

        # Explicit connection with a model
        with BatchQuery(connection='cluster') as b:
            obj.count = 2
            obj.batch(b).save()
Example #9
    def test_basic_batch_query(self):
        """Test BatchQuery requests"""

        # No connection with a QuerySet (default is a fake one)
        with self.assertRaises(NoHostAvailable):
            with BatchQuery() as b:
                TestModel.objects.batch(b).create(partition=1, cluster=1)

        # Explicit connection with a QuerySet
        with BatchQuery(connection='cluster') as b:
            TestModel.objects.batch(b).create(partition=1, cluster=1)

        # Get an object from the DB
        with ContextQuery(TestModel, connection='cluster') as tm:
            obj = tm.objects.get(partition=1, cluster=1)
            obj.__connection__ = None

        # No connection with a model (default is a fake one)
        with self.assertRaises(NoHostAvailable):
            with BatchQuery() as b:
                obj.count = 2
                obj.batch(b).save()

        # Explicit connection with a model
        with BatchQuery(connection='cluster') as b:
            obj.count = 2
            obj.batch(b).save()
Example #10
    def test_callbacks_tied_to_execute(self):
        """Batch callbacks should NOT fire if batch is not executed in context manager mode"""

        call_history = []

        def my_callback(*args, **kwargs):
            call_history.append(args)

        with BatchQuery() as batch:
            batch.add_callback(my_callback)

        self.assertEqual(len(call_history), 1)

        class SomeError(Exception):
            pass

        with self.assertRaises(SomeError):
            with BatchQuery() as batch:
                batch.add_callback(my_callback)
                # this error bubbling up through context manager
                # should prevent callback runs (along with b.execute())
                raise SomeError

        # still same call history. Nothing added
        self.assertEqual(len(call_history), 1)

        # but if execute ran, even with an error bubbling through
        # the callbacks also would have fired
        with self.assertRaises(SomeError):
            with BatchQuery(execute_on_exception=True) as batch:
                batch.add_callback(my_callback)
                raise SomeError

        # updated call history
        self.assertEqual(len(call_history), 2)
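A small hedged sketch complementing the test above: add_callback forwards any extra positional and keyword arguments to the callback once the batch executes (assuming the add_callback(fn, *args, **kwargs) signature); the names below are illustrative only.

call_args_history = []

def my_callback_with_args(*args, **kwargs):
    call_args_history.append((args, kwargs))

with BatchQuery() as batch:
    # register the callback along with the arguments it should receive
    batch.add_callback(my_callback_with_args, 1, two=2)

# the callback fired on context exit with the registered arguments
assert call_args_history == [((1,), {'two': 2})]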
    def test_batch_update_transaction(self):
        t = TestTransactionModel.create(text='something', count=5)
        id = t.id
        with BatchQuery() as b:
            t.batch(b).iff(count=5).update(text='something else')

        updated = TestTransactionModel.objects(id=id).first()
        self.assertEqual(updated.text, 'something else')

        b = BatchQuery()
        updated.batch(b).iff(count=6).update(text='and another thing')
        self.assertRaises(LWTException, b.execute)

        updated = TestTransactionModel.objects(id=id).first()
        self.assertEqual(updated.text, 'something else')
Example #12
def import_csv(csv_path, batch=False, chunksize=500, skiprows=None):
    start_time = dt.datetime.today().timestamp()

    for df in pd.read_csv(csv_path,
                          delimiter='\t',
                          encoding='utf-8',
                          dtype=dtype,
                          converters=converters,
                          chunksize=chunksize,
                          skiprows=skiprows,
                          error_bad_lines=False,
                          warn_bad_lines=True):

        print("transform")
        df = df.where((pd.notnull(df)), None)
        print("importing rows {} to {}".format(df.index.min(), df.index.max()))

        if batch:
            b = BatchQuery()

        for i, row in df.iterrows():
            try:
                print(u"DOING {} ; {}".format(i, row['code'].encode('utf-8')))

                if row['code'] is None or len(row['code'].strip()) == 0:
                    print("error with line {0} : code = '{1}'".format(
                        i, row['code'].encode('utf-8')))
                    continue

                row_converted = {
                    Product._get_column_by_db_name(cql_name).column_name: value
                    for cql_name, value in row.items()
                    if not cql_name.endswith('_datetime') and value is not None
                    and (type(value) != str or len(value) > 0)
                }

                if batch:
                    Product.batch(b).create(**row_converted)
                else:
                    Product.create(**row_converted)

                print(u"DONE {} ; {}".format(i, row['code'].encode('utf-8')))
            except Exception:
                print(u"EXCEPTION {} ; {}".format(i,
                                                  row['code'].encode('utf-8')))
                exc_type, exc_value, exc_traceback = sys.exc_info()
                traceback.print_exception(exc_type,
                                          exc_value,
                                          exc_traceback,
                                          limit=10,
                                          file=sys.stderr)

        if batch:
            print("executing batch for rows {} to {}".format(
                df.index.min(), df.index.max()))
            b.execute()

        time_diff = dt.datetime.today().timestamp() - start_time
        print("TIMING {} rows/s".format(chunksize / time_diff))
        start_time = dt.datetime.today().timestamp()
def main():
    connection.default()

    # Management functions would normally be used in development, and possibly for deployments.
    # They are typically not part of a core application.
    log.info("### creating keyspace...")
    management.create_keyspace_simple(KEYSPACE, 1)
    log.info("### syncing model...")
    management.sync_table(FamilyMembers)

    # default uuid is assigned
    simmons = FamilyMembers.create(surname='Simmons', name='Gene', birth_year=1949, sex='m')

    # add members to his family later
    FamilyMembers.create(id=simmons.id, surname='Simmons', name='Nick', birth_year=1989, sex='m')
    sophie = FamilyMembers.create(id=simmons.id, surname='Simmons', name='Sophie', sex='f')

    nick = FamilyMembers.objects(id=simmons.id, surname='Simmons', name='Nick')
    try:
        nick.iff(birth_year=1988).update(birth_year=1989)
    except LWTException:
        print "precondition not met"

    # showing validation
    try:
        FamilyMembers.create(id=simmons.id, surname='Tweed', name='Shannon', birth_year=1957, sex='f')
    except ValidationError:
        log.exception('INTENTIONAL VALIDATION EXCEPTION; Failed creating instance:')
        FamilyMembers.create(id=simmons.id, surname='Tweed', name='Shannon', sex='f')

    log.info("### add multiple as part of a batch")
    # If creating many at one time, can use a batch to minimize round-trips
    hogan_id = uuid4()
    with BatchQuery() as b:
        FamilyMembers.batch(b).create(id=hogan_id, surname='Hogan', name='Hulk', sex='m')
        FamilyMembers.batch(b).create(id=hogan_id, surname='Hogan', name='Linda', sex='f')
        FamilyMembers.batch(b).create(id=hogan_id, surname='Hogan', name='Nick', sex='m')
        FamilyMembers.batch(b).create(id=hogan_id, surname='Hogan', name='Brooke', sex='f')

    log.info("### All members")
    for m in FamilyMembers.all():
        print(m, m.birth_year, m.sex)

    log.info("### Select by partition key")
    for m in FamilyMembers.objects(id=simmons.id):
        print(m, m.birth_year, m.sex)

    log.info("### Constrain on clustering key")
    for m in FamilyMembers.objects(id=simmons.id, surname=simmons.surname):
        print(m, m.birth_year, m.sex)

    log.info("### Constrain on clustering key")
    kids = FamilyMembers.objects(id=simmons.id, surname=simmons.surname, name__in=['Nick', 'Sophie'])

    log.info("### Delete a record")
    FamilyMembers(id=hogan_id, surname='Hogan', name='Linda').delete()
    for m in FamilyMembers.objects(id=hogan_id):
        print(m, m.birth_year, m.sex)

    management.drop_keyspace(KEYSPACE)
Example #14
    def registration():
        reg_form = RegistrationForm()
        if reg_form.validate_on_submit():
            # handle registration here
            try:
                user_from_db = UserCredentials.objects.get(username=reg_form.username.data)
                flash('Username already taken')
                return redirect(url_for('index'))
            except UserCredentials.DoesNotExist:
                pass

            # create the new user and their credentials in a single batch;
            # the batch is executed when the context manager exits
            with BatchQuery() as b:
                new_user = Users.batch(b).create(username=reg_form.username.data,
                                                 first_name=reg_form.first_name.data,
                                                 last_name=reg_form.last_name.data,
                                                 created_date=datetime.datetime.now(datetime.timezone.utc))
                UserCredentials.batch(b).create(username=reg_form.username.data,
                                                password=sha256_crypt.hash(reg_form.password.data),
                                                uuid=new_user.uuid)

            flash('User created')
            return redirect(url_for('index'))
        else:
            return render_template('registration.html', form=reg_form)
Example #15
def populate_customer_invoice_table():
    sync_table(CustomerInvoice)
    customer_invoice = pd.read_csv(CUSTOMER_INVOICE_TABLE_FILE_NAME)
    # Insert data into Cassandra table in 100 row batches
    batch_size = 100
    batch_current_file_count = 0
    batch_manager = BatchQuery()
    for index, row in tqdm(customer_invoice.iterrows(),
                           total=customer_invoice.shape[0]):
        CustomerInvoice.batch(batch_manager) \
            .create(customer_id=int(row['customer_id']),
                    invoice_date=datetime.strptime(row['invoice_date'], "%Y-%m-%d %H:%M:%S"),
                    product_code=str(row['product_code']),
                    invoice_id=row['invoice_id'],
                    customer_email=row['customer_email'],
                    customer_phone_number=str(row['customer_phone_number']),
                    customer_country=row['customer_country'],
                    customer_postcode=str(row['customer_postcode']),
                    customer_house_number=str(row['customer_house_number']),
                    customer_has_loyalty_card=row['customer_has_loyalty_card'],
                    product_description=row['product_description'],
                    product_unit_price=row['product_unit_price'],
                    product_quantity=row['product_quantity'],
                    invoice_total=row['invoice_total'])
        batch_current_file_count += 1
        if batch_current_file_count == batch_size:
            batch_manager.execute()
            batch_current_file_count = 0
    batch_manager.execute()
Example #16
def seen(request):
    # Record stats for items marked as seen on a mobile device
    # For workload purposes we ignore the posted data, and instead generate
    # some random data of our own, cached in memcached
    global SAMPLE_COUNT
    should_profile = False

    if settings.PROFILING:
        SAMPLE_COUNT += 1
        if SAMPLE_COUNT >= settings.SAMPLE_RATE:
            SAMPLE_COUNT = 0
            should_profile = True

    bundleids = cache.get('bundleids')
    if bundleids is None:
        bundleids = [uuid.uuid4() for _ in range(1000)]
        cache.set('bundleids', bundleids, 24 * 60 * 60)
    entryids = cache.get('entryids')
    if entryids is None:
        entryids = [uuid.uuid4() for _ in range(10000)]
        cache.set('entryids', entryids, 24 * 60 * 60)

    with statsd.pipeline() as pipe, BatchQuery() as b:
        for bundleid in random.sample(bundleids, random.randrange(3)):
            if should_profile:
                pipe.incr('workloadoutput.bundle.{}.seen'.format(bundleid.hex))
            for entryid in random.sample(entryids, random.randrange(5)):
                if should_profile:
                    pipe.incr('workloadoutput.bundle.{}.{}.seen'.format(
                        bundleid.hex, entryid.hex))
                BundleSeenModel(userid=request.user.id,
                                bundleid=bundleid,
                                entryid=entryid).batch(b).save()

    return HttpResponse(json.dumps({}), content_type='text/json')
Example #17
    def test_batch_if_not_exists(self):
        """ ensure 'IF NOT EXISTS' exists in statement when in batch """
        with mock.patch.object(self.session, 'execute') as m:
            with BatchQuery() as b:
                TestIfNotExistsModel.batch(b).if_not_exists().create(count=8)

        self.assertIn("IF NOT EXISTS", m.call_args[0][0].query_string)
Example #18
    def test_batch_consistency(self):

        with mock.patch.object(self.session, 'execute') as m:
            with BatchQuery(consistency=CL.ALL) as b:
                TestConsistencyModel.batch(b).create(text="monkey")

        args = m.call_args

        self.assertEqual(CL.ALL, args[0][0].consistency_level)

        with mock.patch.object(self.session, 'execute') as m:
            with BatchQuery() as b:
                TestConsistencyModel.batch(b).create(text="monkey")

        args = m.call_args
        self.assertNotEqual(CL.ALL, args[0][0].consistency_level)
    def test_batch_update_conditional_several_rows(self):
        sync_table(TestUpdateModel)
        self.addCleanup(drop_table, TestUpdateModel)

        first_row = TestUpdateModel.create(partition=1,
                                           cluster=1,
                                           value=5,
                                           text="something")
        second_row = TestUpdateModel.create(partition=1,
                                            cluster=2,
                                            value=5,
                                            text="something")

        b = BatchQuery()
        TestUpdateModel.batch(b).if_not_exists().create(partition=1,
                                                        cluster=1,
                                                        value=5,
                                                        text='something else')
        TestUpdateModel.batch(b).if_not_exists().create(partition=1,
                                                        cluster=2,
                                                        value=5,
                                                        text='something else')
        TestUpdateModel.batch(b).if_not_exists().create(partition=1,
                                                        cluster=3,
                                                        value=5,
                                                        text='something else')

        # The response will be more than two rows because two of the inserts will fail
        with self.assertRaises(LWTException):
            b.execute()

        first_row.delete()
        second_row.delete()
        b.execute()
Example #20
    def test_instance_update_in_batch(self):
        with mock.patch.object(self.session, "execute") as m:
            with BatchQuery() as b:
                self.instance.batch(b).timestamp(timedelta(seconds=30)).update(count=2)

        query = m.call_args[0][0].query_string
        "USING TIMESTAMP".should.be.within(query)
Example #21
    def test_dml_none_success_case(self):
        """ Tests that passing None into the batch call clears any batch object """
        b = BatchQuery()

        q = DMLQuery(TestMultiKeyModel, batch=b)
        assert q._batch == b

        q.batch(None)
        assert q._batch is None
Example #22
    def test_batch(self):
        with mock.patch.object(self.session, "execute") as m:
            with BatchQuery() as b:
                TestTimestampModel.timestamp(timedelta(seconds=10)).batch(b).create(count=1)

        query = m.call_args[0][0].query_string

        query.should.match(r"INSERT.*USING TIMESTAMP")
        query.should_not.match(r"TIMESTAMP.*INSERT")
Example #23
    def test_batch_execute_no_timeout(self):
        with mock.patch.object(Session, 'execute',
                               autospec=True) as mock_execute:
            with BatchQuery() as b:
                BatchQueryLogModel.batch(b).create(k=2, v=2)
            mock_execute.assert_called_once_with(mock.ANY,
                                                 mock.ANY,
                                                 mock.ANY,
                                                 timeout=NOT_SET)
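A brief hedged companion sketch: BatchQuery also accepts an explicit timeout (in seconds), which is then passed through to Session.execute in place of NOT_SET.

# request a 5-second timeout for this batch; model and values are illustrative
with BatchQuery(timeout=5.0) as b:
    BatchQueryLogModel.batch(b).create(k=3, v=3)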
Example #24
    def test_dml_none_success_case(self):
        """ Tests that passing None into the batch call clears any batch object """
        b = BatchQuery()

        q = DMLQuery(TestMultiKeyModel, batch=b)
        self.assertEqual(q._batch, b)

        q.batch(None)
        self.assertIsNone(q._batch)
    def test_batch_query_different_connection(self):
        """
        Test BatchQuery with Models that have a different connection

        @since 3.7
        @jira_ticket PYTHON-613
        @expected_result queries should execute appropriately

        @test_category object_mapper
        """

        # Testing on a model class
        TestModel.__connection__ = 'cluster'
        AnotherTestModel.__connection__ = 'cluster2'

        with self.assertRaises(CQLEngineException):
            with BatchQuery() as b:
                TestModel.objects.batch(b).create(partition=1, cluster=1)
                AnotherTestModel.objects.batch(b).create(partition=1,
                                                         cluster=1)

        TestModel.__connection__ = None
        AnotherTestModel.__connection__ = None

        with BatchQuery(connection='cluster') as b:
            TestModel.objects.batch(b).create(partition=1, cluster=1)
            AnotherTestModel.objects.batch(b).create(partition=1, cluster=1)

        # Testing on a model instance
        with ContextQuery(TestModel, AnotherTestModel,
                          connection='cluster') as (tm, atm):
            obj1 = tm.objects.get(partition=1, cluster=1)
            obj2 = atm.objects.get(partition=1, cluster=1)

            obj1.__connection__ = 'cluster'
            obj2.__connection__ = 'cluster2'

            obj1.count = 4
            obj2.count = 4

        with self.assertRaises(CQLEngineException):
            with BatchQuery() as b:
                obj1.batch(b).save()
                obj2.batch(b).save()
Example #26
    def _delete_messages(self, messages, types: str) -> None:
        start = time()
        with BatchQuery() as b:
            for message in messages:
                message.batch(b).delete()

        elapsed = time() - start
        if elapsed > 1:
            logger.info(
                f"batch deleted {len(message)} {types} in {elapsed:.2f}s")
Example #27
    def test_batch_insert_if_not_exists_failure(self):
        """ tests that batch insertion with if_not_exists failure """
        id = uuid4()

        with BatchQuery() as b:
            TestIfNotExistsModel.batch(b).create(id=id,
                                                 count=8,
                                                 text='123456789')
        with BatchQuery() as b:
            TestIfNotExistsModel.batch(b).create(id=id,
                                                 count=9,
                                                 text='111111111111')

        q = TestIfNotExistsModel.objects(id=id)
        self.assertEqual(len(q), 1)

        tm = q.first()
        self.assertEqual(tm.count, 9)
        self.assertEqual(tm.text, '111111111111')
    def test_insert_success_case(self):

        b = BatchQuery()
        inst = TestMultiKeyModel.batch(b).create(partition=self.pkey, cluster=2, count=3, text='4')

        with self.assertRaises(TestMultiKeyModel.DoesNotExist):
            TestMultiKeyModel.get(partition=self.pkey, cluster=2)

        b.execute()

        TestMultiKeyModel.get(partition=self.pkey, cluster=2)
Example #29
    def save_ts(self, ts_list: List[TSData]):
        b = BatchQuery()
        for ts_data in ts_list:
            func = TSFunctionRegistry.find_function(ts_data.ts_type_name)
            value_serialized: str = func.serialize(ts_data.values)
            TimeSeriesDataModel.batch(b).create(
                type=ts_data.ts_type_name,
                code=ts_data.code,
                visible_time=ts_data.visible_time,
                data=value_serialized)
        b.execute()
Example #30
    def _update_messages(self, messages: List[MessageModel],
                         callback: callable) -> Optional[dt]:
        until = None

        with BatchQuery() as b:
            for message in messages:
                callback(message)
                message.batch(b).save()

                until = message.created_at

        return until