Esempio n. 1
0
    def test_tzaware_datetime_support(self):
        """Verify that timezone-aware datetimes work with the
        MinTimeUUID/MaxTimeUUID functions.

        Builds the same instant expressed in two different UTC offsets and
        checks that range queries behave identically for both.
        """
        pk = uuid4()
        midpoint_utc = datetime.utcnow().replace(tzinfo=TzOffset(0))
        midpoint_helsinki = midpoint_utc.astimezone(TzOffset(3))

        # Pre-condition: same logical instant, different wall-clock form.
        assert midpoint_utc.utctimetuple() == midpoint_helsinki.utctimetuple()
        assert midpoint_utc.timetuple() != midpoint_helsinki.timetuple()

        # Insert rows one minute before, exactly at, and one minute after
        # the midpoint, labelled '1', '2', '3' respectively.
        for label, minutes in (('1', -1), ('2', 0), ('3', 1)):
            TimeUUIDQueryModel.create(
                partition=pk,
                time=uuid_from_time(midpoint_utc + timedelta(minutes=minutes)),
                data=label)

        def rows_matching(condition):
            # Collect the data payloads of rows in this partition that
            # satisfy the given time-bound condition.
            query = TimeUUIDQueryModel.filter(
                TimeUUIDQueryModel.partition == pk, condition)
            return [row.data for row in query]

        # Upper bound: both representations must select rows 1 and 2.
        assert rows_matching(
            TimeUUIDQueryModel.time <=
            functions.MaxTimeUUID(midpoint_utc)) == ['1', '2']
        assert rows_matching(
            TimeUUIDQueryModel.time <=
            functions.MaxTimeUUID(midpoint_helsinki)) == ['1', '2']

        # Lower bound: both representations must select rows 2 and 3.
        assert rows_matching(
            TimeUUIDQueryModel.time >=
            functions.MinTimeUUID(midpoint_utc)) == ['2', '3']
        assert rows_matching(
            TimeUUIDQueryModel.time >=
            functions.MinTimeUUID(midpoint_helsinki)) == ['2', '3']
    def save_normal(normals, loc, scale, size, race_obj):
        """
        Saves a race_normal to C*

        raceNormalId = columns.UUID(primary_key=True, default=uuid.uuid4)
        raceId = columns.UUID(primary_key=True, default=uuid.uuid4)
        petCategoryName = columns.Text(required=False)
        petCategoryId = columns.UUID(primary_key=True, default=uuid.uuid4)
        currentTime = columns.DateTime()
        normals = columns.List()
        normalLoc = columns.Float()
        normalScale = columns.Float()
        normalSize = columns.Tuple()
        """
        # BUG FIX: the original called datetime.utcnow() twice, so the
        # TimeUUID in raceNormalId and the currentTime column referred to
        # slightly different instants.  Capture the timestamp once.
        # Also renamed the local from `uuid` to avoid shadowing the module.
        dt = datetime.utcnow()
        race_normal_id = uuid_from_time(dt)

        RaceNormal.create(
            raceNormalId=race_normal_id,
            raceId=race_obj['raceId'],
            petCategoryName=race_obj['petCategoryName'],
            petCategoryId=race_obj['petCategoryId'],
            currentTime=dt,
            normals=normals,
            normalLoc=loc,
            normalScale=scale,
            normalSize=size
        )
        PetRaceCassandraDataStore.increment_counter_by_name('RaceNormal')
Esempio n. 3
0
    def save_normal(normals, loc, scale, size, race_obj):
        """
        Saves a race_normal to C*

        raceNormalId = columns.UUID(primary_key=True, default=uuid.uuid4)
        raceId = columns.UUID(primary_key=True, default=uuid.uuid4)
        petCategoryName = columns.Text(required=False)
        petCategoryId = columns.UUID(primary_key=True, default=uuid.uuid4)
        currentTime = columns.DateTime()
        normals = columns.List()
        normalLoc = columns.Float()
        normalScale = columns.Float()
        normalSize = columns.Tuple()
        """
        # BUG FIX: the original called datetime.utcnow() twice, so the
        # TimeUUID in raceNormalId and the currentTime column referred to
        # slightly different instants.  Capture the timestamp once.
        # Also renamed the local from `uuid` to avoid shadowing the module.
        dt = datetime.utcnow()
        race_normal_id = uuid_from_time(dt)

        RaceNormal.create(
            raceNormalId=race_normal_id,
            raceId=race_obj['raceId'],
            petCategoryName=race_obj['petCategoryName'],
            petCategoryId=race_obj['petCategoryId'],
            currentTime=dt,
            normals=normals,
            normalLoc=loc,
            normalScale=scale,
            normalSize=size
        )
        PetRaceCassandraDataStore.increment_counter_by_name('RaceNormal')
Esempio n. 4
0
    def update_race(self, current_race, current_racers):
        """Persist a RaceResult row for each racer, then snapshot the
        driver metrics into a single Metric row."""
        for participant_id, racer in current_racers.items():
            RaceResult.create(
                raceResultId=uuid_from_time(datetime.utcnow()),
                raceId=current_race['raceId'],
                raceParticipantId=participant_id,
                petName=racer['petName'],
                petCategoryName=current_race['petCategoryName'],
                finishPosition=racer['finish_position'],
                finishTime=racer['finish_time'],
                startTime=current_race['startTime']
            )
            PetRaceCassandraDataStore.increment_counter_by_name('RaceResult')

        # Snapshot the driver-level statistics for this cluster.
        stats = self.cluster.metrics.stats
        timer = stats.request_timer

        Metric.create(
            metricId=uuid_from_time(datetime.utcnow()),
            connectionErrors=stats.connection_errors,
            writeTimeouts=stats.write_timeouts,
            readTimeouts=stats.read_timeouts,
            unavailables=stats.unavailables,
            otherErrors=stats.other_errors,
            retries=stats.retries,
            ignores=stats.ignores,
            knownHosts=int(stats.known_hosts()),
            connectedTo=int(stats.connected_to()),
            openConnections=int(stats.open_connections()),
            reqCount=timer['count'],
            reqMinLatency=timer['min'],
            reqMaxLatency=timer['max'],
            reqMeanLatency=timer['mean'],
            reqStdev=timer['stdev'],
            reqMedian=timer['median'],
            req75percentile=timer['75percentile'],
            req97percentile=timer['97percentile'],
            req98percentile=timer['98percentile'],
            req99percentile=timer['99percentile'],
            req999percentile=timer['999percentile'],
            dateCreated=datetime.utcnow(),
        )
Esempio n. 5
0
    def test_tzaware_datetime_support(self):
        """MinTimeUUID/MaxTimeUUID must accept timezone-aware datetime
        instances and treat equal instants identically regardless of offset."""
        pk = uuid4()
        midpoint_utc = datetime.utcnow().replace(tzinfo=TzOffset(0))
        midpoint_helsinki = midpoint_utc.astimezone(TzOffset(3))

        # Same instant in time, different local representations.
        assert midpoint_utc.utctimetuple() == midpoint_helsinki.utctimetuple()
        assert midpoint_utc.timetuple() != midpoint_helsinki.timetuple()

        one_minute = timedelta(minutes=1)
        for payload, moment in (('1', midpoint_utc - one_minute),
                                ('2', midpoint_utc),
                                ('3', midpoint_utc + one_minute)):
            TimeUUIDQueryModel.create(
                partition=pk, time=uuid_from_time(moment), data=payload)

        def fetch(condition):
            # Data payloads for rows in this partition matching condition.
            return [row.data for row in TimeUUIDQueryModel.filter(
                TimeUUIDQueryModel.partition == pk, condition)]

        for bound in (midpoint_utc, midpoint_helsinki):
            # An upper bound at the midpoint selects the first two rows...
            assert fetch(TimeUUIDQueryModel.time <=
                         functions.MaxTimeUUID(bound)) == ['1', '2']
            # ...and a lower bound there selects the last two.
            assert fetch(TimeUUIDQueryModel.time >=
                         functions.MinTimeUUID(bound)) == ['2', '3']
Esempio n. 6
0
def add_message(request):
	"""Store a chat/message row keyed by date plus a TimeUUID and echo it back as JSON.

	Python 2 / Django view: note the `print` statement and raw request.body.
	Expects a JSON body with a 'content' key.
	"""
	print request.user
	b =  json.loads(request.body)
	# Derive both the partition date and the TimeUUID from the same instant
	# so they cannot disagree.
	raw_date_submit = datetime.utcnow()
	date_submit = raw_date_submit.date()
	uuid_time = uuid_from_time(raw_date_submit)
	
	# The email value here is masked ('*****@*****.**') in this snippet --
	# apparently scrubbed PII from the original source, not a usable address.
	bind_values = ['*****@*****.**',date_submit,uuid_time,b['content']]
	db_connection.bind_and_execute_stmt('INSERT',bind_values)

	return HttpResponse(json.dumps([{'email_id' : '*****@*****.**' , 'date_happened' : date_submit.isoformat() , 'event_time' : unix_time_from_uuid1(uuid_time) , 'content' : b['content']}]))
Esempio n. 7
0
    def from_datetime(self, dt):
        """
        generates a UUID for a given datetime

        :param dt: datetime
        :type dt: datetime
        :return:
        """
        # Emit the deprecation through both the warnings machinery and the
        # module logger, then delegate to the replacement helper.
        deprecation_msg = ("cqlengine.columns.TimeUUID.from_datetime is deprecated. "
                          "Use cassandra.util.uuid_from_time instead.")
        warnings.warn(deprecation_msg, DeprecationWarning)
        log.warning(deprecation_msg)
        return util.uuid_from_time(dt)
Esempio n. 8
0
    def save_pet_categories(self, categories):
        """Persist each category dict as a PetCategory row.

        Each entry must provide 'name' and a numeric-convertible 'speed'.
        """
        self.connect_cass()

        for category in categories:
            PetCategory.create(
                petCategoryId=uuid_from_time(datetime.utcnow()),
                name=category['name'],
                speed=float(category['speed'])
            )
            self.logger.debug("pet cat created: %s", category['name'])
Esempio n. 9
0
    def test_conversion_specific_date(self):
        """Round-trip a fixed datetime through uuid_from_time and back."""
        from uuid import UUID

        source_dt = datetime(1981, 7, 11, microsecond=555000)
        generated = util.uuid_from_time(source_dt)
        assert isinstance(generated, UUID)

        # UUID1 time is 100ns intervals since the Gregorian epoch (1582);
        # shift and scale back to a unix timestamp.
        unix_ts = (generated.time - 0x01b21dd213814000) / 1e7
        # The recovered datetime must match the original exactly,
        # proving the UUID1 carries the proper timestamp.
        assert datetime.utcfromtimestamp(unix_ts) == source_dt
Esempio n. 10
0
    def test_conversion_specific_date(self):
        """uuid_from_time on a fixed datetime must embed that exact timestamp."""
        from uuid import UUID

        original = datetime(1981, 7, 11, microsecond=555000)
        result = util.uuid_from_time(original)
        assert isinstance(result, UUID)

        # 0x01b21dd213814000 is the 100ns offset between the UUID epoch
        # (1582-10-15) and the unix epoch (1970-01-01).
        gregorian_offset = 0x01b21dd213814000
        recovered = datetime.utcfromtimestamp((result.time - gregorian_offset) / 1e7)
        assert recovered == original
 def update_race(self, current_race, current_racers):
     """Create one RaceResult row per racer and bump the RaceResult counter."""
     for racer_id, racer in current_racers.items():
         # Assemble the row as a dict so the mapping reads as one unit.
         result_row = {
             'raceResultId': uuid_from_time(datetime.utcnow()),
             'raceId': current_race['raceId'],
             'raceParticipantId': racer_id,
             'petName': racer['petName'],
             'petCategoryName': current_race['petCategoryName'],
             'finishPosition': racer['finish_position'],
             'finishTime': racer['finish_time'],
             'startTime': current_race['startTime'],
         }
         RaceResult.create(**result_row)
         PetRaceCassandraDataStore.increment_counter_by_name('RaceResult')
Esempio n. 12
0
def insert(fec, correo, titulo, descripcion):
    """Insert one support ticket into both range-query tables.

    Writes the same row (with a fresh TimeUUID id) to
    soporte.tickets_por_usuario_rango_fechas and
    soporte.tickets_por_rango_fechas at QUORUM consistency.

    :param fec: ticket date
    :param correo: user email
    :param titulo: ticket title
    :param descripcion: ticket description
    :return: HTML confirmation fragment
    """
    cluster = Cluster(['master'], protocol_version=3)
    try:
        session = cluster.connect()
        myuuid = uuid_from_time(time.time())
        query = SimpleStatement(
            "INSERT INTO soporte.tickets_por_usuario_rango_fechas(fec,correo,idticket,titulo,descripcion)values(%s,%s,%s,%s,%s)",
            consistency_level=ConsistencyLevel.QUORUM)
        session.execute(query, (fec, correo, myuuid, titulo, descripcion))
        query2 = SimpleStatement(
            "INSERT INTO soporte.tickets_por_rango_fechas(fec,correo,idticket,titulo,descripcion)values(%s,%s,%s,%s,%s)",
            consistency_level=ConsistencyLevel.QUORUM)
        session.execute(query2, (fec, correo, myuuid, titulo, descripcion))
        return "<p>Ticket creado satisfactoriamente </p>"
    finally:
        # BUG FIX: the original created a Cluster per call and never shut it
        # down, leaking connections and threads.  Always release them.
        cluster.shutdown()
 def save_racer_current_point(current_race, racer, race_sample):
     """Persist a single RaceData sample for one racer and bump its counter."""
     # Build the row mapping first so the column assignments read as one unit.
     row = {
         'raceDataId': uuid_from_time(datetime.utcnow()),
         'petId': racer['petId'],
         'raceId': current_race['raceId'],
         'petName': racer['petName'],
         'petCategoryName': current_race['petCategoryName'],
         'petCategoryId': current_race['petCategoryId'],
         'interval': race_sample['sample_iteration'],
         'runnerDistance': race_sample['distance_this_sample'],
         'runnerPreviousDistance': race_sample['previous_distance'],
         'startTime': current_race['startTime'],
         'finished': race_sample['finished'],
     }
     RaceData.create(**row)
     PetRaceCassandraDataStore.increment_counter_by_name('RaceData')
Esempio n. 14
0
 def save_racer_current_point(current_race, racer, race_sample):
     """Record one RaceData sample for a racer, then bump the RaceData counter."""
     sample_id = uuid_from_time(datetime.utcnow())
     RaceData.create(raceDataId=sample_id,
                     petId=racer['petId'],
                     raceId=current_race['raceId'],
                     petName=racer['petName'],
                     petCategoryName=current_race['petCategoryName'],
                     petCategoryId=current_race['petCategoryId'],
                     interval=race_sample['sample_iteration'],
                     runnerDistance=race_sample['distance_this_sample'],
                     runnerPreviousDistance=race_sample['previous_distance'],
                     startTime=current_race['startTime'],
                     finished=race_sample['finished'])
     PetRaceCassandraDataStore.increment_counter_by_name('RaceData')
Esempio n. 15
0
def insert_member(session, name=None, age=None):
    """Insert a member row keyed by a TimeUUID and return it as a dict.

    :param session: Cassandra session used to execute the INSERT
    :param name: member name (may be None)
    :param age: member age (may be None)
    :return: dict with the string form of the generated id plus name and age
    """
    # Renamed from `id` to avoid shadowing the builtin id().
    member_id = uuid_from_time(time.time())
    session.execute(
        """
        INSERT INTO {keyspace}.members (id, name, age)
        VALUES (%s, %s, %s);
        """.format(keyspace=KEYSPACE), (
            member_id,
            name,
            age,
        ))
    return {
        'id': str(member_id),
        'name': name,
        'age': age,
    }
Esempio n. 16
0
    def post(self):
        """Create a chat message from the JSON request body.

        Reads the author from the JWT, validates the message type against
        ChatMessageByChatId.allowed_type, normalizes type-specific metadata,
        and persists the row.  Returns {"success": True} on success or a
        403 response for a disallowed type.
        """
        author_id = get_user_id_from_jwt()

        data = request.get_json(silent=True)

        # Renamed local from `type` to avoid shadowing the builtin.
        message_type = data.get('type', 'text')
        chat_id = data.get('chat_id')
        message_id = str(uuid_from_time(time.time()))
        text = data.get('text', '')
        asset_name = data.get('asset_name', '')
        meta_data = data.get('meta_data', {})
        connection.setup(hosts=CASSANDRA_HOSTS, default_keyspace=CHAT_KEYSPACE)

        if message_type not in ChatMessageByChatId.allowed_type:
            # BUG FIX: the original concatenated allowed_type directly onto a
            # str; allowed_type is used with `in` above so it is presumably a
            # collection, and `str + collection` raises TypeError instead of
            # returning the intended 403.  Format it instead, which is safe
            # for any type.
            return make_response(
                "Type not allowed. Only allowed type: {}".format(
                    ChatMessageByChatId.allowed_type), 403)

        if message_type == 'glimpse':
            # Keep only the whitelisted glimpse fields, with defaults.
            meta_data = {
                'seconds_allowed': meta_data.get('seconds_allowed', 10),
                'effect': meta_data.get('effect'),
            }

        elif message_type == 'glimpse_narrative':
            meta_data = {
                'path_id': meta_data.get('path_id'),
            }

        meta_data = json.dumps(meta_data)

        ChatMessageByChatId.create(
            chat_id=chat_id,
            message_id=message_id,
            author_id=author_id,
            type=message_type,
            text=text,
            asset_name=asset_name,
            meta_data=meta_data,
        )

        return {
            "success": True
        }
Esempio n. 17
0
    def from_datetime(self, dt):
        """
        generates a UUID for a given datetime

        :param dt: datetime
        :type dt: datetime
        :return: uuid1

        .. deprecated:: 2.6.0

            Use :func:`cassandra.util.uuid_from_time`

        """
        # Surface the deprecation both as a warning and in the log, then
        # delegate to the replacement helper.
        notice = ("cqlengine.columns.TimeUUID.from_datetime is deprecated. "
                  "Use cassandra.util.uuid_from_time instead.")
        warnings.warn(notice, DeprecationWarning)
        log.warning(notice)
        return util.uuid_from_time(dt)
Esempio n. 18
0
    def test_uuid_from_time(self):
        """Exercise util.uuid_from_time: explicit node/clock_seq, randomized
        node and clock_seq, out-of-range inputs, and datetime input."""
        t = time.time()
        seq = 0x2aa5
        node = uuid.getnode()
        u = util.uuid_from_time(t, node, seq)
        # using AlmostEqual because time precision is different for
        # some platforms
        self.assertAlmostEqual(util.unix_time_from_uuid1(u), t, 4)
        self.assertEqual(u.node, node)
        self.assertEqual(u.clock_seq, seq)

        # random node
        u1 = util.uuid_from_time(t, clock_seq=seq)
        u2 = util.uuid_from_time(t, clock_seq=seq)
        self.assertAlmostEqual(util.unix_time_from_uuid1(u1), t, 4)
        self.assertAlmostEqual(util.unix_time_from_uuid1(u2), t, 4)
        self.assertEqual(u.clock_seq, seq)
        # not impossible, but we shouldn't get the same value twice
        self.assertNotEqual(u1.node, u2.node)

        # random seq
        u1 = util.uuid_from_time(t, node=node)
        u2 = util.uuid_from_time(t, node=node)
        self.assertAlmostEqual(util.unix_time_from_uuid1(u1), t, 4)
        self.assertAlmostEqual(util.unix_time_from_uuid1(u2), t, 4)
        self.assertEqual(u.node, node)
        # not impossible, but we shouldn't get the same value twice
        self.assertNotEqual(u1.clock_seq, u2.clock_seq)

        # node too large
        # node is a 48-bit field, so 2**48 must be rejected.
        with self.assertRaises(ValueError):
            u = util.uuid_from_time(t, node=2**48)

        # clock_seq too large
        # clock_seq is a 14-bit field, so 0x4000 must be rejected.
        with self.assertRaises(ValueError):
            u = util.uuid_from_time(t, clock_seq=0x4000)

        # construct from datetime
        dt = util.datetime_from_timestamp(t)
        u = util.uuid_from_time(dt, node, seq)
        self.assertAlmostEqual(util.unix_time_from_uuid1(u), t, 4)
        self.assertEqual(u.node, node)
        self.assertEqual(u.clock_seq, seq)
    def test_uuid_from_time(self):
        """Exercise util.uuid_from_time: explicit node/clock_seq, randomized
        node and clock_seq, out-of-range inputs, and datetime input."""
        t = time.time()
        seq = 0x2aa5
        node = uuid.getnode()
        u = util.uuid_from_time(t, node, seq)
        # using AlmostEqual because time precision is different for
        # some platforms
        self.assertAlmostEqual(util.unix_time_from_uuid1(u), t, 4)
        self.assertEqual(u.node, node)
        self.assertEqual(u.clock_seq, seq)

        # random node
        u1 = util.uuid_from_time(t, clock_seq=seq)
        u2 = util.uuid_from_time(t, clock_seq=seq)
        self.assertAlmostEqual(util.unix_time_from_uuid1(u1), t, 4)
        self.assertAlmostEqual(util.unix_time_from_uuid1(u2), t, 4)
        self.assertEqual(u.clock_seq, seq)
        # not impossible, but we shouldn't get the same value twice
        self.assertNotEqual(u1.node, u2.node)

        # random seq
        u1 = util.uuid_from_time(t, node=node)
        u2 = util.uuid_from_time(t, node=node)
        self.assertAlmostEqual(util.unix_time_from_uuid1(u1), t, 4)
        self.assertAlmostEqual(util.unix_time_from_uuid1(u2), t, 4)
        self.assertEqual(u.node, node)
        # not impossible, but we shouldn't get the same value twice
        self.assertNotEqual(u1.clock_seq, u2.clock_seq)

        # node too large
        # node is a 48-bit field, so 2**48 must be rejected.
        with self.assertRaises(ValueError):
            u = util.uuid_from_time(t, node=2 ** 48)

        # clock_seq too large
        # clock_seq is a 14-bit field, so 0x4000 must be rejected.
        with self.assertRaises(ValueError):
            u = util.uuid_from_time(t, clock_seq=0x4000)

        # construct from datetime
        dt = util.datetime_from_timestamp(t)
        u = util.uuid_from_time(dt, node, seq)
        self.assertAlmostEqual(util.unix_time_from_uuid1(u), t, 4)
        self.assertEqual(u.node, node)
        self.assertEqual(u.clock_seq, seq)
Esempio n. 20
0
    def export(self, name, columns, points):
        """Write the points to the Cassandra cluster.

        Pairs each column name with its point value, keeps only numeric
        values (coerced to float), and inserts one row keyed by plugin
        name and a TimeUUID.  Failures are logged, not raised (best-effort).

        NOTE: Python 2 only -- uses dict.iteritems().
        """
        logger.debug("Export {} stats to Cassandra".format(name))

        # Remove non number stats and convert all to float (for Boolean)
        data = {k: float(v) for (k, v) in dict(zip(columns, points)).iteritems() if isinstance(v, Number)}

        # Write input to the Cassandra table
        try:
            self.session.execute(
                """
                INSERT INTO localhost (plugin, time, stat)
                VALUES (%s, %s, %s)
                """,
                (name, uuid_from_time(datetime.now()), data)
            )
        except Exception as e:
            # Deliberately broad: a failed export must not crash the caller.
            logger.error("Cannot export {} stats to Cassandra ({})".format(name, e))
Esempio n. 21
0
def cassandra_insert_colaborador(lids, comision, fec_inicio, nombre, salario):
    """Insert a collaborator row into cpm.colaboradores at QUORUM.

    Generates a TimeUUID id, resolves the collaborator's area from lids
    via get_areas_for_colaborador, and writes one row.

    :param lids: input passed to get_areas_for_colaborador
    :param comision: commission (coerced to float)
    :param fec_inicio: start date
    :param nombre: collaborator name
    :param salario: salary (coerced to float)
    :return: confirmation message string
    """
    myuuid = uuid_from_time(time.time())
    print('')
    print("------->MYUUID:::", myuuid)
    id_colaborador = str(myuuid)
    print("------->IDCOLABORADOR:::", id_colaborador)
    print("--->LIDS: ", lids)
    area = get_areas_for_colaborador(lids)
    print("----> AREA::", area)
    cluster = Cluster(['master'], protocol_version=3)
    try:
        session = cluster.connect()
        query = SimpleStatement(
            "INSERT INTO cpm.colaboradores(id_colaborador,area,comision,fec_inicio,nombre,salario)values(%s,%s,%s,%s,%s,%s)",
            consistency_level=ConsistencyLevel.QUORUM)
        session.execute(query,
                        (id_colaborador, area, float(comision), fec_inicio, nombre,
                         float(salario)))
        return "Colaborador ingresado correctamente"
    finally:
        # BUG FIX: the original never shut the Cluster down, leaking
        # connections and threads on every call.
        cluster.shutdown()
Esempio n. 22
0
    def save_pets(self, pets_create, category_name):
        """Create a Pet row for each dict in pets_create under the named category.

        :param pets_create: iterable of dicts with 'name' and 'description'
        :param category_name: name of an existing PetCategory
        :raises ValueError: if category_name does not match exactly one category
        """
        self.connect_cass()

        q = PetCategory.objects.filter(name=category_name)
        # BUG FIX: the original used `len(q) is not 1`, an identity
        # comparison that only works by accident of CPython's small-int
        # caching; use a value comparison.
        if len(q) != 1:
            raise ValueError('category not found: ', category_name)
        pet_cat = q.first()

        for _p in pets_create:
            Pet.create(
                petId=uuid_from_time(datetime.utcnow()),
                name=_p['name'],
                description=_p['description'],
                petCategoryName=pet_cat['name'],
                petCategoryId=pet_cat['petCategoryId'],
                petSpeed=pet_cat['speed']
            )
            self.logger.debug("pet created: %s", _p['name'])
Esempio n. 23
0
def genericize(columns, parameters):
    """Map free-form parameters onto a fixed set of typed catch-all columns.

    Parameters whose key is not a known column are routed into one of the
    prefixed generic dicts by Python type (b_=bool, d_=datetime, i_=int,
    f_=float, s_=str, l_=list); lists become newline-joined strings and
    other non-None objects are JSON-encoded.  An 'id' is synthesized from
    'date' (TimeUUID) or uuid4 when absent; missing columns are set to None.

    NOTE: Python 2 only -- relies on `unicode` and on dict.items()
    returning a list, which makes deleting entries while iterating safe.
    """
    def date_handler(obj):
        # JSON fallback: anything with isoformat (dates/datetimes) becomes
        # an ISO string; everything else is passed through.
        return obj.isoformat() if hasattr(obj, 'isoformat') else obj

    parameters = flatten(deepcopy(parameters))

    if 'id' not in parameters:
        if 'date' in parameters:
            parameters['id'] = uuid_from_time(parameters['date'])
        else:
            parameters['id'] = uuid.uuid4()

    generic = {'b_': {}, 'd_': {}, 'i_': {}, 'f_': {}, 's_': {}, 'l_': {}}
    for key, value in parameters.items():
        if key not in columns:
            # bool must be tested before int (bool is an int subclass).
            if type(value) is bool:
                prefix = 'b_'
            elif type(value) is datetime:
                prefix = 'd_'
            elif type(value) is int:
                prefix = 'i_'
            elif type(value) is float:
                prefix = 'f_'
            elif isinstance(value, (str, unicode)):
                prefix = 's_'
            elif type(value) is list:
                prefix = 'l_'
                value = '\n'.join([str(x) for x in value])
            elif value is not None:
                prefix = 's_'
                value = json.dumps(value, default=date_handler)
            # If value is None, prefix may be unbound here -- the guard
            # below skips the generic insert in exactly that case.
            if value is not None:
                generic[prefix][prefix + key] = value
            del parameters[key]
    for key, value in generic.items():
        if key in columns:
            parameters[key] = value
    for name in columns:
        if name not in parameters:
            parameters[name] = None
    return parameters
Esempio n. 24
0
    def export(self, name, columns, points):
        """Write the points to the Cassandra cluster.

        Pairs each column name with its point value, keeps only numeric
        values (coerced to float), and inserts one row keyed by plugin
        name and a TimeUUID.  Failures are logged, not raised (best-effort).

        NOTE: Python 2 only -- uses dict.iteritems().
        """
        logger.debug("Export {} stats to Cassandra".format(name))

        # Remove non number stats and convert all to float (for Boolean)
        data = {
            k: float(v)
            for (k, v) in dict(zip(columns, points)).iteritems()
            if isinstance(v, Number)
        }

        # Write input to the Cassandra table
        try:
            self.session.execute(
                """
                INSERT INTO localhost (plugin, time, stat)
                VALUES (%s, %s, %s)
                """, (name, uuid_from_time(datetime.now()), data))
        except Exception as e:
            # Deliberately broad: a failed export must not crash the caller.
            logger.error("Cannot export {} stats to Cassandra ({})".format(
                name, e))
Esempio n. 25
0
        for _ in range(random.randint(5, 12))) + ".jpg"

    categoria_tupla = random.choice(categorias)
    categoria = categoria_tupla[0]
    nombre_categoria_es = categoria_tupla[1]
    nombre_categoria_en = categoria_tupla[2]
    nombre_categoria_fr = categoria_tupla[3]
    nombre_categoria = {
        'es': nombre_categoria_es,
        'en': nombre_categoria_en,
        'fr': nombre_categoria_fr
    }

    timestamp_producto = random_date(start, end)
    alta_producto = timestamp_producto.strftime("%Y")
    codigo_referencia = uuid_from_time(timestamp_producto)

    timestamp_marketing = random_date(start, end)
    tag_marketing = random.choice(tags_marketing)

    batch = BatchStatement(BatchType.LOGGED)
    prepared = session.prepare(
        "INSERT INTO productos (codigo_referencia, nombre_producto, alta_producto, precio_producto, descripcion, url_imagen, categoria, nombre_categoria)"
        + " VALUES (?, ?, ?, " + "{:0.2f}".format(precio_producto) +
        ", ?, ?, ?, ?)")
    batch.add(prepared, (codigo_referencia, nombre_producto, alta_producto,
                         descripcion, url_imagen, categoria, nombre_categoria))
    prepared = session.prepare(
        "INSERT INTO productos_por_marketing " +
        "(codigo_referencia, timestamp_marketing, tag_marketing, nombre_producto, "
        + " precio_producto, url_imagen) VALUES (?, ?, ?, ?, " +
Esempio n. 26
0
    def handle(self, **options):
        """Populate Cassandra with random demo data.

        Runs the cassandra syncdb command, then creates random users,
        follow relationships, feed entries, inbox entries, and bundles,
        printing a carriage-return-based spinner for progress.
        """
        print('Running syncdb for Cassandra')
        sync_cassandra.Command().execute(**options)

        spinner = cycle('|/-\\')

        print('Creating 1000 random users')
        users = []
        user_ids = []
        for i in range(10**3):
            print('\r{} {}'.format(next(spinner), i), end='')
            user = UserModel(name=random_string(title=True))
            user.save()
            users.append(user)
            user_ids.append(user.id)
        print('\r      ', end='\r')

        print('Creating following relationships between these users')
        for i, user in enumerate(users):
            print('\r{} {}'.format(next(spinner), i), end='')
            followers = random.sample(user_ids, random.randrange(50))
            # Exclude self-follows.
            user.following = [uuid for uuid in followers if user.id != uuid]
            user.save()
        print('\r      ', end='\r')

        # NOTE(review): message says "100k" but 10**4 yields 10,000 ids --
        # either the count or the message looks wrong; confirm intent.
        print('Creating 100k random feed entries')
        random_dates = islice(random_datetime_generator(), 10**4)
        feedids = [uuid_from_time(t) for t in random_dates]
        for i, feedid in enumerate(feedids):
            print('\r{} {}'.format(next(spinner), i), end='')
            entry = FeedEntryModel(userid=random.choice(user_ids),
                                   id=feedid,
                                   comment_count=random.randrange(10))
            entry.save()
        print('\r       ', end='\r')

        print('Creating 5000 random inbox entries')
        types = (CommentedInboxEntryModel, LikeInboxEntryModel,
                 NewFollowerInboxEntryModel)
        random_dates = islice(random_datetime_generator(), 5000)
        # map() is a one-shot iterator on Python 3; it is only iterated once.
        inboxids = map(uuid_from_time, random_dates)
        for i, inboxid in enumerate(inboxids):
            print('\r{} {}'.format(next(spinner), i), end='')
            inboxtype = random.choice(types)
            # Superset of fields; each inbox model presumably ignores the
            # ones it does not define -- TODO confirm against the models.
            fields = {
                'userid':
                random.choice(user_ids),
                'id':
                inboxid,
                'feedentryid':
                random.choice(feedids),
                'comment_text':
                ' '.join(
                    [random_string() for _ in range(random.randrange(3, 10))]),
                'likerid':
                random.choice(user_ids),
                'followerid':
                random.choice(user_ids),
            }
            entry = inboxtype(**fields)
            entry.save()
        print('\r       ', end='\r')

        print('Creating 1000 random bundles')
        random_dates = islice(random_datetime_generator(), 1000)
        bundleids = map(uuid_from_time, random_dates)
        for i, bundleid in enumerate(bundleids):
            print('\r{} {}'.format(next(spinner), i), end='')
            entrycount = random.randrange(2, 10)
            # pick entrycount unique feedids, not to be used again
            feedids, feedentries = feedids[:-entrycount], feedids[-entrycount:]
            entry = BundleEntryModel(userid=random.choice(user_ids),
                                     id=bundleid,
                                     comment_count=random.randrange(10),
                                     entry_ids=feedentries)
            entry.save()
        print('\r       ', end='\r')
Esempio n. 27
0
def timeuuid_now():
    """Return a version-1 (time-based) UUID for the current local time."""
    current_time = datetime.datetime.now()
    return uuid_from_time(current_time)
Esempio n. 28
0
    def parse_indigo_log_file(self, log_file):
        """Parse a gzipped Indigo API log and bulk-insert its entries into
        the ``device_logs.log_entry`` Cassandra table.

        Only lines tagged ``CloudService.Report.API`` that split into exactly
        20 space-separated fields are ingested; rows are flushed in batches
        of 6000 statements.

        :param log_file: path to a gzip-compressed log file
        """
        self.cassandra_client.connect(["s000.blurdev.com"])
        cassandra_session = self.cassandra_client.session

        line_counter = 0
        try:
            # "rt" decodes lines to text so the substring matching below works
            # on Python 3 (binary-mode gzip yields bytes); the context manager
            # guarantees the handle is closed (the original leaked it).
            with gzip.open(log_file, "rt") as gzip_file_handler:
                pstmt = cassandra_session.prepare(
                    "INSERT INTO device_logs.log_entry(id,deviceId,eventDatetime,api,appid,userid,resultCode,latency,service,method)VALUES(?,?,?,?,?,?,?,?,?,?)"
                )
                batch_statement = BatchStatement()

                for linestr in gzip_file_handler:
                    # Target line shape (CloudService.Report.API), e.g.:
                    # 2016-01-04 03:59:59,293 +0000 [0:0:0:0:0:0:0:1] INFO  [qtp389572888-163690] com.motorola.blur.cloudsvc.service.CloudService#invoke(579) - [CloudService.Report.API]:
                    # api=/v1/checkinuploader/upload appid=YDYWOLQB1NM35HHYPKOZW3V3Z33TC85I userid=null deviceid=1342508016933724160 status=200 time=1170 method=POST service=ccs_uploader
                    if linestr.count("CloudService.Report.API") > 0:
                        log_fields = linestr.split(" ")
                        if len(log_fields) == 20:
                            line_counter += 1

                            field_event_date = log_fields[0]
                            field_event_time = self.extract_time(log_fields[1])
                            field_api = self.grep_value(log_fields[10])
                            field_appid = self.grep_value(log_fields[11])
                            field_userid = self.grep_value(log_fields[12])
                            field_deviceid = self.grep_value(log_fields[13])
                            field_result_code = self.get_int(self.grep_value(log_fields[14]))
                            field_latency = self.get_int(self.grep_value(log_fields[15]))
                            field_method = self.convert_method(self.grep_value(log_fields[16]))
                            field_service = self.grep_value(log_fields[17])

                            edt = self.to_datetime(field_event_date, field_event_time)
                            batch_statement.add(
                                pstmt.bind(
                                    (
                                        util.uuid_from_time(edt),
                                        field_deviceid,
                                        edt,
                                        field_api,
                                        field_appid,
                                        field_userid,
                                        field_result_code,
                                        field_latency,
                                        field_service,
                                        field_method,
                                    )
                                )
                            )

                            # Flush a full batch every 6000 ingested rows.
                            if line_counter % 6000 == 0:
                                cassandra_session.execute(batch_statement)
                                batch_statement = BatchStatement()

                # Flush the final partial batch, if any.
                if line_counter % 6000 != 0:
                    cassandra_session.execute(batch_statement)

        except BaseException as e:
            # NOTE(review): deliberately broad best-effort handling kept from
            # the original; Python 3 ``as`` syntax replaces the Python 2 form,
            # which is a SyntaxError on Python 3.
            print(e)
Esempio n. 29
0
    def create_race(length, description, pet_category_name):
        """Create a race for every pet in *pet_category_name*.

        Persists one RaceParticipant row per pet and one Race row, bumping
        the matching counters, and returns ``(saved_race, participants)``
        as plain dict representations of what was stored.
        """
        started_at = datetime.utcnow()
        race_id = uuid_from_time(started_at)

        # TODO this is loading all pets ... random number??
        pets = PetRaceCassandraDataStore.get_pets_by_category_name(pet_category_name)
        category = PetRaceCassandraDataStore.get_pet_category_by_name(pet_category_name)

        participants = {}
        pet_ids = []
        for pet in pets:
            pet_ids.append(str(pet["petId"]))
            participant_id = uuid_from_time(datetime.utcnow())

            # Dict mirror of the row written below, keyed by participant id.
            participants[str(participant_id)] = {
                'raceParticipantId': str(participant_id),
                'petId': str(pet["petId"]),
                'raceId': str(race_id),
                'petName': pet["name"],
                'petCategoryName': category['name'],
                'petCategoryId': str(category['petCategoryId']),
                'startTime': started_at,
                'endTime': None,
                'finished': False,
                'finished_position': None,
                'current_distance': 0
            }

            RaceParticipant.create(
                raceParticipantId=participant_id,
                petId=pet["petId"],
                raceId=race_id,
                petName=pet["name"],
                petCategoryName=category['name'],
                petCategoryId=category['petCategoryId'],
                startTime=started_at
            )
            PetRaceCassandraDataStore.increment_counter_by_name('RaceParticipant')

        saved_race = {
            'raceId': str(race_id),
            'numOfPets': len(pets),
            'length': length,
            'description': description,
            'petCategoryId': str(category['petCategoryId']),
            'petCategoryName': category['name'],
            'startTime': started_at,
            'racersIds': pet_ids,
            'baseSpeed': category['speed']
        }

        Race.create(
            raceId=race_id,
            numOfPets=len(pets),
            length=length,
            description=description,
            petCategoryId=category['petCategoryId'],
            petCategoryName=category['name'],
            startTime=started_at,
            racersIds=pet_ids,
            baseSpeed=category['speed']
        )

        PetRaceCassandraDataStore.increment_counter_by_name('Race')

        return saved_race, participants
Esempio n. 30
0
def cassandra_insert_patentes(nombre_patente,
                              descripcion,
                              fec_presentacion,
                              id_pais,
                              listaareas,
                              listainventores,
                              listacolaboradores,
                              idmasivo=None):
    """Insert a patent into the three denormalized Cassandra tables
    (by country, by inventor, by area).

    :param idmasivo: optional pre-generated id (bulk loads); when None a
        new time-based UUID is minted.
    :return: success message string.
    """
    # Collaborator/inventor maps and the area names are shared by every
    # insert below, so build them once up front.
    colaboradores = crear_mapa_colaboradores(listacolaboradores)
    print("##############--->:: ", colaboradores)
    inventores = crear_mapa_inventores(listainventores)
    print("##############--->:: ", inventores)
    areas = cassandra_get_nombre_areasv2(listaareas)
    print("##############--->:: ", areas)

    # Identity check (`is None`), not equality, per PEP 8.
    if idmasivo is None:
        myuuid = uuid_from_time(time.time())
    else:
        myuuid = idmasivo
    print("------->MYUUID:::", myuuid)
    id_invento = str(myuuid)

    nombre_pais = obtener_nombre_pais(id_pais)

    # One cluster/session for the whole call — the original opened a brand
    # new Cluster per loop iteration and never shut any of them down,
    # leaking connections. Always release it on exit.
    cluster = Cluster(['master'], protocol_version=3)
    session = cluster.connect()
    try:
        # Insert by country.
        querypais = SimpleStatement(
            "INSERT INTO cpm.inventos_por_pais(id_invento,id_pais,nombre_pais,nombre,fec_presentacion,inventores,colaboradores,descripcion,area)values(%s,%s,%s,%s,%s,%s,%s,%s,%s)",
            consistency_level=ConsistencyLevel.QUORUM)
        session.execute(
            querypais,
            (id_invento, id_pais, nombre_pais, nombre_patente, fec_presentacion,
             inventores, colaboradores, descripcion, areas))

        # Insert by inventor (first returned row per inventor only, matching
        # the original's `break`).
        queryinventor = SimpleStatement(
            "INSERT INTO cpm.inventos_por_inventor(nombre_inventor,id_invento,id_inventor,nacionalidad,sexo,fec_nac,nombre,fec_presentacion,descripcion,area,colaboradores,pais,id_pais)values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",
            consistency_level=ConsistencyLevel.QUORUM)
        for idinv in listainventores:
            infoinventor = cassandra_get_inventor_por_id(idinv)
            for info in infoinventor:
                # NOTE(review): sexo="F" and fec_nac="1996-01-01" are
                # hard-coded placeholders carried over from the original.
                session.execute(
                    queryinventor,
                    (info.nombre, id_invento, idinv, info.pais, "F", "1996-01-01",
                     nombre_patente, fec_presentacion, descripcion, areas,
                     colaboradores, nombre_pais, id_pais))
                break

        # Insert by area.
        queryarea = SimpleStatement(
            "INSERT INTO cpm.inventos_por_area(area,nombre_area,id_invento,inventores,colaboradores,nombre,fec_presentacion,descripcion,pais,id_pais)values(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",
            consistency_level=ConsistencyLevel.QUORUM)
        for idarea in listaareas:
            nombre_area = cassandra_get_area_por_id(idarea)
            session.execute(queryarea,
                            (idarea, nombre_area, id_invento, inventores,
                             colaboradores, nombre_patente, fec_presentacion,
                             descripcion, nombre_pais, id_pais))
    finally:
        cluster.shutdown()

    return "Patente ingresada con exito!!!"
    def create_race(length, description, pet_category_name):
        """Set up a race over all pets of the given category.

        Writes one RaceParticipant per pet plus a Race row (incrementing the
        corresponding counters) and returns the stored race dict together
        with a dict of participant records keyed by participant id.
        """
        race_start = datetime.utcnow()
        race_uuid = uuid_from_time(race_start)

        # TODO this is loading all pets ... random number??
        entrants = PetRaceCassandraDataStore.get_pets_by_category_name(pet_category_name)
        cat = PetRaceCassandraDataStore.get_pet_category_by_name(pet_category_name)

        participants = {}
        entrant_ids = []
        for entrant in entrants:
            entrant_ids.append(str(entrant["petId"]))
            rp_uuid = uuid_from_time(datetime.utcnow())

            record = {
                'raceParticipantId': str(rp_uuid),
                'petId': str(entrant["petId"]),
                'raceId': str(race_uuid),
                'petName': entrant["name"],
                'petCategoryName': cat['name'],
                'petCategoryId': str(cat['petCategoryId']),
                'startTime': race_start,
                'endTime': None,
                'finished': False,
                'finished_position': None,
                'current_distance': 0
            }
            participants[str(rp_uuid)] = record

            RaceParticipant.create(
                raceParticipantId=rp_uuid,
                petId=entrant["petId"],
                raceId=race_uuid,
                petName=entrant["name"],
                petCategoryName=cat['name'],
                petCategoryId=cat['petCategoryId'],
                startTime=race_start
            )
            PetRaceCassandraDataStore.increment_counter_by_name('RaceParticipant')

        saved_race = {
            'raceId': str(race_uuid),
            'numOfPets': len(entrants),
            'length': length,
            'description': description,
            'petCategoryId': str(cat['petCategoryId']),
            'petCategoryName': cat['name'],
            'startTime': race_start,
            'racersIds': entrant_ids,
            'baseSpeed': cat['speed']
        }

        Race.create(
            raceId=race_uuid,
            numOfPets=len(entrants),
            length=length,
            description=description,
            petCategoryId=cat['petCategoryId'],
            petCategoryName=cat['name'],
            startTime=race_start,
            racersIds=entrant_ids,
            baseSpeed=cat['speed']
        )

        PetRaceCassandraDataStore.increment_counter_by_name('Race')

        return saved_race, participants
Esempio n. 32
0
 def test_timeuuid(self):
     """Round-trip a timeuuid column value through the read/write helper."""
     value = uuid_from_time(datetime(2015, 1, 1))
     self.read_write_test('timeuuid', value)
Esempio n. 33
0
 def test_timeuuid(self):
     """Verify a v1 UUID for a fixed date survives a write/read cycle."""
     fixed_date = datetime(2015, 1, 1)
     self.read_write_test('timeuuid', uuid_from_time(fixed_date))
Esempio n. 34
0
def generate_time_uuid():
    """Return a version-1 UUID derived from the current local time."""
    moment = datetime.now()
    return uuid_from_time(moment)
Esempio n. 35
0
        # NOTE(review): fragment of a larger loop — `data`, `series`, and
        # `seriesdates` are defined outside this excerpt; presumably `data`
        # is a batch of documents with a 'source' payload (verify upstream).
        for element in data:
            # Parse the document's date string, then collapse it to a POSIX
            # timestamp in whole local-time seconds (timetuple drops any
            # sub-second precision).
            date = dateutil.parser.parse(element['source']['date'])
            date = time.mktime(date.timetuple())
            seriesdates.append(date)
            # Also bucket the timestamp under each of the document's tags,
            # creating the per-tag list on first sight.
            for tag in element['source']['tags']:
                if tag not in series:
                    series[tag] = []
                series[tag].append(date)

def _insert_time_series(session, name, timestamps):
    """Insert one timeuuid row per timestamp into time_series under *name*.

    Timestamps are truncated to whole seconds (as the original script did
    via int()) before deriving the version-1 UUID.
    """
    for ts in timestamps:
        session.execute(
            "INSERT INTO time_series (name, event_time) VALUES (%s, %s)",
            (name, util.uuid_from_time(int(ts))))


cluster = Cluster()
session = cluster.connect('db_tsa')
session.default_timeout = 3600

clientname = 'movi'
# Main (per-client) series.
_insert_time_series(session, clientname, seriesdates)

# One derived series per tag.
for tag_name, tag_series in series.items():
    _insert_time_series(session, clientname + '_' + tag_name, tag_series)