Example #1
def get_all_for_account_between_dates(self, account_id, query_params):
    return self.uow.apply_query_parameters(
        self.table.get_all(account_id, index='account_id')
            .filter(r.row['readingdateutc'].during(
                r.epoch_time(query_params.start_date),
                r.epoch_time(query_params.end_date),
                right_bound='closed')),
        query_params)
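Example #1 builds its window from two r.epoch_time bounds and filters with during. The same pattern, stripped to a standalone minimal sketch (the readings table and connection details are assumptions, not from the example above):

import rethinkdb as r

conn = r.connect('localhost', 28015)

# Keep rows whose timestamp falls in [start, end]; epoch seconds become ReQL times.
start, end = 1500000000, 1500086400
rows = r.table('readings').filter(
    r.row['readingdateutc'].during(
        r.epoch_time(start),
        r.epoch_time(end),
        right_bound='closed')  # include the end instant itself
).run(conn)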
Example #2
    def process_kline(self, msg):
        d = msg['data']
        k = d['k']

        try:
            # TODO check depth not empty
            kline = {
                fields.ID: [k['s'], k['t']],
                fields.EXCHANGE: 'binance',
                fields.EVENT_ID: ['binance', 'feature', d['s'], d['E']],
                fields.EVENT_TIME: d['E'],
                fields.START_TIME: k['t'],
                fields.END_TIME: k['T'],
                fields.EPOCH_START_TIME: r.epoch_time(k['t'] / 1000),
                fields.EPOCH_END_TIME: r.epoch_time(k['T'] / 1000),
                fields.EPOCH_EVENT_TIME: r.epoch_time(d['E'] / 1000),
                fields.SYMBOL: k['s'],
                fields.BASE_ASSET: self.derive_base_asset(self.quote_asset, k['s']),
                fields.QUOTE_ASSET: self.quote_asset,
                fields.INTERVAL: k['i'],
                fields.OPEN: float(k['o']),
                fields.CLOSE: float(k['c']),
                fields.HIGH: float(k['h']),
                fields.LOW: float(k['l']),
                fields.VOLUME: float(k['v']),
                fields.TRADES: k['n'],
                fields.QUOTE_ASSET_VOLUME: float(k['q']),
                fields.TAKER_BUY_BASE_ASSET_VOLUME: float(k['V']),
                fields.TAKER_BUY_QUOTE_ASSET_VOLUME: float(k['Q']),
                fields.IS_FINAL: k['x']
            }

            flat_depth = self.get_flat_depth(k['s'])

            kline.update(flat_depth)

            r.table(db_const.FEATURES_TABLE).insert(
                kline, conflict="replace").run(self.conn)

            price = {
                fields.ID: k['s'],
                fields.EVENT_ID: ['binance', 'price', d['s'], d['E']],
                fields.EVENT_TIME: d['E'],
                fields.EPOCH_EVENT_TIME: r.epoch_time(d['E'] / 1000),
                fields.QUOTE_ASSET: self.quote_asset,
                fields.BASE_ASSET: self.derive_base_asset(self.quote_asset, k['s']),
                fields.SYMBOL: k['s'],
                fields.PRICE: float(k['c'])
            }
            r.table(db_const.PRICES_TABLE).insert(
                price, conflict="replace").run(self.conn)

        except Exception as e:
            print(e)
Example #3
def _get_stop_times_by_vehicle(vehicle, stop_start, stop_end, route, direction, time=None, window=45, index='route_direction_vehicle_time'):
    # Default to the current time at call time; arrow.now() as a default is frozen at import.
    time = time or arrow.now()
    lower_key = [route, direction, vehicle, r.epoch_time(time.replace(minutes=-window).timestamp)]
    upper_key = [route, direction, vehicle, r.epoch_time(time.timestamp)]

    query = r.table('vehicle_stop_times') \
                .between(lower_key, upper_key, index=index) \
                .order_by(index=r.asc(index)) \
                .filter((r.row['stop_id'] == stop_start) | (r.row['stop_id'] == stop_end))

    return list(query.run())
Example #4
def _get_vehicles_by_stop(stop, route, direction, time=None, window=45, index='route_direction_stop_time'):
    # Default to the current time at call time; arrow.now() as a default is frozen at import.
    time = time or arrow.now()
    lower_time = r.epoch_time(time.replace(minutes=-window).timestamp)
    upper_time = r.epoch_time(time.timestamp)

    lower_key = [route, direction, stop, lower_time]
    upper_key = [route, direction, stop, upper_time]

    query = r.table('vehicle_stop_times') \
                .between(lower_key, upper_key, index=index) \
                .order_by(index=r.desc(index))

    return list(query.run())
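Examples #3 and #4 assume compound secondary indexes whose last component is a time, which is what lets .between() bound the window. A sketch of how such an index might be created (the index name is taken from the example; the row field names are assumptions):

import rethinkdb as r

conn = r.connect('localhost', 28015)

# Compound index [route, direction, stop, time]: .between() pins the leading
# components and ranges over the trailing time component.
r.table('vehicle_stop_times').index_create(
    'route_direction_stop_time',
    lambda row: [row['route'], row['direction'], row['stop_id'], row['time']]
).run(conn)
r.table('vehicle_stop_times').index_wait('route_direction_stop_time').run(conn)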
Example #5
def test_delete():
    url = 'httpbin.org/delete'
    delete_data = {
        'nested': {
            'arr': [123.45, ['a', 555], 0.123],
            'str': 'info',
            'number': 135
        },
        'time': r.epoch_time(1000),
        'nil': None
    }
    res = r.http(url, method='DELETE', data=delete_data).run(conn)
    expect_eq(res['json']['nested'], delete_data['nested'])
    expect_eq(
        res['json']['time'],
        datetime.datetime(1970,
                          1,
                          1,
                          0,
                          16,
                          40,
                          tzinfo=res['json']['time'].tzinfo))

    delete_data = '<arbitrary> +%data!$%^</arbitrary>'
    res = r.http(url, method='DELETE', data=delete_data).run(conn)
    expect_eq(res['data'], delete_data)
Example #6
def clear_role(conn, role_id, update_time):
    """Takes a role ID and update_time and removes all role admins,
    role owners, role members, and base role objects in rethinkDB that are
    older than the update_time.
    """
    # NOTE: created_date in rethink is being overwritten on ingestion by
    #   /rbac/ledger_sync/inbound/listener.py and actually tracks the
    #   time the object was last updated/modified. It is NOT the time the
    #   object was created.

    update_time = r.epoch_time(int(update_time) - 1)
    # remove all old entries in role_members with role_id
    (
        r.table("role_members")
        .filter({"role_id": role_id})
        .filter(lambda role_member: role_member["created_date"] < update_time)
        .delete(durability="hard", return_changes=False)
        .run(conn)
    )
    # remove all old entries in role_admins with role_id
    (
        r.table("role_admins")
        .filter({"role_id": role_id})
        .filter(lambda role_admin: role_admin["created_date"] < update_time)
        .delete(durability="hard", return_changes=False)
        .run(conn)
    )
    # remove all old entries in role_owners with role_id
    (
        r.table("role_owners")
        .filter({"role_id": role_id})
        .filter(lambda role_owner: role_owner["created_date"] < update_time)
        .delete(durability="hard", return_changes=False)
        .run(conn)
    )
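A usage sketch for clear_role; the connection details and IDs below are illustrative only:

import rethinkdb as r

conn = r.connect('localhost', 28015, 'rbac')  # hypothetical host/db

# Remove members, admins, and owners of this role recorded before the update.
clear_role(conn, role_id='some-role-id', update_time=1376074395)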
Example #7
    def quotes(self, sids, start=None, end=None, select=None):
        #sids = map(str.lower, map(str, sids))
        select = select if select is not None else []  # avoid a mutable default argument
        if start:
            start = rdb.epoch_time(dna.time_utils.UTC_date_to_epoch(start))
        if end:
            # TODO Give a notice when given end is > database end
            end = rdb.epoch_time(dna.time_utils.UTC_date_to_epoch(end))
        else:
            end = rdb.now()
        if select == 'ohlc':
            # TODO Handle 'adjusted_close'
            select = ['open', 'high', 'low', 'close', 'volume']
        if not isinstance(select, list):
            select = [select]

        return self._load_quotes(sids, start, end, select)
Example #8
def archive_all(age_h=ARCHIVE_TIMEOUT_H, only_preemptible=True):
    """
    Archive all projects on this host that are pre-emptible and haven't
    been edited in age_h hours.
    Make a bup snapshot of every project that has had a snapshot but no backup
    for at least age_h hours.
    """
    def dbg(*m):
        log("archive_all(%s)" % age_h, *m)

    dbg()
    conn = rethinkdb_connection()
    # Query for projects that are on this storage server, are preemptible, are NOT running,
    # and have not been edited for a while.
    query = rethinkdb.db('smc').table('projects').get_all(
        STORAGE_SERVER, index='storage_server').filter({'run': False})
    if only_preemptible:
        query = query.filter({'preemptible': True})
    if age_h:
        cutoff = time.time() - age_h * 60 * 60
        query = query.filter(
            rethinkdb.row["last_edited"] <= rethinkdb.epoch_time(cutoff))
    query = query.pluck('project_id')
    v = list(query.run(conn))
    dbg("queried database and found %s projects to archive" % v)
    for x in v:
        project_id = x['project_id']
        try:
            archive(project_id)
            dbg("archive_all -- successfully archived", project_id)
        except Exception as err:
            # TODO: we need this to get seen by a human!
            dbg("archive_all - ERROR", project_id, err)
Example #9
def do_add_token():
    name = request.form.get('name', None)
    if name is None or len(name) < 5:
        return redirect(url_for('.add_token', error="Missing or too short name (at least 5 chars)"))
    r.table('tokens').insert(
        {'name': name, 'last_used': r.epoch_time(0).to_iso8601(), 'id': generate_token()}).run(g.rdb_conn)
    return redirect('tokens')
Example #10
    def quotes(self, sids, start=None, end=None, select=None):
        #sids = map(str.lower, map(str, sids))
        select = select if select is not None else []  # avoid a mutable default argument
        if start:
            start = rdb.epoch_time(dna.time_utils.UTC_date_to_epoch(start))
        if end:
            # TODO Give a notice when given end is > database end
            end = rdb.epoch_time(dna.time_utils.UTC_date_to_epoch(end))
        else:
            end = rdb.now()
        if select == 'ohlc':
            # TODO Handle 'adjusted_close'
            select = ['open', 'high', 'low', 'close', 'volume']
        if not isinstance(select, list):
            select = [select]

        return self._load_quotes(sids, start, end, select)
Example #11
    def new(self, id, token, raw_password=None):
        old_user = {
            'token': token,
            'lolverified': False,
            'draft_points': 5000,
        }
        if raw_password is not None:
            old_user.update({'password': encrypt(raw_password)})
        else:
            old_user.update({'password': None})

        new_user = old_user
        new_user['id'] = id
        # Use UTC: datetime.now() is local time and would skew the epoch offset.
        delta = datetime.utcnow() - datetime(1970, 1, 1)
        new_user['created'] = r.epoch_time(delta.total_seconds())
        if self.is_async:  # renamed from `async`, a reserved word since Python 3.7
            exists = yield self.exists(id)
            if exists:
                rv = yield self.update(id, old_user)
            else:
                rv = yield self.insert(new_user)
        else:
            if self.exists(id):
                rv = self.update(id, old_user)
            else:
                rv = self.insert(new_user)
        return rv
Example #12
def remove_expired_tickets():
    conn = yield connection()
    expiration_time = int(CONFIG.get('ticket_expiration'))
    safeDate = datetime.now() - timedelta(seconds=expiration_time)
    # strftime("%s") is a non-portable glibc extension; timestamp() is equivalent and portable
    safeDate = r.epoch_time(int(safeDate.timestamp()))
    result = yield r.table('reservations').\
        filter(r.row['reserved_on'] < safeDate).delete().run(conn)
    return result
Example #13
def remove_expired_tickets():
    conn = yield connection()
    expiration_time = int(CONFIG.get('ticket_expiration'))
    safeDate = datetime.now() - timedelta(seconds=expiration_time)
    # strftime("%s") is a non-portable glibc extension; timestamp() is equivalent and portable
    safeDate = r.epoch_time(int(safeDate.timestamp()))
    result = yield r.table('reservations').\
        filter(r.row['reserved_on'] < safeDate).delete().run(conn)
    return result
Example #14
def rethink_datetime(value):
    """ Converts a datetime.datetime to a rethinkDB compatible datetime """
    if (not isinstance(value, datetime.datetime) or value.year < 1970
            or value.year > 2200):
        return None
    epoch_zero = datetime.datetime(1970, 1, 1, tzinfo=value.tzinfo)
    seconds = (value - epoch_zero).total_seconds()
    # r.epoch_time expects seconds, not milliseconds; the original multiplied by 1000
    return r.epoch_time(seconds)
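A quick sanity check of rethink_datetime against a fixed UTC instant (values illustrative):

import datetime

# 2021-01-01T00:00:00Z is 1609459200 seconds after the epoch, so the returned
# term should be r.epoch_time(1609459200.0).
term = rethink_datetime(datetime.datetime(2021, 1, 1, tzinfo=datetime.timezone.utc))

# Years outside 1970-2200 are rejected:
assert rethink_datetime(datetime.datetime(1969, 12, 31)) is None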
Example #15
def _get_vehicles_by_stop(stop,
                          route,
                          direction,
                          time=None,
                          window=45,
                          index='route_direction_stop_time'):
    # Default to the current time at call time; arrow.now() as a default is frozen at import.
    time = time or arrow.now()
    lower_time = r.epoch_time(time.replace(minutes=-window).timestamp)
    upper_time = r.epoch_time(time.timestamp)

    lower_key = [route, direction, stop, lower_time]
    upper_key = [route, direction, stop, upper_time]

    query = r.table('vehicle_stop_times') \
                .between(lower_key, upper_key, index=index) \
                .order_by(index=r.desc(index))

    return list(query.run())
Example #16
def refresh_quakes():
    # `url` is assumed to be defined at module scope (an earthquake GeoJSON feed)
    conn = r.connect(host=RDB_HOST, port=RDB_PORT, db=RDB_DB)
    r.table("quakes").insert(
        r.http(url)["features"].merge({
            "time": r.epoch_time(r.row["properties"]["time"] / 1000),
            "geometry": r.point(
                r.row["geometry"]["coordinates"][0],
                r.row["geometry"]["coordinates"][1])}),
        conflict="replace").run(conn)
    conn.close()
Example #17
def flush(conn, storage_period):
    client = Client("", "")
    time_res = client.get_server_time()
    try:
        if time_res['serverTime'] > 2:
            print("Deleting old records")
            cutoff_time = (time_res['serverTime'] / 1000) - storage_period

            tables = r.table_list().run(conn)

            # Purge every known table whose rows have outlived the storage period.
            for table in (db_const.FEATURES_TABLE, db_const.TRADES_TABLE,
                          db_const.EXECUTION_TABLE, db_const.BALANCES_TABLE):
                if table in tables:
                    r.table(table)\
                        .filter(
                            r.row[fields.EPOCH_EVENT_TIME] < r.epoch_time(cutoff_time)
                        )\
                        .delete()\
                        .run(conn)
    except Exception as e:
        print(e)
Example #18
def _get_stop_times_by_vehicle(vehicle,
                               stop_start,
                               stop_end,
                               route,
                               direction,
                               time=None,
                               window=45,
                               index='route_direction_vehicle_time'):
    # Default to the current time at call time; arrow.now() as a default is frozen at import.
    time = time or arrow.now()
    lower_key = [
        route, direction, vehicle,
        r.epoch_time(time.replace(minutes=-window).timestamp)
    ]
    upper_key = [route, direction, vehicle, r.epoch_time(time.timestamp)]

    query = r.table('vehicle_stop_times') \
                .between(lower_key, upper_key, index=index) \
                .order_by(index=r.asc(index)) \
                .filter((r.row['stop_id'] == stop_start) | (r.row['stop_id'] == stop_end))

    return list(query.run())
Example #19
    def post(self, app=flask.current_app):
        conn = app.rdb.conn
        args = self.create_parser.parse_args()
        game = dict(
            name=args.name,
            updated=r.epoch_time(time.time()),
            players=[None] * args.players
        )
        result = app.games_tbl.insert(game).run(conn)
        game['container_id'] = result['generated_keys'][0]
        return app.games_tbl.get(result['generated_keys'][0]).run(conn)
Example #20
    def _build_datapoints(self, ids, keys, dt_from, dt_to=None):
        dt_from = rdb.epoch_time(dt_from)
        dt_to = rdb.epoch_time(dt_to) if dt_to else rdb.now()

        data = []
        for portfolio_id in ids:
            for key in keys:
                pfs = rdb.table(portfolio_id).filter(
                    lambda pf: pf['date'].during(
                        dt_from, dt_to)).run(self.session)

                path = key.split('.')
                data.append(
                    {
                        'target': '{}.{}'.format(portfolio_id, path[1]),
                        'datapoints': sorted(
                            [_format_datapoints(pf, key) for pf in pfs],
                            key=lambda point: point[1])
                    })
        return data
Example #21
    async def get_frame(self, start, last_time, assets):
        # Create an index for the time range
        time_index = pd.to_datetime(list(range(start, round(last_time), 60)),
                                    unit='s')
        time_index = time_index.round('min')

        # Get the klines for this period
        rec = r.table(self.feature_table)\
        .between(
            r.epoch_time(start),
            r.epoch_time(last_time),
            index='epochEventTime'
        )\
        .pluck(
            'baseAsset',
            'startTime',
            'close',
            'open',
            'high',
            'low'
        )\
        .filter(lambda doc:
            r.expr(assets)
                .contains(doc["baseAsset"])
        )\
        .run(self.conn)

        f = pd.DataFrame(rec)
        f['startTime'] = pd.to_datetime(f['startTime'], unit='ms')
        f.set_index(self.index_columns, inplace=True)

        # Reindex the dataframe on the full (asset, time) index product
        ind = pd.MultiIndex.from_product([f.index.levels[0], time_index],
                                         names=self.index_columns)
        f = f.reindex(ind)

        # Fill missing values: forward-fill, then back-fill any leading gap
        f = f.fillna(axis=0, method="ffill")\
             .fillna(axis=0, method="bfill")

        return f
Example #22
def remove_expired_tickets():
    conn = yield connection()
    expiration_time = int(CONFIG.get("ticket_expiration"))
    safeDate = datetime.now() - timedelta(seconds=expiration_time)
    # strftime("%s") is a non-portable glibc extension; timestamp() is equivalent and portable
    safeDate = r.epoch_time(int(safeDate.timestamp()))
    result = (
        yield r.table("reservations")
        .filter((r.row["confirmation_code"] == "") & (r.row["reserved_on"] < safeDate))
        .delete()
        .run(conn)
    )
    return result
Example #23
def quakesJSON():
    conn = r.connect(**config["database"])
    output = r.table("quakes") \
                .group(r.epoch_time(r.row["properties"]["time"] / 1000).date()) \
                .ungroup().merge({"count": r.row["reduction"].count()}) \
                .run(conn)

    conn.close()
    return json.dumps([
        OrderedDict([["date", item["group"].strftime("%D")],
                     ["count", item["count"]]]) for item in output
    ])
Example #24
    async def get_normalised_features(self, start, last_time, assets):
        rec = r.table(self.feature_table)\
            .between(
                r.epoch_time(start),
                r.epoch_time(last_time),
                index=fields.EPOCH_EVENT_TIME
            )\
            .pluck(
                fields.BASE_ASSET,
                fields.QUOTE_ASSET,
                fields.CLOSE,
                fields.OPEN,
                fields.HIGH,
                fields.LOW,
                fields.VOLUME,
                fields.TRADES
            )\
            .filter(lambda doc:
                r.expr(assets)
                    .contains(doc[fields.BASE_ASSET])
            )\
            .run(self.conn)
        return rec  # the original snippet ends at .run(); returning the cursor is assumed
Example #25
    def runTest(self):
        c = r.connect(port=self.port)

        r.db('test').table_create('times').run(c)

        time1 = 1375115782.24
        rt1 = r.epoch_time(time1).in_timezone('+00:00')
        dt1 = datetime.datetime.fromtimestamp(time1, r.ast.RqlTzinfo('+00:00'))
        time2 = 1375147296.68
        rt2 = r.epoch_time(time2).in_timezone('+00:00')
        dt2 = datetime.datetime.fromtimestamp(time2, r.ast.RqlTzinfo('+00:00'))

        res = r.table('times').insert({'id':0,'time':rt1}).run(c)
        self.assertEqual(res['inserted'], 1)
        res = r.table('times').insert({'id':1,'time':rt2}).run(c)
        self.assertEqual(res['inserted'], 1)

        expected_row1 = {'id':0,'time':dt1}
        expected_row2 = {'id':1,'time':dt2}

        groups = r.table('times').group('time').coerce_to('array').run(c)
        self.assertEqual(groups, {dt1:[expected_row1],dt2:[expected_row2]})
Example #26
    async def _get_top_assets(self,
                              asset_num,
                              start_time,
                              end_time,
                              selection_method='s2vol'):
        q = r.table(self.feature_table)\
        .between(
            r.epoch_time(start_time),
            r.epoch_time(end_time),
            index='epochEventTime'
        )

        if selection_method == "s2vol":
            taq = self.get_top_assets_by_s2vol(q, asset_num)
        elif selection_method == "s2":
            taq = self.get_top_assets_by_s2vol(q, asset_num)
        else:
            raise ValueError("Selection method not valid")

        top_assets = taq.run(self.conn)

        return top_assets
Example #27
    def runTest(self):
        c = r.connect(port=self.port)

        r.db('test').table_create('times').run(c)

        time1 = 1375115782.24
        rt1 = r.epoch_time(time1).in_timezone('+00:00')
        dt1 = datetime.datetime.fromtimestamp(time1, r.ast.RqlTzinfo('+00:00'))
        time2 = 1375147296.68
        rt2 = r.epoch_time(time2).in_timezone('+00:00')
        dt2 = datetime.datetime.fromtimestamp(time2, r.ast.RqlTzinfo('+00:00'))

        res = r.table('times').insert({'id': 0, 'time': rt1}).run(c)
        self.assertEqual(res['inserted'], 1)
        res = r.table('times').insert({'id': 1, 'time': rt2}).run(c)
        self.assertEqual(res['inserted'], 1)

        expected_row1 = {'id': 0, 'time': dt1}
        expected_row2 = {'id': 1, 'time': dt2}

        groups = r.table('times').group('time').coerce_to('array').run(c)
        self.assertEqual(groups, {dt1: [expected_row1], dt2: [expected_row2]})
Example #28
    def new(self, user, match, team, points):
        # Use UTC: datetime.now() is local time and would skew the epoch offset.
        delta = datetime.utcnow() - datetime(1970, 1, 1)
        wager = {
            'created': r.epoch_time(delta.total_seconds()),
            'user': user,
            'match': match,
            'team': team,
            'points': points,
            'settled': False,
            'payout': 0,
            'match_data': None,
        }
        rv = yield self.insert(wager)
        return rv
Example #29
def on_message(client, userdata, msg):
    try:
        payload = urlparse.parse_qs(urlparse.urlsplit(msg.payload).path)
        payload = {key: dict(zip(["value", "unit"], values[0].split(" ")))
                   for key, values in payload.items()}
        payload.pop("heap", None)
        doc = dict(zip(["meter_id", "unix_time"], [int(val) for val in msg.topic.split("/")[3:]]))
        doc['unix_time'] = r.epoch_time(doc["unix_time"])
        doc["payload"] = payload
        if payload:
            res = r_table.insert(doc).run()
            if not res["inserted"] == 1:
                # Don't shadow `msg`: the except handler below still needs msg.topic.
                error = "not inserted! topic:{} payload:{}".format(msg.topic, msg.payload)
                raise ValueError(error)
    except Exception:
        logger.exception(msg.topic)
Example #30
def pre_filter(resource):
    """ Filter or modifies values prior to writing them to the rethink sync tables
        1. Changes dates from Int64 to a DateTime (Int64 would otherwise get translated to a string)
    """
    keys = [key for key in resource]
    for key in keys:
        if key.endswith("_date"):
            try:
                value = resource[key]
                if value and int(value) != 0:
                    resource[key] = r.epoch_time(int(value))
                else:
                    del resource[key]
            except Exception:  # pylint: disable=broad-except
                del resource[key]
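A before/after sketch of pre_filter on a hypothetical resource dict:

resource = {
    "name": "alice",
    "created_date": "1514764800",    # parseable and non-zero: becomes r.epoch_time(1514764800)
    "expired_date": 0,               # zero: dropped
    "modified_date": "not-a-number", # unparseable: dropped
}
pre_filter(resource)
# resource is now {"name": "alice", "created_date": r.epoch_time(1514764800)}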
Example #31
def api_dashboard():
	""" Returns data for dashboards """

	# Get data for timeline
	analysis_count_graph = []
	for i in range(7):
		start = arrow.utcnow().replace(days=-i).floor('day').timestamp
		end = arrow.utcnow().replace(days=-i).ceil('day').timestamp
		count = r.table('analysis').filter((r.row['date'] < r.epoch_time(end)) & (r.row['date'] > r.epoch_time(start))).count().run(flask.g.rdb_conn)
		# JavaScript Date object expects timestamp * 1000
		analysis_count_graph.append([(start * 1000), count])

	return flask.jsonify(
		analysis_graph=analysis_count_graph[::-1],
		analysis_count=r.table('analysis').count().run(flask.g.rdb_conn),
		success_count=r.table('analysis').filter(r.row['job_status']['state'] == 'SUCCESS').count().run(flask.g.rdb_conn),
		progress_count=r.table('analysis').filter(r.row['job_status']['state'] == 'PROGRESS').count().run(flask.g.rdb_conn),
		pending_count=r.table('analysis').filter(r.row['job_status']['state'] == 'PENDING').count().run(flask.g.rdb_conn),
		error_count=r.table('analysis').filter(r.row['job_status']['state'] == 'ERROR').count().run(flask.g.rdb_conn),
		exec_time=round(r.table('analysis').avg('exec_time').default(0).run(flask.g.rdb_conn))
	)
Example #32
def clean_movement_message(msg, msg_type, conn):
    extras = {'type': msg_type}
    body = msg['body']

    for key in body.keys():
        if key.endswith('_stanox'):
            logger.debug('Train {}: Lookup stanox {} for field {}'.format(body['train_id'], body[key], key))
            extras = merge(extras, get_geo(body[key], key[:-len('_stanox')], conn))

        if key.endswith('_timestamp'):
            try:
                logger.debug('Converting timestamp for field {}'.format(key))
                intval = int(body[key])
                extras[key] = r.epoch_time(intval / 1000.0)
            except (ValueError, TypeError):
                # Not a numeric timestamp; leave the raw value as-is.
                pass

        if body[key] == 'true' or body[key] == 'false':
            extras[key] = bool(body[key] == 'true')

    return merge(body, extras)
Example #33
import rethinkdb as r

r.connect('localhost', 28015).repl()

# Delete tweets older than 48 hours (172800 s). The original misplaced .run()
# inside the filter expression; in ReQL, time minus time yields a number of seconds.
r.table('waitingTweets').filter(
    r.row['created_at'].to_epoch_time() <= (r.now() - r.epoch_time(172800))
).delete().run()
Example #34
# for tweet in tweets_json:
for tweet in collection:
    if compteur % 100 == 0:
        print(compteur)
    compteur = compteur + 1
    circle1 = r.circle(
        [tweet["coordinates"]["coordinates"][0],
         tweet["coordinates"]["coordinates"][1]],
        1000, unit='m')
    events = r.table('events').get_intersecting(
        circle1, index='coordinates').filter(
            r.row['startDate'].to_epoch_time() > (tweet["created_at"] -
                                                  r.epoch_time(172800))).run()
    listEvents = list(events)
    if len(listEvents) > 0:
        for i in range(len(listEvents)):
            if "extended_tweet" in tweet.keys():
                clean = clean_tweet(tweet["extended_tweet"]["full_text"],
                                    tweet["extended_tweet"]["entities"])
            else:
                clean = clean_tweet(tweet["text"], tweet["entities"])
            stem_words = stemTweet(clean)
            if len(stem_words) > 0:
                if get_tweet_ranking_score(listEvents[0]["wordsList"],
                                           listEvents[0]["wordsCount"],
                                           stem_words) > 2:
                    update_event(listEvents[0], tweet)
                    # delete the old event
Example #35
def test_put():
    url = 'httpbin.org/put'
    put_data = {
        'nested': {
            'arr': [123.45, ['a', 555], 0.123],
            'str': 'info',
            'number': 135,
            'nil': None
        },
        'time': r.epoch_time(1000)
    }
    res = r.http(url, method='PUT', data=put_data).run(conn)
    expect_eq(res['json']['nested'], put_data['nested'])
    expect_eq(res['json']['time'], datetime.datetime(1970, 1, 1, 0, 16, 40, tzinfo=res['json']['time'].tzinfo))

    put_data = '<arbitrary> +%data!$%^</arbitrary>'
    res = r.http(url, method='PUT', data=put_data).run(conn)
    expect_eq(res['data'], put_data)
Example #36
)

LOGGER = get_default_logger(__name__)
SERVER = Server("my_fake_server", get_info=OFFLINE_AD_2012_R2)
OUTBOUND_ENTRY_CASES = [
    (
        {
            "data": {
                "description":
                "The role that keeps on rolling",
                "members": [],
                "remote_id":
                "CN=Rolling_Role,OU=Roles,OU=Security,OU=Groups,DC=AD2012,DC=LAB",
            },
            "data_type": "group",
            "timestamp": r.epoch_time(1376074395.012),
            "provider_id": "test_provider",
            "status": "UNCONFIRMED",
            "action": "",
        },
        {
            "data": {
                "description":
                "The role that keeps on rolling",
                "members": [],
                "remote_id":
                "CN=Rolling_Role,OU=Roles,OU=Security,OU=Groups,DC=AD2012,DC=LAB",
            },
            "data_type": "group",
            "timestamp": dt.fromtimestamp(1376074395.012, tz=pytz.utc),
            "provider_id": "test_provider",
Example #37
    def getTime(self):
        time = r.epoch_time(self.updated_at['epoch_time']).run()
        return time
Example #38
def chan_message(channel, message):
    """
    If you GET this endpoint, go to /api/v1/channel/<channel>/message/<message>
    with <channel> replaced by the channel you want and <message> by the
    message ID you want to look up. Will return the raw packet for the
    message as well, unlike /channel/<channel>/message

    If you POST this endpoint:
        Go to /api/v1/channel/<channel>/message/<message> with <channel>
        replaced by the channel wanted & <message> by the ID of the
        message you want to edit or create

        Parameters:
            - message:      Raw message contents in string
            - timestamp:    A Unix-epoch int of when the message was sent
            - userId:       The user ID who sent the message
            - packet:       The raw JSON packet in string form from Beam
    """

    model = "Message"
    errors = []

    required_parameters = ["message", "timestamp", "userId", "packet"]

    # Check that every required parameter was supplied (the original iterated
    # over the supplied values, which inverted the check).
    for param in required_parameters:
        if param not in request.values:
            errors.append(
                generate_error(
                    uid=uuid4(),
                    status="400",
                    title="Incorrect parameters for endpoint",
                    detail="Missing required {} parameter".format(param),
                    source={"pointer": request.path}
                )
            )

    channel = int(channel) if channel.isdigit() else channel

    fields = {"channelId": channel, "messageId": message}

    data = {
        key: request.values.get(key) for key in request.values
    }
    data.update(**fields)
    data.update({
        "createdAt": rethink.epoch_time(
            int(request.values.get("timestamp", time.time()))
        )
    })

    data = {key: unescape(data[key]) for key in data
            if isinstance(data[key], str)}

    response = generate_response(
        model,
        request.path,
        request.method,
        request.values,
        data=data,
        fields=fields
    )

    packet = response[0]

    return make_response(jsonify(response[0]), response[1])
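The docstring above spells out the POST contract; a client-side sketch with the requests library (host, channel, and message IDs are placeholders):

import time
import requests

# All four parameters are required by the endpoint.
resp = requests.post(
    "http://localhost:5000/api/v1/channel/1234/message/abcd",
    data={
        "message": "hello world",
        "timestamp": int(time.time()),  # Unix epoch seconds
        "userId": 42,
        "packet": '{"type": "message"}',
    },
)
print(resp.status_code, resp.json())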
Example #39
    def getTime(self):
        time = r.epoch_time(self.updated_at['epoch_time']).run()
        return time
Example #40
import random
import time
import requests
import rethinkdb as r
from telegram import Bot
from config import TELEGRAM_BOT_TOKEN, DB_HOST, DB_PORT, DB_NAME

db = r.connect(DB_HOST, DB_PORT, DB_NAME)
bot = Bot(TELEGRAM_BOT_TOKEN)

for user in r.table('users').run(db):
    for subscribe in user['value']:
        express = r.table('traces').get(subscribe).run(db)
        if express['state'] in ['3', '4']:
            continue
        data = requests.get('http://m.kuaidi100.com/query?type=%s&postid=%s&id=1&valicode=&temp=%s' % (
            express['com'], express['id'], str(random.random()))).json()
        if len(express['data']) == len(data['data']):
            continue
        data['id'] = data['nu']
        del data['nu']
        for each in data['data']:
            each['time'] = r.epoch_time(time.mktime(time.strptime(each['time'], '%Y-%m-%d %H:%M:%S')))
        r.table('traces').get(express['id']).update(data).run(db)
        text = '快递 {0} 有更新\n\n'.format(data['id'])
        for i in data['data'][:len(data['data']) - len(express['data'])]:
            text += '{0}\n{1}\n'.format(
                i['context'],
                i['ftime']
            )
        bot.sendMessage(user['id'], text)
Example #41
def updateDB(json_data):
    json_data['id'] = json_data['nu']
    del json_data['nu']
    for each in json_data['data']:
        each['time'] = r.epoch_time(time.mktime(time.strptime(each['time'], '%Y-%m-%d %H:%M:%S')))
    r.table('traces').get(json_data['id']).update(json_data).run(db)