Example #1
 def interpolate(self, resolution, missing=False, limit=None):
     self.logger.debug('Interpolating some data')
     result = self.raw.copy()
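     # Snap the interpolation range onto the resolution grid: round the first raw
     # timestamp up and the last one down so every interpolated point lies inside
     # the observed data.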
     first, last = math.ceil(
         min(self.raw['timestamp']) / resolution) * resolution, math.floor(
             max(self.raw['timestamp']) / resolution) * resolution
     self.logger.debug("from %s to %s" %
                       tuple(utils.datetime_from_timestamp([first, last])))
     result['timestamp'] = np.arange(first, last, resolution)
     result['value'] = np.interp(result['timestamp'], self.raw['timestamp'],
                                 self.raw['value'])
     result['datetime'] = utils.datetime_from_timestamp(result['timestamp'])
     if missing:
         if limit is None:
             limit = resolution * 2
         mymissing = np.array([False] * len(result['timestamp']),
                              dtype=bool)
         mygaps = self.gaps(self.raw['timestamp'], limit)
         for g in mygaps:
             self.logger.debug("Flagging gap %s as missing" % g)
             a = (result['timestamp'] >= g['from']) & (result['timestamp'] <= g['to'])
             mymissing[a] = True
         result['missing'] = mymissing
         result['gaps'] = mygaps
     result['interpolated'] = {
         'resolution': resolution,
         'missing': missing,
         'limit': limit
     }
     return result
Example #2
def process_comment(comment):
    body = comment['body'].lower().strip()

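    # First pass: the single-reminder trigger may appear anywhere in the comment;
    # the split trigger checked further below must start a line.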
    single_trigger_found = False
    single_string_found = False
    single_date_found = False
    if trigger_in_text(body, trigger_single):
        single_trigger_found = True

    if single_trigger_found:
        time_string, target_date = parse_comment(
            comment['body'], trigger_single,
            utils.datetime_from_timestamp(comment['created_utc']))
        if time_string is not None:
            single_string_found = True
        if target_date is not None:
            single_date_found = True

    split_trigger_found = False
    split_string_found = False
    split_date_found = False
    if trigger_start_of_line(body, trigger_split):
        split_trigger_found = True

    if split_trigger_found:
        time_string, target_date = parse_comment(
            comment['body'], trigger_split,
            utils.datetime_from_timestamp(comment['created_utc']))
        if time_string is not None:
            split_string_found = True
        if target_date is not None:
            split_date_found = True

    return single_trigger_found, single_string_found, single_date_found, split_trigger_found, split_string_found, split_date_found
Example #3
def user_stats(connection=None, cursor=None):
    if connection is None or cursor is None:
        connection, cursor = utils.connect_to_db(
            "study", cursor_type=psycopg2.extras.DictCursor)

    query_string = sql.SQL("""
      SELECT COUNT(p.place_id) as nb_places, s.ip_address, s.session_start, s.session_end
      FROM places p JOIN sessions s ON s.session_id = p.session_id
      GROUP BY s.session_id, s.ip_address, s.session_start, s.session_end;""")
    cursor.execute(query_string)
    res = []
    for record in cursor:
        res.append({
            'nb_places': record['nb_places'],
            'ip_address': record['ip_address'],
            'start': 'N/A' if record['session_start'] is None else
                utils.datetime_from_timestamp(record['session_start']).strftime("%Y-%m-%d %H:%M:%S"),
            'end': 'N/A' if record['session_end'] is None else
                utils.datetime_from_timestamp(record['session_end']).strftime("%Y-%m-%d %H:%M:%S")
        })

    return res
Example #4
def process_cakeday_message(message, reddit, database):
    log.info("Processing cakeday")

    if database.user_has_cakeday_reminder(message.author.name):
        log.info("Cakeday already exists")
        return ["It looks like you already have a cakeday reminder set."
                ], False

    next_anniversary = utils.get_next_anniversary(message.author.created_utc)

    reminder = Reminder(source=utils.message_link(message.id),
                        message=static.CAKEDAY_MESSAGE,
                        user=database.get_or_add_user(message.author.name),
                        requested_date=utils.datetime_from_timestamp(
                            message.created_utc),
                        target_date=next_anniversary,
                        recurrence="1 year",
                        defaulted=False)

    database.add_reminder(reminder)
    database.commit()

    log.info(
        f"Cakeday reminder created: {reminder.id} : {utils.get_datetime_string(reminder.target_date)}"
    )

    bldr = reminder.render_message_confirmation(
        None, pushshift_minutes=reddit.get_effective_pushshift_lag())
    return [''.join(bldr)], True
Example #5
def process_remind_me(message, database):
    log.info("Processing RemindMe message")
    time = utils.find_reminder_time(message.body)

    message_text = utils.find_reminder_message(message.body)

    reminder = Reminder(
        source=utils.message_link(message.id),
        message=message_text,
        user=message.author.name,
        requested_date=utils.datetime_from_timestamp(message.created_utc),
        time_string=time,
        timezone=database.get_settings(message.author.name).timezone)
    if not reminder.valid:
        log.debug("Reminder not valid, returning")
        return [reminder.result_message]

    if not database.save_reminder(reminder):
        log.info("Something went wrong saving the reminder")
        return ["Something went wrong saving the reminder"]

    log.info(
        f"Reminder created: {reminder.db_id} : {utils.get_datetime_string(reminder.target_date)}"
    )

    return reminder.render_message_confirmation()
Example #6
def parse_comment(comment, database, count_string):
    if comment['author'] == static.ACCOUNT_NAME:
        log.debug("Comment is from remindmebot")
        return None

    log.info(
        f"{count_string}: Processing comment {comment['id']} from u/{comment['author']}"
    )
    body = comment['body'].lower()
    if f"{static.TRIGGER_LOWER}!" not in body and f"!{static.TRIGGER_LOWER}" not in body:
        log.debug("Command not in comment")
        return None

    time = utils.find_reminder_time(comment['body'])

    message_text = utils.find_reminder_message(comment['body'])

    reminder = Reminder(
        source=utils.reddit_link(comment['permalink']),
        message=message_text,
        user=comment['author'],
        requested_date=utils.datetime_from_timestamp(comment['created_utc']),
        time_string=time,
        timezone=database.get_settings(comment['author']).timezone)
    if not reminder.valid:
        return None

    if not database.save_reminder(reminder):
        reminder.result_message = "Something went wrong saving the reminder"
        reminder.valid = False
        log.warning(reminder.result_message)

    return reminder
Example #7
def process_remind_me(message, reddit, database, recurring):
    log.info("Processing RemindMe message")
    trigger = static.TRIGGER_RECURRING_LOWER if recurring else static.TRIGGER_LOWER
    time = utils.find_reminder_time(message.body, trigger)

    message_text = utils.find_reminder_message(message.body, trigger)

    reminder, result_message = Reminder.build_reminder(
        source=utils.message_link(message.id),
        message=message_text,
        user=database.get_or_add_user(message.author.name),
        requested_date=utils.datetime_from_timestamp(message.created_utc),
        time_string=time,
        recurring=recurring)
    if reminder is None:
        log.debug("Reminder not valid, returning")
        return [result_message], False

    database.add_reminder(reminder)
    database.commit()

    log.info(
        f"Reminder created: {reminder.id} : {utils.get_datetime_string(reminder.target_date)}"
    )

    bldr = reminder.render_message_confirmation(
        result_message, pushshift_minutes=reddit.get_effective_pushshift_lag())
    return [''.join(bldr)], True
Example #8
def checkUnreadMessages(workers=10):
	lastSeenKey = "messages"
	lastSeen = utils.get_last_seen(lastSeenKey)
	lastMessageAt = utils.get_last_seen(lastSeenKey, True)
	
	inbox = config.reddit.inbox
	pool = ThreadPoolExecutor(max_workers=workers)
	messages = list(inbox.unread(limit=None))

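	# Items come back newest-first, so stop once we reach something older than the last message already processed.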
	for item in messages:
		isComment = isinstance(item, Comment)

		createdAt = item.created_utc
		if lastSeen >= utils.datetime_from_timestamp(createdAt):
			break

		if createdAt > lastMessageAt:
			logging.info(f"Found new last message: {utils.datetime_from_timestamp(createdAt)}")
			lastMessageAt = createdAt

		if item.new:
			logging.info(f"Sending unread item to be processed: itemCreatedAt={createdAt}")
			pool.submit(actions.processUnreadItem, item)
	
	if messages:
		utils.set_last_seen(lastSeenKey, lastMessageAt)
		inbox.mark_read(messages)

	return pool
Example #9
    def get_keyword_comments(self, keyword, last_seen):
        if not len(self.processed_comments.list):
            last_seen = last_seen + timedelta(seconds=1)

        log.debug(
            f"Fetching comments for keyword: {keyword} : {utils.get_datetime_string(last_seen)}"
        )
        url = f"https://api.pushshift.io/reddit/comment/search?q={keyword}&limit=100&sort=desc"
        try:
            json = requests.get(url,
                                headers={'User-Agent': static.USER_AGENT},
                                timeout=10)
            if json.status_code != 200:
                log.warning(
                    f"Could not parse data for search term: {keyword} status: {str(json.status_code)}"
                )
                return []
            comments = json.json()['data']

            if self.timeout_warn_threshold > 1:
                log.warning(
                    f"Recovered from timeouts after {self.consecutive_timeouts} attempts"
                )

            self.consecutive_timeouts = 0
            self.timeout_warn_threshold = 1

        except requests.exceptions.ReadTimeout:
            self.consecutive_timeouts += 1
            if self.consecutive_timeouts >= pow(self.timeout_warn_threshold,
                                                2) * 5:
                log.warning(
                    f"{self.consecutive_timeouts} consecutive timeouts for search term: {keyword}"
                )
                self.timeout_warn_threshold += 1
            return []

        except Exception as err:
            log.warning(f"Could not parse data for search term: {keyword}")
            log.warning(traceback.format_exc())
            return []

        if not len(comments):
            log.warning(f"No comments found for search term: {keyword}")
            return []

        result_comments = []
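        # Results are sorted newest-first (sort=desc), so stop at the first comment
        # older than last_seen; everything after it has already been processed.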
        for comment in comments:
            date_time = utils.datetime_from_timestamp(comment['created_utc'])
            if last_seen > date_time:
                break

            if not self.processed_comments.contains(comment['id']):
                result_comments.append(comment)

        log.debug(f"Found comments: {len(result_comments)}")
        return result_comments
Example #10
def process_comments(reddit, database):
    comments = reddit.get_keyword_comments(
        static.TRIGGER_COMBINED,
        database_get_seen(database).replace(tzinfo=None))

    counters.pushshift_delay.labels(client="prod").set(
        reddit.pushshift_prod_client.lag_minutes())
    counters.pushshift_delay.labels(client="beta").set(
        reddit.pushshift_beta_client.lag_minutes())
    counters.pushshift_delay.labels(client="auto").set(
        reddit.get_effective_pushshift_lag())

    if reddit.recent_pushshift_client == PushshiftType.PROD:
        counters.pushshift_client.labels(client="prod").set(1)
        counters.pushshift_client.labels(client="beta").set(0)
    elif reddit.recent_pushshift_client == PushshiftType.BETA:
        counters.pushshift_client.labels(client="prod").set(0)
        counters.pushshift_client.labels(client="beta").set(1)
    else:
        counters.pushshift_client.labels(client="prod").set(0)
        counters.pushshift_client.labels(client="beta").set(0)

    counters.pushshift_failed.labels(
        client="prod").set(1 if reddit.pushshift_prod_client.failed() else 0)
    counters.pushshift_failed.labels(
        client="beta").set(1 if reddit.pushshift_beta_client.failed() else 0)

    counters.pushshift_seconds.labels("prod").observe(
        reddit.pushshift_prod_client.request_seconds)
    counters.pushshift_seconds.labels("beta").observe(
        reddit.pushshift_beta_client.request_seconds)

    if len(comments):
        log.debug(f"Processing {len(comments)} comments")
    i = 0
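    # Walk the list oldest-first (it arrives newest-first) so the stored seen-timestamp only ever moves forward.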
    for comment in comments[::-1]:
        i += 1
        mark_read = True
        try:
            process_comment(comment, reddit, database, f"{i}/{len(comments)}")
        except Exception as err:
            mark_read = not utils.process_error(
                f"Error processing comment: {comment['id']} : {comment['author']}",
                err, traceback.format_exc())

        if mark_read:
            reddit.mark_keyword_comment_processed(comment['id'])
            database_set_seen(
                database,
                utils.datetime_from_timestamp(comment['created_utc']))
        else:
            return i

    return len(comments)
Example #11
    def to_dictionary_slow(self):
        dictionary = vars(self).copy()
        # strip leading underscores from attribute names
        tmp = {(k[1:] if k.startswith("_") else k): dictionary[k] for k in dictionary.keys()}
        dictionary = tmp
        del dictionary["orders"]
        if "signals" in dictionary:
            del dictionary["signals"]
        dictionary["strategy"] = dictionary["strategy"].get_short_summary()
        dictionary["utilized_signals"] = ", ".join(get_distinct_signal_types(self.order_signals))
        dictionary["start_time"] = datetime_from_timestamp(dictionary["start_time"])
        dictionary["end_time"] = datetime_from_timestamp(dictionary["end_time"])
        dictionary["mean_buy_sell_pair_return"] = self.mean_buy_sell_pair_return

        dictionary["transaction_currency"] = self._end_crypto_currency
        if "horizon" not in vars(self._strategy):
            dictionary["horizon"] = "N/A"
        else:
            dictionary["horizon"] = self._strategy.horizon.name

        if self.end_price is None:
            dictionary["profit"] = "N/A"
            dictionary["profit_percent"] = "N/A"
            dictionary["profit_USDT"] = "N/A"
            dictionary["profit_percent_USDT"] = "N/A"
        else:
            try:
                dictionary["profit"] = self.profit
                dictionary["profit_percent"] = self.profit_percent
                dictionary["profit_USDT"] = self.profit_usdt
                dictionary["profit_percent_USDT"] = self.profit_percent_usdt
            except NoPriceDataException:
                logging.error("No price data!")
                dictionary["profit"] = "N/A"
                dictionary["profit_percent"] = "N/A"
                dictionary["profit_USDT"] = "N/A"
                dictionary["profit_percent_USDT"] = "N/A"
        return dictionary
Example #12
 def __str__(self):
     delta_currency, delta_cash = self.execute()
     return "{0}  \t {1: <16} \t delta_cash -> {2:13.2f} {3} \t " \
            "delta_currency -> {4:13.6f} {5} \t (1 {6} = {7:.8f} {8} {9}), slippage = {10:.2f}". format(
             datetime_from_timestamp(self.timestamp),
             self.order_type,
             delta_cash,
             self.counter_currency,
             delta_currency,
             self.transaction_currency,
             self.transaction_currency,
             self.unit_price,
             self.counter_currency,
             " -> delayed trading with delay = {} seconds, original price = {}".
                 format(self.time_delay, self.original_price) if self.time_delay != 0 else "",
             self.slippage
             )
Example #13
def check_curriculum_updated(uid):
    """Checks if the curriculum has updated within `CURR_UPDATE_TIME_LIMIT`\n
    Return: `bool`"""
    res = db.session.execute(
        text('SELECT * FROM Course.curriculum_check WHERE uid=:uid'),
        {'uid': uid})
    if res.rowcount:
        if res.rowcount > 1:
            raise RuntimeError('More than one uid existed in curriculum_check')
        p = res.fetchone()  # person
        dt = utils.datetime_from_timestamp(p['time'])
        now = utils.datetime_now()
        if DISABLE_CURR_UPDATE_TIME_LIMIT:
            return False
        return now - dt < CURR_UPDATE_TIME_LIMIT
    else:
        return False
Example #14
    def load_schedule_data_if_not_expired(uid):
        """Load the schedule data in dict if not expired"""
        # Get time and time check
        user = models.get_user(uid)
        # if the schedule needed to update (course insert/delete)
        if user['schedule_data_changed']:
            db.session.execute(text('UPDATE user SET schedule_data_changed=0 WHERE uid=:uid'), {'uid': uid})
            db.session.commit()
            return None
        # time expired?
        last = user['schedule_data_time']
        if not last:
            return None
        last = utils.datetime_from_timestamp(last)

        if not Auto_course_insert.DISABLE_TIME_LIMIT and utils.time_limit_check(last, Auto_course_insert.TIME_LIMIT):
            d = Auto_course_insert.load_schedule_data(uid)
            if d:
                return json.loads(d)
        return None
Example #15
    def clean(self, data, sd_limit):
        """
        The ConsumptionCleaner class filters extreme data based on variations in the rate of consumption
        >>> from pyEMIS.DataAccess import adapters
        >>> from pyEMIS.DataCleaning import cleaners
        >>> import numpy as np
        >>> test = adapters.Test()
        >>> cleaner = cleaners.ConsumptionCleaner()
        >>> cleaner #doctest: +ELLIPSIS
        <pyEMIS.DataCleaning.cleaners.ConsumptionCleaner object at 0x...>
        >>> valid = test.timeseries('valid')
        >>> clean = cleaner.clean(valid, 30)
        >>> clean['datetime'] == valid['datetime']
        True
        >>> np.testing.assert_array_almost_equal(clean['value'][1:], valid['value'][1:])
        """
        self.logger.debug('cleaning process started')
        clean = data.copy()
        ts = utils.timestamp_from_datetime(clean['datetime'])
        if ts.size == 0:
            raise NoDataError("Can't clean an empty dataset")
        value = clean['value']

        ts, value = self._remove_invalid_dates(ts, value)
        ts, value = self._trimmed_ends(ts, value)

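        # Extreme-value filtering operates on cumulative (integ) data, so convert
        # movement data first and convert back afterwards.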
        if not clean['integ']:
            self.logger.debug('converting to integ')
            value = utils.integ_from_movement(value)

        ts, value = self._remove_extremes(ts, value, sd_limit)

        if not clean['integ']:
            self.logger.debug('converting back to movement')
            value = utils.movement_from_integ(value)

        clean['timestamp'] = ts
        clean['datetime'] = utils.datetime_from_timestamp(ts)
        clean['value'] = value
        clean['cleaned'] = {'sd_limit': sd_limit}            
        return clean
Example #16
def process_comments(reddit, database):
    comments = reddit.get_keyword_comments(static.TRIGGER_COMBINED,
                                           database_get_seen(database))
    if len(comments):
        log.debug(f"Processing {len(comments)} comments")
    i = 0
    for comment in comments[::-1]:
        i += 1
        try:
            process_comment(comment, reddit, database, f"{i}/{len(comments)}")
        except Exception:
            log.warning(
                f"Error processing comment: {comment['id']} : {comment['author']}"
            )
            log.warning(traceback.format_exc())

        reddit.mark_keyword_comment_processed(comment['id'])
        database_set_seen(
            database, utils.datetime_from_timestamp(comment['created_utc']))

    return len(comments)
Example #17
def process_cakeday_message(message, database):
    log.info("Processing cakeday")

    if database.get_cakeday(message.author.name) is not None:
        log.info("Cakeday already exists")
        return ["It looks like you already have a cakeday reminder set."]

    account_created = utils.datetime_from_timestamp(message.author.created_utc)
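    # Shift the account-creation date to this year's anniversary; if it has already passed, use next year's.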
    next_anniversary = utils.add_years(
        account_created,
        utils.datetime_now().year - account_created.year)
    if next_anniversary < utils.datetime_now():
        next_anniversary = utils.add_years(next_anniversary, 1)
    log.debug(
        f"u/{message.author.name} created {utils.get_datetime_string(account_created)}, "
        f"anniversary {utils.get_datetime_string(next_anniversary)}")

    cakeday = Cakeday(message.author.name, next_anniversary)
    database.add_cakeday(cakeday)

    return cakeday.render_confirmation(
        database.get_settings(message.author.name).timezone)
Example #18
 def authored_by(self):
   return (
     self.transactions[0]['authorPHID'],
     datetime_from_timestamp(self.transactions[0]['dateCreated'])
   )
Example #19
 def __str__(self):
     return ("{} {}-{} strength={} trend={} horizon={} timestamp={} rsi_value={}".format(self.signal_signature, self.transaction_currency, self.counter_currency, self.strength_value,
                                                          self.trend, self.horizon, datetime_from_timestamp(self.timestamp), self.rsi_value))
Example #20
def parse_comment(comment, database, count_string, reddit):
    if comment['author'] == static.ACCOUNT_NAME:
        log.debug("Comment is from remindmebot")
        return None, None
    if comment['author'] in static.BLACKLISTED_ACCOUNTS:
        log.debug("Comment is from a blacklisted account")
        return None, None

    log.info(
        f"{count_string}: Processing comment {comment['id']} from u/{comment['author']}"
    )
    body = comment['body'].lower().strip()
    recurring = False
    cakeday = False
    allow_default = True
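    # Work out which trigger fired (recurring, regular, cakeday or split); this also
    # decides whether a default reminder time is allowed.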
    if trigger_in_text(body, static.TRIGGER_RECURRING_LOWER):
        log.debug("Recurring reminder comment")
        recurring = True
        trigger = static.TRIGGER_RECURRING_LOWER
    elif trigger_in_text(body, static.TRIGGER_LOWER):
        log.debug("Regular comment")
        trigger = static.TRIGGER_LOWER
    elif trigger_start_of_line(body, static.TRIGGER_CAKEDAY_LOWER):
        log.debug("Cakeday comment")
        cakeday = True
        recurring = True
        trigger = static.TRIGGER_CAKEDAY_LOWER
    elif trigger_start_of_line(body, static.TRIGGER_SPLIT_LOWER):
        log.debug("Regular split comment")
        trigger = static.TRIGGER_SPLIT_LOWER
        allow_default = False
    else:
        log.debug("Command not in comment")
        return None, None

    target_date = None
    if cakeday:
        if database.user_has_cakeday_reminder(comment['author']):
            log.info("Cakeday already exists")
            return None, None

        target_date = utils.get_next_anniversary(
            reddit.get_user_creation_date(comment['author']))
        message_text = static.CAKEDAY_MESSAGE
        time = "1 year"

    else:
        time = utils.find_reminder_time(comment['body'], trigger)
        message_text = utils.find_reminder_message(comment['body'], trigger)

    reminder, result_message = Reminder.build_reminder(
        source=utils.reddit_link(comment['permalink']),
        message=message_text,
        user=database.get_or_add_user(comment['author']),
        requested_date=utils.datetime_from_timestamp(comment['created_utc']),
        time_string=time,
        recurring=recurring,
        target_date=target_date,
        allow_default=allow_default)
    if reminder is None:
        return None, None

    if cakeday:
        counters.replies.labels(source='comment', type='cake').inc()
    elif recurring:
        counters.replies.labels(source='comment', type='repeat').inc()
    elif not allow_default:
        counters.replies.labels(source='comment', type='split').inc()
    else:
        counters.replies.labels(source='comment', type='single').inc()

    database.add_reminder(reminder)

    reminder.user.recurring_sent = 0

    return reminder, result_message
Example #21
 def get_shifted_forward(self, seconds, name=None):
     start_time_str = datetime_from_timestamp(self.start_time + seconds)
     end_time_str = datetime_from_timestamp(self.end_time + seconds)
     return Period(start_time_str, end_time_str)
Example #22
 objects = json.json()['data']
 if len(objects) == 0:
     break
 for comment in objects:
     previousEpoch = comment['created_utc'] - 1
     if (comment['author'] not in static.BLACKLISTED_ACCOUNTS
             and comment['subreddit'] != "RemindMeBot"):
         body = comment['body'].lower().strip()
         trigger = None
         if trigger_in_text(body, trigger):
             #log.debug(f"Trigger: https://reddit.com{comment['permalink']}")
             trigger_count += 1
         elif trigger_start_of_line(body, trigger_split):
             time_string, target_date = parse_comment(
                 comment['body'], trigger_split,
                 utils.datetime_from_timestamp(comment['created_utc']))
             trigger_split_start_count += 1
             if time_string is not None:
                 trigger_split_start_string_count += 1
             if target_date is not None:
                 trigger_split_start_date_count += 1
             if time_string is None:
                 log.debug(
                     f"Start no string: https://reddit.com{comment['permalink']}"
                 )
             elif target_date is None:
                 log.debug(
                     f"Start no date  : https://reddit.com{comment['permalink']}"
                 )
         elif trigger_in_text(body, trigger_split):
             time_string, target_date = parse_comment(
Example #23
def schedule_notifications():
    import os
    import math
    import datetime
    import calendar
    import logging

    os.chdir("/home/ucfabb0/code/semantica-docker/src/")

    logging.basicConfig(format='%(asctime)s %(levelname)s %(message)s',
                        datefmt='%m/%d/%Y %I:%M:%S %p',
                        level=logging.INFO)

    def is_visit_confirmed(visit):
        return visit['visited'] is not None or visit['deleted']

    def is_aggregated_personal_information_reviewed(pi):
        return pi['rpi'] > 0 and pi['rpriv'] > 0

    def is_personal_information_reviewed(pi):
        return pi['r'] > 0

    try:
        users = user_traces_db.get_all_users_push_ids()

        utc_dt = datetime.datetime.utcnow()
        utc_time = calendar.timegm(utc_dt.utctimetuple())

        number_of_days = 2
        days = set()
        day = utils.today_string()
        today = day
        for i in range(number_of_days):
            days.add(day)
            day = utils.previous_day(day)

        count_users = 0
        # user_filter = {'cab40ba2-244b-4394-aca1-4b1d87d969df', 'f5eb6ac8-adbc-4d1c-acf3-627d6fa95664',
        #                '4b2c57ae-5060-464d-9a9a-38a92268910f', '02cd63b6-a58d-4209-a9b4-b733a7fff2ae'}
        for user in users:
            user_id = user['user_id']

            # if user_id not in user_filter:
            #     continue

            has_visits_to_confirm = False
            has_places_to_review = False
            has_personal_information_to_review = False
            last_update_within_48_hours = False
            has_join_for_more_than_24_hours = False

            # get the last update
            last_user_update = user_traces_db.get_last_user_update(user_id)
            if not last_user_update:
                logging.warning("[%s] no traces available for user" % user_id)
                continue

            # get the user join date
            user_join_date = user_traces_db.get_user_join_date(user_id)
            if not user_join_date:
                logging.warning("[%s] no join date available" % user_id)

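            # Note: despite the variable name, this window is 96 hours (96 * 3600 seconds).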
            last_update_within_48_hours = math.fabs(last_user_update -
                                                    utc_time) <= 96 * 3600
            has_join_for_more_than_24_hours = math.fabs(user_join_date -
                                                        utc_time) > 24 * 3600

            if not last_update_within_48_hours:
                logging.warning("[%s] not updated since 48 hours" % user_id)
                continue

            if not has_join_for_more_than_24_hours:
                logging.warning("[%s] not joined since 24 hours" % user_id)
                continue

            # get the latest utc offset
            utc_offset = user_traces_db.get_last_user_utc_offset(user_id)
            local_time = utc_time - utc_offset
            logging.info("[%s] utc offset: %s, local time: %s" %
                         (user_id, utc_offset, utils.day_hm(local_time)))

            local_dt = utils.datetime_from_timestamp(local_time)
            hour = local_dt.hour

            if hour != 20:
                logging.warning("[%s] not 20:00 local time yet (%s, %s)" %
                                (user_id, utils.day_hm(local_time), hour))
                continue

            # visits
            visits = user_traces_db.load_user_all_visits(user_id)
            if len(visits) > 0:
                # if this is around the time to send a notification,
                # check if we can send a notification
                recent_visits_to_confirm = [
                    v['visit_id'] for v in visits
                    if not is_visit_confirmed(v) and v['day'] in days
                ]
                total_number_of_recent_visits_to_confirm = len(
                    recent_visits_to_confirm)

                if total_number_of_recent_visits_to_confirm > 0:
                    has_visits_to_confirm = True

            # personal information per places
            personal_information = user_traces_db.load_all_user_personal_information(
                user_id)
            if len(personal_information) > 0:
                personal_information_per_place = {}
                for pi in personal_information:
                    pid = pi['pid']
                    if pid not in personal_information_per_place:
                        personal_information_per_place[pid] = []
                    personal_information_per_place[pid].append(pi)

                total_number_of_personal_information = 0
                total_number_of_personal_information_to_review = 0
                total_number_of_personal_information_reviewed = 0
                for pid, pis in personal_information_per_place.items():
                    number_of_personal_information = len(pis)
                    personal_information_to_review = [
                        pi['piid'] for pi in pis
                        if not is_personal_information_reviewed(pi)
                    ]
                    personal_information_reviewed = [
                        pi['piid'] for pi in pis
                        if is_personal_information_reviewed(pi)
                    ]
                    total_number_of_personal_information += number_of_personal_information
                    total_number_of_personal_information_to_review += 1 if len(
                        personal_information_to_review) > 0 else 0
                    total_number_of_personal_information_reviewed += 1 if len(
                        personal_information_reviewed) > 0 else 0

                if total_number_of_personal_information_to_review > 0:
                    has_places_to_review = True

            # aggregated personal information
            aggregated_personal_information = user_traces_db.load_user_aggregated_personal_information(
                user_id)
            if len(aggregated_personal_information) > 0:
                aggregated_personal_information_to_review = [
                    pi['piid'] for pi in aggregated_personal_information
                    if not is_aggregated_personal_information_reviewed(pi)
                ]
                number_of_aggregated_personal_information_to_review = len(
                    aggregated_personal_information_to_review)
                if number_of_aggregated_personal_information_to_review > 0:
                    has_personal_information_to_review = True

            count_users += 1

            # prepare the message to send via push notification
            logging.info(
                "[%s] visits %s / places %s / personal information %s" %
                (user_id, has_visits_to_confirm, has_places_to_review,
                 has_personal_information_to_review))
            if has_visits_to_confirm:
                logging.info("[%s] Send notification - visits" % user_id)
                message = "✅ We have detected new visits! Tap to review them"
                type = "timeline"
                send_push_notification(user_id,
                                       type,
                                       message,
                                       today,
                                       use_sandbox=True)
                send_push_notification(user_id,
                                       type,
                                       message,
                                       today,
                                       use_sandbox=False)

            elif has_places_to_review or has_personal_information_to_review:
                logging.info("[%s] Send notification - reviews" % user_id)
                message = "✅ We have detected new personal information about you! Tap to review them"
                type = "review"
                send_push_notification(user_id,
                                       type,
                                       message,
                                       use_sandbox=True)
                send_push_notification(user_id,
                                       type,
                                       message,
                                       use_sandbox=False)
    except:
        client.captureException()

    logging.info("[Done] processed %s users out of %s total users" %
                 (count_users, len(users)))
Example #24
    def get(self):
        last_seen = utils.get_last_seen(self.keyword)
        log.debug(
            f"Fetching comments for keyword: {self.keyword} : {last_seen}")
        url = f"https://api.pushshift.io/reddit/comment/search?q={self.keyword}&limit=100&sort=desc&fields=created_utc,id"
        lag_url = "https://api.pushshift.io/reddit/comment/search?limit=1&sort=desc"
        try:
            json = requests.get(url,
                                headers={'User-Agent': config.USER_AGENT},
                                timeout=10)
            if json.status_code != 200:
                log.warning(
                    f"Could not parse data for search term: {self.keyword} status: {str(json.status_code)}"
                )
                return []
            comments = json.json()['data']

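            # Refresh the estimate of pushshift's ingest lag at most once every 10 minutes.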
            if self.pushshift_lag_checked is None or \
              utils.datetime_now() - timedelta(minutes=10) > self.pushshift_lag_checked:
                log.debug("Updating pushshift comment lag")
                json = requests.get(lag_url,
                                    headers={'User-Agent': config.USER_AGENT},
                                    timeout=10)
                if json.status_code == 200:
                    comment_created = utils.datetime_from_timestamp(
                        json.json()['data'][0]['created_utc'])
                    self.pushshift_lag = round(
                        (utils.datetime_now() - comment_created).seconds / 60,
                        0)
                    self.pushshift_lag_checked = utils.datetime_now()

            if self.timeout_warn_threshold > 1:
                log.warning(
                    f"Recovered from timeouts after {self.consecutive_timeouts} attempts"
                )

            self.consecutive_timeouts = 0
            self.timeout_warn_threshold = 1

        except requests.exceptions.ReadTimeout:
            self.consecutive_timeouts += 1
            if self.consecutive_timeouts >= pow(self.timeout_warn_threshold,
                                                2) * 5:
                log.warning(
                    f"{self.consecutive_timeouts} consecutive timeouts for search term: {self.keyword}"
                )
                self.timeout_warn_threshold += 1
            return []

        except Exception as err:
            log.warning(
                f"Could not parse data for search term: {self.keyword}")
            log.warning(traceback.format_exc())
            return []

        if not len(comments):
            log.warning(f"No comments found for search term: {self.keyword}")
            return []

        result_comments = []
        for comment in comments:
            date_time = utils.datetime_from_timestamp(comment['created_utc'])
            if last_seen > date_time:
                break

            if not self.processed_comments.contains(comment['id']):
                result_comments.append(comment)

        return result_comments
Example #25
    log.warning(f"Hit 10 exceptions, giving up")
    return None


end_time = utils.parse_datetime_string("2021-01-01 00:00:00")
start_time = utils.parse_datetime_string("2021-04-01 00:00:00")
log.info(
    f"Counting comments from {utils.get_datetime_string(start_time, False)} to {utils.get_datetime_string(end_time, False)}, {int((start_time - end_time).total_seconds())} seconds"
)
current_time = start_time

current_count = 0
while current_time > end_time:
    current_comments = get_comments(current_time)
    if current_comments is None:
        break
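    # Ingest delay for this batch: how long after creation pushshift retrieved the newest comment.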
    ingest_delay_seconds = int(
        (utils.datetime_from_timestamp(current_comments[0]['retrieved_on']) -
         utils.datetime_from_timestamp(
             current_comments[0]['created_utc'])).total_seconds())
    for comment in current_comments:
        comment_time = utils.datetime_from_timestamp(comment['created_utc'])
        if comment_time != current_time:
            log.info(
                f"{utils.get_datetime_string(current_time)}	{current_count}	{ingest_delay_seconds}"
            )
            current_count = 0
            current_time = current_time - timedelta(minutes=15)
            break
        current_count += 1
Example #26
    def to_dictionary(self):
        dictionary = {
            'strategy': self._strategy,
            'transaction_currency': self._transaction_currency,
            'counter_currency': self._counter_currency,
            'start_cash': self._start_cash,
            'start_crypto': self._start_crypto,
            'start_time': self._start_time,
            'end_time': self._end_time,
            'source': self._source,
            'resample_period': self._resample_period,
            'evaluate_profit_on_last_order': self._evaluate_profit_on_last_order,
            'transaction_cost_percent': self._transaction_cost_percent,
            'benchmark_backtest': self._benchmark_backtest,
            'time_delay': self._time_delay,
            'slippage': self._slippage,
            'order_generator_type': self._order_generator_type,
            'cash': self._cash,
            'crypto': self._crypto,
            'num_trades': self._num_trades,
            'num_buys': self._num_buys,
            'num_sells': self._num_sells,
            'order_signals': self.order_signals,
            'trading_df': self.trading_df,
            'end_cash': self._end_cash,
            'end_crypto': self._end_crypto,
            'max_drawdown': self._max_drawdown,
            'max_drawdown_duration': self._max_drawdown_duration,
            'sharpe_ratio': self._sharpe_ratio,
            'orders_df': self.orders_df,
            'buy_sell_pair_returns': self._buy_sell_pair_returns,
            'buy_sell_pair_gains': self._buy_sell_pair_gains,
            'buy_sell_pair_losses': self._buy_sell_pair_losses,
            'num_gains': self._num_gains,
            'num_losses': self._num_losses,
            'alpha': self._alpha,
            'beta': self._beta,
        }

        dictionary["strategy"] = dictionary["strategy"].get_short_summary()
        dictionary["utilized_signals"] = ", ".join(get_distinct_signal_types(self.order_signals))
        dictionary["start_time"] = datetime_from_timestamp(dictionary["start_time"])
        dictionary["end_time"] = datetime_from_timestamp(dictionary["end_time"])
        dictionary["mean_buy_sell_pair_return"] = self.mean_buy_sell_pair_return

        if self.end_price is None:
            dictionary["profit"] = "N/A"
            dictionary["profit_percent"] = "N/A"
            dictionary["profit_USDT"] = "N/A"
            dictionary["profit_percent_USDT"] = "N/A"
        else:
            try:
                dictionary["profit"] = self.profit
                dictionary["profit_percent"] = self.profit_percent
                dictionary["profit_USDT"] = self.profit_usdt
                dictionary["profit_percent_USDT"] = self.profit_percent_usdt
            except NoPriceDataException:
                logging.error("No price data!")
                dictionary["profit"] = "N/A"
                dictionary["profit_percent"] = "N/A"
                dictionary["profit_USDT"] = "N/A"
                dictionary["profit_percent_USDT"] = "N/A"
        return dictionary
Example #27
    def get_report(self, include_order_signals=True):
        output = []
        output.append(str(self._strategy))

        # output.append(self.strategy.get_signal_report())
        output.append("--")

        output.append("\n* Order execution log *\n")
        output.append("Start balance: cash = {} {}, crypto = {} {}".format(self._start_cash, self._counter_currency,
                                                                           self._start_crypto, self._transaction_currency
                                                                           if self._start_crypto != 0 else ""))

        output.append("Start time: {}\n--".format(datetime_from_timestamp(self._start_time)))
        output.append("--")

        '''
        for i, order in enumerate(self.orders):
            output.append(str(order))
            if include_order_signals and len(self.order_signals) == len(self.orders): # for buy & hold we don't have signals
                output.append("   signal: {}".format(self.order_signals[i]))
        '''
        for i, row in self.orders_df.iterrows():
            order = row.order_obj
            signal = row.signal_obj
            output.append(str(order))
            if include_order_signals and signal is not None:  # for buy & hold we don't have signals
                output.append("   signal: {}".format(signal))
            output.append(f'   cash: {row.cash}    crypto: {row.crypto}')


        output.append("End time: {}".format(datetime_from_timestamp(self._end_time)))
        output.append("\nSummary")
        output.append("--")
        output.append("Number of trades: {}".format(self._num_trades))
        output.append("End cash: {0:.2f} {1}".format(self.end_cash, self._counter_currency))
        output.append("End crypto: {0:.6f} {1}".format(self.end_crypto, self._transaction_currency))
        output.append("End price: {}".format(self.end_price))

        sign = "+" if self.profit != None and self.profit >= 0 else ""
        output.append("Total value invested: {} {}".format(self._format_price_dependent_value(self.start_value),
                                                           self._counter_currency))
        output.append(
            "Total value after investment: {0:.2f} {1} ({2}{3:.2f}%)".format(self._format_price_dependent_value(self.end_value),
                                                                             self._counter_currency,
                                                                             sign,
                                                                             self._format_price_dependent_value(self.profit_percent)))
        output.append("Profit: {0:.2f} {1}".format(self._format_price_dependent_value(self.profit), self._counter_currency))

        if self._counter_currency != "USDT":
            sign = "+" if self.profit_usdt is not None and self.profit_usdt >= 0 else ""
            output.append("Total value invested: {:.2f} {} (conversion on {})".format(
                self._format_price_dependent_value(self.start_value_usdt),
                "USDT",
                datetime_from_timestamp(self._start_time)))
            output.append(
                    "Total value after investment: {0:.2f} {1} ({2}{3:.2f}%) (conversion on {4})".format(
                        self._format_price_dependent_value(self.end_value_usdt), "USDT", sign,
                        self._format_price_dependent_value(self.profit_percent_usdt),
                        datetime_from_timestamp(self._end_time)))
            output.append("Profit: {0:.2f} {1}".format(self._format_price_dependent_value(self.profit_usdt),
                                                       "USDT"))

        output.append("\nAdditional stats:")
        output.append("  Max drawdown: {}".format(self.max_drawdown))
        output.append("  Max drawdown duration: {}".format(self.max_drawdown_duration))
        output.append("  Sharpe ratio: {}".format(self.sharpe_ratio))
        output.append("  Alpha: {}".format(self.alpha))
        output.append("  Beta: {}".format(self.beta))

        output.append("  Buy-sell pair gains - overall stats")
        output.append("     min = {}, max = {}, mean = {}, stdev = {}".format(
            self.min_buy_sell_pair_gain,
            self.max_buy_sell_pair_gain,
            self.mean_buy_sell_pair_gain,
            self.std_buy_sell_pair_gain
        ))

        output.append("  Buy-sell pair losses - overall stats")
        output.append("     min = {}, max = {}, mean = {}, stdev = {}".format(
            self.min_buy_sell_pair_loss,
            self.max_buy_sell_pair_loss,
            self.mean_buy_sell_pair_loss,
            self.std_buy_sell_pair_loss
        ))

        output.append("  Buy-sell pair returns - overall stats")
        output.append("     min = {}, max = {}, mean = {}, stdev = {}".format(
            self.min_buy_sell_pair_return,
            self.max_buy_sell_pair_return,
            self.mean_buy_sell_pair_return,
            self.std_buy_sell_pair_return
        ))


        output.append("  Total buy-sell pairs: {}".format(self.num_buy_sell_pairs))
        output.append("  Total profitable trades: {}".format(self.num_profitable_trades))
        output.append("  Percent profitable trades: {}".format(self.percent_profitable_trades))
        output.append("  Percent unprofitable trades: {}".format(self.percent_unprofitable_trades))
        
        return "\n".join(output)
Example #28
import discord_logging
from datetime import timedelta
import requests
import time

log = discord_logging.init_logging()

import utils

USER_AGENT = "Pushshift tester by u/Watchful1"
BETA_START = utils.datetime_from_timestamp(1622071192)
START_TIME = utils.datetime_now()
PROD_START = None

while True:
    url = "https://api.pushshift.io/reddit/comment/search"
    comments = requests.get(url,
                            headers={
                                'User-Agent': USER_AGENT
                            },
                            timeout=10).json()['data']
    comment_time = utils.datetime_from_timestamp(comments[0]['created_utc'])

    if PROD_START is None:
        PROD_START = comment_time

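    # Rate at which ingested-comment time advances per second of wall-clock time;
    # used below to project how long until ingestion catches up to BETA_START.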
    change_from_prod_start = comment_time - PROD_START
    seconds_since_start = utils.datetime_now() - START_TIME
    ratio = (change_from_prod_start).seconds / (seconds_since_start).seconds
    if ratio > 0:
        catchup_seconds = (BETA_START - comment_time).seconds / ratio
Example #29
 def committed_by(self):
   return [(t['authorPHID'], datetime_from_timestamp(t['dateCreated']))
           for t in self.transactions if t['action'] == 'commit']
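Closing note: every example above relies on a project-local `datetime_from_timestamp` helper to turn a Unix epoch timestamp into a datetime. The helper differs between projects (Example #1, for instance, passes it a list of timestamps, and timezone handling varies), so the following is only a hedged minimal sketch of the scalar case, not any project's actual implementation:

from datetime import datetime, timezone

def datetime_from_timestamp(timestamp):
    # Hypothetical sketch: convert a Unix epoch value (seconds) into a timezone-aware UTC datetime.
    # The real helpers used above may return naive datetimes or accept sequences of timestamps instead.
    return datetime.fromtimestamp(timestamp, tz=timezone.utc)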