def tx(transaction):
    # TODO: transactions still need to be implemented properly.
    # Finishing this requires address support to be implemented first.
    check_transaction = db.session.query(TXs).filter_by(
        txid=transaction.lower()).first()
    if check_transaction is not None:
        txin = db.session.query(TXIn).filter_by(txid=transaction.lower()).all()
        txout = db.session.query(TxOut).filter_by(
            txid=transaction.lower()).all()
        if txin:  # .all() returns a list, never None; an empty list means no match
            block_height_lookup = db.session.query(Blocks).filter_by(
                height=check_transaction.block_height).first()
            return render_template('transaction.html',
                                   the_datetime=format_time(
                                       block_height_lookup.time),
                                   block_height=check_transaction.block_height,
                                   inputs=txin,
                                   outputs=txout,
                                   this_transaction=transaction.lower())
        else:
            return render_template('404.html',
                                   error="Not a valid transaction"), 404
    else:
        return render_template('404.html',
                               error="Not a valid transaction"), 404
Example No. 2
    def _update(self):
        if self.timer:
            remaining = self._end_time - datetime.datetime.now()
            remaining_seconds = remaining.total_seconds()
            if remaining_seconds < 0:
                self.stop()
            elif remaining_seconds <= 30:
                self.text.setHtml(
                    "<font color='red' size='5'>{}</font>".format(
                        format_time(remaining)))
            else:
                self.text.setHtml("<font color='white'>{}</font>".format(
                    format_time(remaining)))
            self.realign()

            if remaining_seconds > 0 and self.timer:
                self.timer.singleShot(1000, self._update)
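format_time is an external helper that none of these snippets define. A minimal sketch of what the countdown code in this example (and in Examples No. 4 and 6 below) appears to expect, assuming a datetime.timedelta rendered as MM:SS; the real helper may differ:

import datetime

def format_time(remaining):
    # Assumption: 'remaining' is a datetime.timedelta; expired timers clamp to 00:00.
    total_seconds = max(int(remaining.total_seconds()), 0)
    minutes, seconds = divmod(total_seconds, 60)
    return "{:02d}:{:02d}".format(minutes, seconds)

print(format_time(datetime.timedelta(seconds=95)))  # prints "01:35"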
Example No. 3
def csv_profile(profile):
    """TODO Manipulate into lists to join with ','"""
    times = profile.times
    layout = [(profile.name, )]
    for t in times:
        d_name = h.get_day_name(t)
        layout.append((t, d_name, h.format_time(times[t])))
    path = db.create_csv_path(profile.name)
    with open(path, "w+") as f:
        for r in layout:
            f.write(",".join(r))
            f.write("\n")
        print(f"> Created {path}")
Example No. 4
    def _update(self):
        if self._active:
            remaining = self.end_time - datetime.datetime.now()
            remaining_seconds = remaining.total_seconds()
            self.progress.setValue(remaining.seconds)
            self.progress.update()
            if remaining_seconds <= 30:
                self.setProperty('Warning', True)
                self.setStyle(self.style())
                self._time_label.setProperty('Warning', True)
                self._time_label.setStyle(self._time_label.style())
            if remaining_seconds <= 0:
                self._remove()
            self._time_label.setText(format_time(remaining))
        QTimer.singleShot(1000, self._update)
Example No. 5
def get_html(obj):
    if isinstance(obj, list):
        return "<ul>" + ''.join([get_html(log) for log in obj]) + "</ul>"
    else:
        if isinstance(obj, types.BuyLimitOrderRequest):
            return "<li>%s Limit Buy. Amount=%d Unit Price=%f BTC<ul>%s</ul></li>" % \
                   (helpers.format_time(obj.block_timestamp), obj.volume_requested,
                    obj.unit_price / OneHundredMillionF,
                    get_html(obj.trade_history))
        elif isinstance(obj, types.SellLimitOrderRequest):
            return "<li>%s Limit Sell. Amount=%d Unit Price=%f BTC<ul>%s</ul></li>" % \
                   (helpers.format_time(obj.block_timestamp), obj.volume_requested,
                    obj.unit_price / OneHundredMillionF,
                    get_html(obj.trade_history))
        elif isinstance(obj, types.BuyMarketOrderRequest):
            return "<li>%s Market Buy. Total Value=%f BTC<ul>%s</ul></li>" % \
                   (helpers.format_time(obj.block_timestamp),
                    obj.total_price_requested / OneHundredMillionF,
                    get_html(obj.trade_history))
        elif isinstance(obj, types.SellMarketOrderRequest):
            return "<li>%s Market Sell. Amount=%f <ul>%s</ul></li>" % \
                   (helpers.format_time(obj.block_timestamp), obj.volume_requested,
                    get_html(obj.trade_history))
        elif isinstance(obj, types.TransferRequest):
            return "<li>%s Transfer. Transfer To:<ul>%s</ul></li>" % \
                   (helpers.format_time(obj.block_timestamp),
                    ''.join(["<li>%s: %d</li>" % (addr, amount)
                             for addr, amount in obj.transfer_targets.iteritems()]))
        elif isinstance(obj, UserPayLog):
            return "<li>%s DPS = %f BTC, share number = %d, payment received = %f BTC</li>" % \
                   (helpers.format_time(obj.block_timestamp), obj.DPS / OneHundredMillionF,
                    obj.share_N, obj.DPS * obj.share_N / OneHundredMillionF)
        elif (isinstance(obj, types.TradeItem)
              and obj.trade_type == types.TradeItem.TRADE_TYPE_CANCELLED):
            return "<li>Canceled by user</li>"
        elif (isinstance(obj, types.TradeItem)
              and obj.trade_type != types.TradeItem.TRADE_TYPE_CANCELLED):
            return "<li>%s Trade Amount: %d, Unit Price: %f BTC</li>" % \
                   (helpers.format_time(obj.timestamp), obj.amount,
                    obj.unit_price / OneHundredMillionF)
        else:
            raise NotImplementedError()
Example No. 6
    def _update(self):
        if self._active:
            remaining = self.end_time - datetime.datetime.now()
            remaining_seconds = remaining.total_seconds()
            self.progress.setValue(remaining.seconds)
            self.progress.update()
            if remaining_seconds <= 30:
                self.setProperty('Warning', True)
                self.setStyle(self.style())
                self._time_label.setProperty('Warning', True)
                self._time_label.setStyle(self._time_label.style())
            if remaining_seconds <= 0:
                target = self.parentWidget()
                spells = (self.parentWidget().parentWidget().parentWidget()
                          .parentWidget().parentWidget())
                if isinstance(spells, Spells) and isinstance(target, SpellTarget):
                    spells.logstreamer._handleTimerExpiry(self.spell, target.name)
                self._remove()
            self._time_label.setText(format_time(remaining))
        QTimer.singleShot(1000, self._update)
Example No. 7
    def update_buffer(self):
        self.buffer.clear()

        maxx = self.window.getmaxyx()[1]
        c = 1

        longest_metadata_string_len = 0
        for n in self.timeline:
            if "direct" in self.timeline_type:
                user_string = "%s -> %s" % (n["sender"]["screen_name"],
                                            n["recipient"]["screen_name"])
                source_msg = ""
            else:
                user_string = "%s" % (n["user"]["screen_name"])
                raw_source_msg = "from %s" % (n["source"])
                source_msg = self.html_regex.sub("", raw_source_msg)
            if "in_reply_to_status_id" in n and n[
                    "in_reply_to_status_id"] is not None:
                if not config.config["show_source"]:
                    user_string += " +"
                else:
                    source_msg += " [+]"
            if "retweeted_status" in n:
                user_string = "%s [%s's RD]" % (
                    n["retweeted_status"]["user"]["screen_name"],
                    n["user"]["screen_name"])
                if "in_reply_to_status_id" in n["retweeted_status"]:
                    if not config.config["show_source"]:
                        user_string += " +"
                    else:
                        source_msg += " [+]"
            datetime_notice = helpers.notice_datetime(n)
            time_msg = helpers.format_time(helpers.time_since(datetime_notice),
                                           short_form=True)
            metadata_string = time_msg + " " + user_string
            if config.config["show_source"]:
                metadata_string += " " + source_msg
            if len(metadata_string) > longest_metadata_string_len:
                longest_metadata_string_len = len(metadata_string)

        for n in self.timeline:
            from_user = None
            to_user = None
            repeating_user = None
            if "direct" in self.timeline_type:
                from_user = n["sender"]["screen_name"]
                to_user = n["recipient"]["screen_name"]
                source_msg = ""
            else:
                if "retweeted_status" in n:
                    repeating_user = n["user"]["screen_name"]
                    n = n["retweeted_status"]
                from_user = n["user"]["screen_name"]
                raw_source_msg = "from %s" % (n["source"])
                source_msg = self.html_regex.sub("", raw_source_msg)
                repeat_msg = ""
                if n["in_reply_to_status_id"] is not None:
                    source_msg += " [+]"
            datetime_notice = helpers.notice_datetime(n)

            time_msg = helpers.format_time(helpers.time_since(datetime_notice),
                                           short_form=True)

            for user in [
                    user for user in [from_user, to_user, repeating_user]
                    if user is not None
            ]:
                if user not in config.session_store.user_cache:
                    config.session_store.user_cache[user] = random.choice(
                        identicurse.base_colours.items())[1]

            if "ic__paused_on" in n and c != 1:
                self.buffer.append([("-",
                                     identicurse.colour_fields["pause_line"])])
                self.buffer.append([("", identicurse.colour_fields["none"])])

            # Build the line
            line = []

            if c < 10:
                cout = " " + str(c)
            else:
                cout = str(c)
            line.append((cout, identicurse.colour_fields["notice_count"]))

            if (c - 1) == self.chosen_one:
                line.append((' * ', identicurse.colour_fields["selector"]))
            else:
                line.append((' ' * 3, identicurse.colour_fields["selector"]))

            if config.config['compact_notices']:
                line.append((time_msg, identicurse.colour_fields["time"]))
                line.append((" ", identicurse.colour_fields["none"]))

            if config.config['user_rainbow']:
                line.append(
                    (from_user, config.session_store.user_cache[from_user]))
            else:
                line.append((from_user, identicurse.colour_fields["username"]))
            user_length = len(from_user)

            if to_user is not None:
                line.append((" -> ", identicurse.colour_fields["none"]))
                if config.config['user_rainbow']:
                    line.append(
                        (to_user, config.session_store.user_cache[to_user]))
                else:
                    line.append(
                        (to_user, identicurse.colour_fields["username"]))
                user_length += (len(" -> ") + len(to_user))

            if repeating_user is not None:
                if config.config["compact_notices"]:
                    line.append((" [", identicurse.colour_fields["none"]))
                else:
                    line.append(
                        (" [ repeat by ", identicurse.colour_fields["none"]))

                if config.config['user_rainbow']:
                    line.append(
                        (repeating_user,
                         config.session_store.user_cache[repeating_user]))
                else:
                    line.append((repeating_user,
                                 identicurse.colour_fields["username"]))

                if config.config["compact_notices"]:
                    line.append(("'s RD]", identicurse.colour_fields["none"]))
                    user_length += (len(" [") + len(repeating_user) +
                                    len("'s RD]"))
                else:
                    line.append((" ]", identicurse.colour_fields["none"]))
                    user_length += (len(" [ repeat by ") +
                                    len(repeating_user) + len(" ]"))

            if not config.config['compact_notices']:
                if config.config["show_source"]:
                    line.append(
                        (' ' *
                         (maxx -
                          ((len(source_msg) + len(time_msg) + user_length +
                            (6 + len(cout))))),
                         identicurse.colour_fields["none"]))
                else:
                    line.append((' ' * (maxx - ((len(time_msg) + user_length +
                                                 (5 + len(cout))))),
                                 identicurse.colour_fields["none"]))
                line.append((time_msg, identicurse.colour_fields["time"]))
                if config.config["show_source"]:
                    line.append((' ', identicurse.colour_fields["none"]))
                    line.append(
                        (source_msg, identicurse.colour_fields["source"]))
                self.buffer.append(line)
                line = []
            else:
                detail_char = ""
                if (not config.config["show_source"]):
                    if "in_reply_to_status_id" in n and n[
                            "in_reply_to_status_id"] is not None:
                        detail_char = "+"
                    elif "retweeted_status" in n:
                        detail_char = "~"
                    line.append((" %s" % (detail_char),
                                 identicurse.colour_fields["source"]))
                if config.config["show_source"]:
                    line.append((" " + source_msg,
                                 identicurse.colour_fields["source"]))
                    line.append((" " * (
                        (longest_metadata_string_len -
                         (user_length + len(time_msg) + len(source_msg) + 2))),
                                 identicurse.colour_fields["none"]))
                else:
                    if detail_char == "":
                        line.append((" ", identicurse.colour_fields["none"]))
                    line.append((" " * ((longest_metadata_string_len -
                                         (user_length + len(time_msg) + 1))),
                                 identicurse.colour_fields["none"]))
                line.append((" | ", identicurse.colour_fields["none"]))

            try:
                notice_entities = helpers.split_entities(n['text'])
                for entity in notice_entities:
                    if len(entity['text']) > 0:
                        if entity['type'] in ['user', 'group', 'tag']:
                            entity_text_no_symbol = entity['text'][1:]
                            cache = getattr(config.session_store,
                                            '%s_cache' % (entity['type']))
                            if entity_text_no_symbol not in cache:
                                cache[entity_text_no_symbol] = random.choice(
                                    identicurse.base_colours.items())[1]
                            if config.config['%s_rainbow' % (entity['type'])]:
                                line.append((entity['text'],
                                             cache[entity_text_no_symbol]))
                            else:
                                if entity['type'] == "user":
                                    line.append((
                                        entity['text'],
                                        identicurse.colour_fields["username"]))
                                else:
                                    line.append((entity['text'],
                                                 identicurse.colour_fields[
                                                     entity['type']]))
                        else:
                            line.append((entity['text'],
                                         identicurse.colour_fields["notice"]))

                self.buffer.append(line)

            except UnicodeDecodeError:
                self.buffer.append([
                    ("Caution: Terminal too shit to display this notice.",
                     identicurse.colour_fields["warning"])
                ])

            if config.config["show_notice_links"]:
                line = []
                base_url = helpers.base_url_regex.findall(
                    self.conn.api_path)[0][0]
                if self.timeline_type in ["direct", "sentdirect"]:
                    notice_link = "%s/message/%s" % (base_url, str(n["id"]))
                else:
                    notice_link = "%s/notice/%s" % (base_url, str(n["id"]))
                line.append(("<%s>" % (notice_link),
                             identicurse.colour_fields["notice_link"]))
                self.buffer.append(line)

            if not config.config['compact_notices']:
                self.buffer.append([])

            c += 1
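In this update_buffer example and the longer variant in Example No. 9, format_time comes from an IdentiCurse helpers module that is not shown here. A hedged sketch of the short-form call used above, assuming helpers.time_since returns a notice's age in seconds; the real helper is likely more elaborate:

def format_time(seconds, short_form=True):
    # Assumption: 'seconds' is a notice age in seconds; only the short form used
    # in these snippets is sketched ("42s", "5m", "3h", "2d").
    for suffix, size in (("d", 86400), ("h", 3600), ("m", 60)):
        if seconds >= size:
            return "%d%s" % (seconds // size, suffix)
    return "%ds" % seconds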
Example No. 8
    old_alerts = []
    comments = get_comments()
    for comment in comments:
        old_alerts.extend(comment['body'].splitlines())
    old_alerts = set(old_alerts)
    to_post = []
    for alert in set(ALERTS):
        if alert not in old_alerts:
            to_post.append(alert)
    credentials = get_credentials()
    if to_post:
        post_comment(credentials, '\n'.join(to_post))


if __name__ == '__main__':
    prev_label = None
    credentials = get_credentials()
    while True:
        try:
            label = get_confirmed()
            if label == prev_label:
                try:
                    post_comment(credentials, 'SITE DOWN!')
                except Exception:
                    traceback.print_exc()
            prev_label = label
        except Exception:
            traceback.print_exc()
        print(format_time(get_cur_time()))
        time.sleep(7200)
Example No. 9
    def update_buffer(self):
        self.buffer.clear()

        if self.timeline_type == "user":
            if self.profile is not None:
                for field in [
                        # display name,           internal field name,  skip a line after this field?
                    ("Real Name", "name", True),
                    ("Bio", "description", False),
                    ("Location", "location", False),
                    ("URL", "url", False),
                    ("User ID", "id", False),
                    ("Joined at", "created_at", True),
                    ("Followed by", "followers_count", False),
                    ("Following", "friends_count", False),
                    ("Followed by you", "following", True),
                    ("Favourites", "favourites_count", False),
                    ("Notices", "statuses_count", False),
                    ("Average daily notices", "notices_per_day", True)
                ]:
                    if (self.profile[field[1]]
                            is not None) and (self.profile[field[1]] != ""):
                        line = []

                        line.append(
                            (field[0] + ":",
                             identicurse.colour_fields['profile_fields']))
                        line.append((" ", identicurse.colour_fields['none']))

                        line.append(
                            (self.profile[field[1]],
                             identicurse.colour_fields['profile_values']))

                        self.buffer.append(line)

                    if field[2]:
                        self.buffer.append([
                            ("", identicurse.colour_fields['none'])
                        ])
            else:
                self.buffer.append([
                    ("There is no user called @%s on this instance." %
                     (self.type_params['screen_name']),
                     identicurse.colour_fields['none'])
                ])

        if self.timeline_type == "group":
            if self.profile is not None:
                for field in [
                        # display name,           internal field name,  skip a line after this field?
                    ("Name", "fullname", True),
                    ("Description", "description", False),
                    ("Location", "location", False),
                    ("Homepage", "homepage", False),
                    ("Group ID", "id", False),
                    ("Created at", "created", False),
                    ("Members", "member_count", True),
                ]:
                    if (self.profile[field[1]]
                            is not None) and (self.profile[field[1]] != ""):
                        line = []

                        line.append(
                            (field[0] + ":",
                             identicurse.colour_fields['profile_fields']))
                        line.append((" ", identicurse.colour_fields['none']))

                        line.append(
                            (self.profile[field[1]],
                             identicurse.colour_fields['profile_values']))

                        self.buffer.append(line)

                    if field[2]:
                        self.buffer.append([
                            ("", identicurse.colour_fields['none'])
                        ])
            else:
                self.buffer.append([
                    ("There is no group called !%s on this instance." %
                     (self.type_params['nickname']),
                     identicurse.colour_fields['none'])
                ])

        maxx = self.window.getmaxyx()[1]
        c = 1

        longest_metadata_string_len = 0
        for n in self.timeline:
            if n["text"] is None:
                n["text"] = ""
            if "direct" in self.timeline_type:
                user_string = "%s -> %s" % (n["sender"]["screen_name"],
                                            n["recipient"]["screen_name"])
                source_msg = ""
            else:
                atless_reply = False
                if "in_reply_to_screen_name" in n and n[
                        "in_reply_to_screen_name"] is not None:
                    atless_reply = True
                    for entity in helpers.split_entities(n["text"]):
                        if entity[
                                "type"] == "user" and entity["text"][1:].lower(
                                ) == n["in_reply_to_screen_name"].lower():
                            atless_reply = False
                            break
                if atless_reply:
                    if "user" in n:
                        user_string = "%s" % (n["user"]["screen_name"])
                    else:
                        user_string = "<no username>"
                    user_string += " -> %s" % (n["in_reply_to_screen_name"])
                else:
                    if "user" in n:
                        user_string = "%s" % (n["user"]["screen_name"])
                    else:
                        user_string = ""
                if (n["source"] == "ostatus") and (
                        "user" in n) and "statusnet_profile_url" in n["user"]:
                    raw_source_msg = "from %s" % (helpers.domain_regex.findall(
                        n["user"]["statusnet_profile_url"])[0][2])
                else:
                    raw_source_msg = "from %s" % (n["source"])
                source_msg = self.html_regex.sub("", raw_source_msg)
            if "in_reply_to_status_id" in n and n[
                    "in_reply_to_status_id"] is not None:
                if not config.config["show_source"]:
                    user_string += " +"
                else:
                    source_msg += " [+]"
            if "retweeted_status" in n:
                user_string = "%s [%s's RD]" % (
                    n["retweeted_status"]["user"]["screen_name"],
                    n["user"]["screen_name"])
                if "in_reply_to_status_id" in n["retweeted_status"]:
                    if not config.config["show_source"]:
                        user_string += " +"
                    else:
                        source_msg += " [+]"
            datetime_notice = helpers.normalise_datetime(n["created_at"])
            time_msg = helpers.format_time(helpers.time_since(datetime_notice),
                                           short_form=True)
            metadata_string = time_msg + " " + user_string
            if config.config["show_source"]:
                metadata_string += " " + source_msg
            if len(metadata_string) > longest_metadata_string_len:
                longest_metadata_string_len = len(metadata_string)

        for n in self.timeline:
            if n["text"] is None:
                n["text"] = ""
            from_user = None
            to_user = None
            repeating_user = None
            if "direct" in self.timeline_type:
                from_user = n["sender"]["screen_name"]
                to_user = n["recipient"]["screen_name"]
                source_msg = ""
            else:
                if "retweeted_status" in n:
                    repeating_user = n["user"]["screen_name"]
                    n = n["retweeted_status"]
                if "user" in n:
                    from_user = n["user"]["screen_name"]
                else:
                    from_user = "******"
                atless_reply = False
                if "in_reply_to_screen_name" in n and n[
                        "in_reply_to_screen_name"] is not None:
                    atless_reply = True
                    for entity in helpers.split_entities(n["text"]):
                        if entity[
                                "type"] == "user" and entity["text"][1:].lower(
                                ) == n["in_reply_to_screen_name"].lower():
                            atless_reply = False
                            break
                if atless_reply:
                    to_user = n["in_reply_to_screen_name"]
                if (n["source"] == "ostatus") and (
                        "user" in n) and "statusnet_profile_url" in n["user"]:
                    raw_source_msg = "from %s" % (helpers.domain_regex.findall(
                        n["user"]["statusnet_profile_url"])[0][2])
                else:
                    raw_source_msg = "from %s" % (n["source"])
                source_msg = self.html_regex.sub("", raw_source_msg)
                repeat_msg = ""
                if n["in_reply_to_status_id"] is not None:
                    source_msg += " [+]"
            datetime_notice = helpers.normalise_datetime(n["created_at"])

            time_msg = helpers.format_time(helpers.time_since(datetime_notice),
                                           short_form=True)

            for user in [
                    user for user in [from_user, to_user, repeating_user]
                    if user is not None
            ]:
                if user not in config.session_store.user_cache:
                    config.session_store.user_cache[
                        user] = helpers.colour_from_name([
                            item[1]
                            for item in identicurse.base_colours.items()
                        ], user.lower())

            if "ic__paused_on" in n and c != 1:
                self.buffer.append([("-",
                                     identicurse.colour_fields["pause_line"])])
                self.buffer.append([("", identicurse.colour_fields["none"])])

            # Build the line
            line = []

            if c < 10:
                cout = " " + str(c)
            else:
                cout = str(c)
            line.append((cout, identicurse.colour_fields["notice_count"]))

            if (c - 1) == self.chosen_one:
                line.append((' * ', identicurse.colour_fields["selector"]))
            else:
                line.append((' ' * 3, identicurse.colour_fields["selector"]))

            if config.config['compact_notices']:
                line.append((time_msg, identicurse.colour_fields["time"]))
                line.append((" ", identicurse.colour_fields["none"]))

            if config.config['user_rainbow']:
                line.append(
                    (from_user, config.session_store.user_cache[from_user]))
            else:
                line.append((from_user, identicurse.colour_fields["username"]))
            if from_user is not None:
                user_length = len(from_user)
            else:
                user_length = 0  # keep this an int so the += updates below cannot fail

            if to_user is not None:
                line.append((" -> ", identicurse.colour_fields["none"]))
                if config.config['user_rainbow']:
                    line.append(
                        (to_user, config.session_store.user_cache[to_user]))
                else:
                    line.append(
                        (to_user, identicurse.colour_fields["username"]))
                user_length += len(" -> ") + len(to_user)

            if repeating_user is not None:
                if config.config["compact_notices"]:
                    line.append((" [", identicurse.colour_fields["none"]))
                else:
                    line.append(
                        (" [ repeat by ", identicurse.colour_fields["none"]))

                if config.config['user_rainbow']:
                    line.append(
                        (repeating_user,
                         config.session_store.user_cache[repeating_user]))
                else:
                    line.append((repeating_user,
                                 identicurse.colour_fields["username"]))

                if config.config["compact_notices"]:
                    line.append(("'s RD]", identicurse.colour_fields["none"]))
                    user_length += len(" [") + len(repeating_user) + len(
                        "'s RD]")
                else:
                    line.append((" ]", identicurse.colour_fields["none"]))
                    user_length += len(" [ repeat by ") + len(
                        repeating_user) + len(" ]")

            if not config.config['compact_notices']:
                if config.config["show_source"]:
                    line.append(
                        (' ' *
                         (maxx -
                          ((len(source_msg) + len(time_msg) + user_length +
                            (6 + len(cout))))),
                         identicurse.colour_fields["none"]))
                else:
                    line.append((' ' * (maxx - ((len(time_msg) + user_length +
                                                 (5 + len(cout))))),
                                 identicurse.colour_fields["none"]))
                line.append((time_msg, identicurse.colour_fields["time"]))
                if config.config["show_source"]:
                    line.append((' ', identicurse.colour_fields["none"]))
                    line.append(
                        (source_msg, identicurse.colour_fields["source"]))
                self.buffer.append(line)
                line = []
            else:
                detail_char = ""
                if (not config.config["show_source"]):
                    if "in_reply_to_status_id" in n and n[
                            "in_reply_to_status_id"] is not None:
                        detail_char = "+"
                    elif "retweeted_status" in n:
                        detail_char = "~"
                    line.append((" %s" % (detail_char),
                                 identicurse.colour_fields["source"]))
                if config.config["show_source"]:
                    line.append((" " + source_msg,
                                 identicurse.colour_fields["source"]))
                    line.append((" " * (
                        (longest_metadata_string_len -
                         (user_length + len(time_msg) + len(source_msg) + 2))),
                                 identicurse.colour_fields["none"]))
                else:
                    if detail_char == "":
                        line.append((" ", identicurse.colour_fields["none"]))
                    line.append((" " * ((longest_metadata_string_len -
                                         (user_length + len(time_msg) + 1))),
                                 identicurse.colour_fields["none"]))
                line.append((" | ", identicurse.colour_fields["none"]))

            try:
                min_x_offset = reduce(
                    (lambda acc_length, block:
                     (acc_length if (len(block) < 3) else max(
                         acc_length, block[2])) + len(block[0])), line, 0
                )  # determine how far along the line items beginning now would be; this will be used so that wrapped lines get correct indentation
                notice_entities = helpers.split_entities(n['text'] or "")
                for entity in notice_entities:
                    if len(entity['text']) > 0:
                        if entity['type'] in ['user', 'group', 'tag']:
                            entity_text_no_symbol = entity['text'][1:]
                            cache = getattr(config.session_store,
                                            '%s_cache' % (entity['type']))
                            if entity_text_no_symbol not in cache:
                                cache[
                                    entity_text_no_symbol] = helpers.colour_from_name(
                                        [
                                            item[1] for item in
                                            identicurse.base_colours.items()
                                        ], entity_text_no_symbol.lower())
                            if config.config['%s_rainbow' % (entity['type'])]:
                                line.append((entity['text'],
                                             cache[entity_text_no_symbol],
                                             min_x_offset))
                            else:
                                if entity['type'] == "user":
                                    line.append(
                                        (entity['text'],
                                         identicurse.colour_fields["username"],
                                         min_x_offset))
                                else:
                                    line.append(
                                        (entity['text'],
                                         identicurse.colour_fields[
                                             entity['type']], min_x_offset))
                        else:
                            line.append((entity['text'],
                                         identicurse.colour_fields["notice"],
                                         min_x_offset))

                self.buffer.append(line)

            except UnicodeDecodeError:
                self.buffer.append([
                    ("Caution: Terminal too shit to display this notice.",
                     identicurse.colour_fields["warning"])
                ])

            if config.config["show_notice_links"]:
                line = []
                base_url = helpers.base_url_regex.findall(
                    self.conn.api_path)[0][0]
                if self.timeline_type in ["direct", "sentdirect"]:
                    notice_link = "%s/message/%s" % (base_url, str(n["id"]))
                else:
                    notice_link = "%s/notice/%s" % (base_url, str(n["id"]))
                line.append(("<%s>" % (notice_link),
                             identicurse.colour_fields["notice_link"]))
                self.buffer.append(line)

            if not config.config['compact_notices']:
                self.buffer.append([])

            c += 1
Example No. 10
def account(request, asset_name, user_address):
    def get_html(obj):
        if isinstance(obj, list):
            return "<ul>" + ''.join([get_html(log) for log in obj]) + "</ul>"
        else:
            if isinstance(obj, types.BuyLimitOrderRequest):
                return "<li>%s Limit Buy. Amount=%d Unit Price=%f BTC<ul>%s</ul></li>" % \
                       (
                           helpers.format_time(obj.block_timestamp), obj.volume_requested,
                           obj.unit_price / OneHundredMillionF,
                           get_html(obj.trade_history))
            elif isinstance(obj, types.SellLimitOrderRequest):
                return "<li>%s Limit Sell. Amount=%d Unit Price=%f BTC<ul>%s</ul></li>" % \
                       (
                           helpers.format_time(obj.block_timestamp), obj.volume_requested,
                           obj.unit_price / OneHundredMillionF,
                           get_html(obj.trade_history))
            elif isinstance(obj, types.BuyMarketOrderRequest):
                return "<li>%s Market Buy. Total Value=%f BTC<ul>%s</ul></li>" % \
                       (helpers.format_time(obj.block_timestamp), obj.total_price_requested / OneHundredMillionF,
                        get_html(obj.trade_history))
            elif isinstance(obj, types.SellMarketOrderRequest):
                return "<li>%s Market Sell. Amount=%f <ul>%s</ul></li>" % \
                       (helpers.format_time(obj.block_timestamp), obj.volume_requested,
                        get_html(obj.trade_history))
            elif isinstance(obj, types.TransferRequest):
                return "<li>%s Transfer. Transfer To:<ul>%s</ul></li>" % \
                       (helpers.format_time(obj.block_timestamp),
                        ''.join(
                            ["<li>%s: %d</li>" % (addr, amount) for addr, amount in obj.transfer_targets.iteritems()]))
            elif isinstance(obj, UserPayLog):
                return "<li>%s DPS = %f BTC, share number = %d, payment received = %f BTC</li>" % \
                       (helpers.format_time(obj.block_timestamp), obj.DPS / OneHundredMillionF, obj.share_N,
                        obj.DPS * obj.share_N / OneHundredMillionF)
            elif isinstance(
                    obj, types.TradeItem
            ) and obj.trade_type == types.TradeItem.TRADE_TYPE_CANCELLED:
                return "<li>Canceled by user</li>"
            elif isinstance(
                    obj, types.TradeItem
            ) and obj.trade_type != types.TradeItem.TRADE_TYPE_CANCELLED:
                return "<li>%s Trade Amount: %d, Unit Price: %f BTC</li>" % \
                       (helpers.format_time(obj.timestamp), obj.amount, obj.unit_price / OneHundredMillionF)
            else:
                raise NotImplementedError()

    chained_state = ChainedState.get_latest_state()

    try:
        tradings = [
            t for t in chained_state.user_history[asset_name][user_address]
            if isinstance(t, (types.BuyLimitOrderRequest,
                              types.SellLimitOrderRequest,
                              types.BuyMarketOrderRequest,
                              types.SellMarketOrderRequest,
                              types.TransferRequest))
        ]
    except Exception:
        tradings = []
    if tradings:
        trade_html = get_html(tradings)
    else:
        trade_html = "<ul><li>Empty</li></ul>"

    try:
        pays = [
            t for t in chained_state.user_history[asset_name][user_address]
            if isinstance(t, UserPayLog)
        ]
    except Exception:
        pays = []
    if pays:
        pay_html = get_html(pays)
    else:
        pay_html = "<ul><li>Empty</li></ul>"

    try:
        failures = [
            t for t in chained_state.failed_requests
            if t.transaction.input_addresses[0] == user_address
        ]
    except Exception:
        failures = []
    if failures:
        failures_html = "<ul>%s</ul>" % (''.join([
            "<li>%s  Transaction hash: <a href=\"http://blockchain.info/tx/%s\" target=\"_blank\">%s</a> Reason: %s</li>"
            % (helpers.format_time(o.block_timestamp), o.transaction.hash,
               o.transaction.hash, o.readable_message()) for o in failures
        ]))
    else:
        failures_html = ""  # we don't display the failure tag, this value is used in the template

    return render_to_response(
        'account.html', {
            'asset_name': asset_name,
            'trade_html': trade_html,
            'pay_html': pay_html,
            'failures_html': failures_html
        })
Example No. 11
def run_analysis(config=None, is_gui=False):
    """Primary RWSM analysis loop
    
    Keyword Arguments:
        config {instance} -- ConfigParser instance holding parameter values (default: {None})
        is_gui {bool} -- indicates if running from ArcMap toolbox GUI (default: {False})
    """

    # Logger used for command line debugging, not supported in beta.
    # logger = helpers.get_logger(LOG_LEVEL)
    # logger.info('Starting analysis...')

    # Initialize structures for user output.
    start_time = time.clock()
    if is_gui:
        arcpy.SetProgressor("default", "Initiating workspace...")

    # Load values from config file
    if not config:
        CONFIG_FILE_NAME = "rwsm.ini"
        if os.path.isfile(CONFIG_FILE_NAME):
            config = helpers.load_config(CONFIG_FILE_NAME)
    workspace = config.get("RWSM", "workspace")
    workspace = os.path.join(workspace, "rwsm")
    watersheds_file_name = config.get("RWSM", "watersheds")
    watersheds_field = config.get("RWSM", "watersheds_field")

    # Create workspace
    (temp_file_name, out_file_name,
     workspace) = helpers.init_workspace(workspace)

    # Instantiate watershed, run dissolve
    if is_gui:
        arcpy.SetProgressor("default", "Dissolving watersheds...")

    watersheds = Watersheds(config)
    dissolved_watersheds = watersheds.dissolve()

    # Change to temporary workspace
    arcpy.env.workspace = temp_file_name

    # Set aside tracking data structures
    land_use_descriptions = []

    # Gather configuration file values --------------------------------------------

    # Land Use (Shapefile)
    land_use_file_name = config.get("RWSM", "land_use")
    land_use_field = config.get("RWSM", "land_use_field")
    land_use_LU_code_field = config.get("RWSM", "land_use_LU_code_field")
    land_use_LU_bin_field = config.get("RWSM", "land_use_LU_bin_field")
    land_use_LU_desc_field = config.get("RWSM", "land_use_LU_desc_field")
    land_use_LU_class_field = config.get("RWSM", "land_use_LU_class_field")
    land_use_LU_file_name = config.get("RWSM", "land_use_LU_file_name")

    # Soils (Shapefile)
    soils_file_name = config.get("RWSM", "soils_file_name")
    soils_field = config.get("RWSM", "soils_field")
    soils_bin_field = config.get("RWSM", "soils_bin_field")

    # Slope (Raster)
    slope_file_name = config.get("RWSM", "slope_file_name")
    slope_bin_field = config.get("RWSM", "slope_bin_field")

    # precipitation (Raster)
    precipitation_file_name = config.get("RWSM", "precipitation_file_name")

    # Run-off Coefficient (CSV or Table)
    runoff_coeff_file_name = config.get("RWSM", "runoff_coeff_file_name")
    runoff_coeff_field = config.get("RWSM", "runoff_coeff_field")

    # Populate Slope Bins data structure ------------------------------------------
    if is_gui:
        arcpy.SetProgressor("default", "Computing slope bins...")

    slope_raster = arcpy.sa.Raster(slope_file_name)
    slope_bins = helpers.load_slope_bins(config)
    slope_bins_w_codes = helpers.load_slope_bins(config)
    map(lambda x: x.append((slope_bins_w_codes.index(x) + 1) * 100),
        slope_bins_w_codes)

    # Get precipitation raster ----------------------------------------------------
    if is_gui:
        arcpy.SetProgressor("default", "Importing precipitation raster...")
    precipitation_raster = arcpy.sa.Raster(precipitation_file_name)

    # Set aside structure for holding intersected watershed references ------------
    intersected_watersheds = []

    # Setup statistics output object ----------------------------------------------
    if is_gui:
        arcpy.SetProgressor("default", "Initiating statistics writer...")
    writer = Stats_Writer(config, watersheds.get_names(), slope_bins)

    # Initialize data structures for updating progressor label
    if is_gui:
        n_watersheds = len(watersheds.get_names())
        cnt = 1

    # List of tuples for holding error information
    watershed_errors = []

    # Load code to coefficient lookup table
    codes_to_coeff_lookup = helpers.get_code_to_coeff_lookup(config)

    # Iterate through watersheds, run precipitation clip analysis -----------------
    with arcpy.da.SearchCursor(dissolved_watersheds,
                               (watersheds_field, "SHAPE@")) as cursor:
        for watershed in cursor:
            try:
                # Prepare watershed data ----------------------------------------------
                watershed_name = watershed[0]
                watershed_val = watershed[1]

                if is_gui:
                    msg = "Analysing {}, watershed {} of {}...".format(
                        watershed_name, cnt, n_watersheds)
                    arcpy.SetProgressor("step", msg, 0, n_watersheds, cnt)

                # Remove illegal characters from watershed name
                watershed_name = helpers.strip_chars(
                    watershed_name, '!@#$%^&*()-+=,<>?/\~`[]{}.')

                # Land Use Operations -------------------------------------------------
                arcpy.Clip_analysis(in_features=land_use_file_name,
                                    clip_features=watershed_val,
                                    out_feature_class="lu_" + watershed_name)
                if is_gui:
                    msg = "{}: land use clip analysis complete: {}".format(
                        watershed_name, helpers.format_time(start_time))
                    arcpy.AddMessage(msg)

                # Adds land use lookup bin and description
                helpers.fasterJoin(fc="lu_" + watershed_name,
                                   fcField=land_use_field,
                                   joinFC=land_use_LU_file_name,
                                   joinFCField=land_use_LU_code_field,
                                   fields=(land_use_LU_bin_field,
                                           land_use_LU_desc_field,
                                           land_use_LU_class_field))

                # Dissolve land use
                land_use_clip = arcpy.Dissolve_management(
                    in_features="lu_" + watershed_name,
                    out_feature_class="luD_" + watershed_name,
                    dissolve_field=[
                        land_use_field, land_use_LU_desc_field,
                        land_use_LU_bin_field, land_use_LU_class_field
                    ],
                    statistics_fields="",
                    multi_part="SINGLE_PART")
                if is_gui:
                    msg = "{}: land use dissolve complete: {}".format(
                        watershed_name, helpers.format_time(start_time))
                    arcpy.AddMessage(msg)

                # Check size of land use area; skip this watershed if no data is found.
                if int(arcpy.GetCount_management(land_use_clip).getOutput(
                        0)) > 0:
                    if is_gui:
                        msg = "{}: Land use clip and dissolve has data, continuing analysis...".format(
                            watershed_name)
                        arcpy.AddMessage(msg)
                else:
                    if is_gui:
                        msg = "{}: Land use clip and dissolve yielded no data, skipping watershed...".format(
                            watershed_name)
                        arcpy.AddMessage(msg)
                    continue  # skip only this watershed, as the message above indicates

                # Clip soils
                arcpy.Clip_analysis(in_features=soils_file_name,
                                    clip_features=watershed_val,
                                    out_feature_class="soils_" +
                                    watershed_name)
                if is_gui:
                    msg = "{}: soil clip analysis complete: {}".format(
                        watershed_name, helpers.format_time(start_time))
                    arcpy.AddMessage(msg)

                soils_clip = arcpy.Dissolve_management(
                    in_features="soils_" + watershed_name,
                    out_feature_class="soilsD_" + watershed_name,
                    dissolve_field=soils_field,
                    statistics_fields="",
                    multi_part="SINGLE_PART")
                if is_gui:
                    msg = "{}: soils dissolve analysis complete: {}".format(
                        watershed_name, helpers.format_time(start_time))
                    arcpy.AddMessage(msg)

                if int(arcpy.GetCount_management(soils_clip).getOutput(0)) > 0:
                    if is_gui:
                        msg = "{}: Soils clip and dissolve contains data, continuing analysis...".format(
                            watershed_name)
                        arcpy.AddMessage(msg)
                else:
                    if is_gui:
                        msg = "{}: Soils clip and dissolve yielded no rows, skipping watershed...".format(
                            watershed_name)
                        arcpy.AddMessage(msg)
                    continue  # skip only this watershed, as the message above indicates

                # Intersect Land Use and Soils ----------------------------------------
                intersect_land_use_and_soils = arcpy.Intersect_analysis(
                    in_features=[land_use_clip, soils_clip],
                    out_feature_class="int_" + watershed_name,
                    join_attributes="NO_FID")
                if is_gui:
                    msg = "{}: land use and soils intersect complete: {}".format(
                        watershed_name, helpers.format_time(start_time))
                    arcpy.AddMessage(msg)

                intersect_land_use_and_soils_singles = arcpy.MultipartToSinglepart_management(
                    in_features=intersect_land_use_and_soils,
                    out_feature_class="intX_" + watershed_name)
                if is_gui:
                    msg = "{}: Multipart to single part complete: {}".format(
                        watershed_name, helpers.format_time(start_time))
                    arcpy.AddMessage(msg)

                intersect = helpers.elimSmallPolys(
                    fc=intersect_land_use_and_soils_singles,
                    outName=os.path.join(workspace, out_file_name,
                                         watershed_name),
                    clusTol=0.005)
                if is_gui:
                    msg = "{}: elimSmallPolys: {}".format(
                        watershed_name, helpers.format_time(start_time))
                    arcpy.AddMessage(msg)

                # Add unique ID field -------------------------------------------------
                arcpy.AddField_management(in_table=intersect,
                                          field_name='uID',
                                          field_type='LONG')
                with arcpy.da.UpdateCursor(intersect,
                                           ('OID@', 'uID')) as cursor:
                    for row in cursor:
                        row[1] = row[0]
                        cursor.updateRow(row)
                if is_gui:
                    msg = "{}: uID field added: {}".format(
                        watershed_name, helpers.format_time(start_time))
                    arcpy.AddMessage(msg)

                # Add Slope bin field -------------------------------------------------
                helpers.rasterAvgs(intersect, slope_raster, 'slope',
                                   watershed_name)
                arcpy.AddField_management(intersect, slope_bin_field, "TEXT")
                if is_gui:
                    msg = "{}: slope bin field added: {}".format(
                        watershed_name, helpers.format_time(start_time))
                    arcpy.AddMessage(msg)

                # Precipitation -------------------------------------------------------
                helpers.rasterAvgs(intersect, precipitation_raster,
                                   'precipitation', watershed_name)
                if is_gui:
                    msg = "{}: Precipitation added: {}".format(
                        watershed_name, helpers.format_time(start_time))
                    arcpy.AddMessage(msg)

                # Add soils, land use, and slope fields -------------------------------
                arcpy.AddField_management(intersect, "watershed", "TEXT")
                arcpy.AddField_management(intersect, soils_bin_field, "TEXT")
                arcpy.AddField_management(intersect, "land_use", "LONG")
                with arcpy.da.UpdateCursor(
                        intersect,
                    ("watershed", soils_bin_field, soils_field, "land_use",
                     land_use_field, slope_bin_field, 'slope_mean')) as cursor:
                    for row in cursor:
                        # Shift columns
                        row[0] = watershed_name
                        row[1] = row[2]
                        row[3] = row[4]

                        # Add slope bin to feature data
                        slope_bin = filter(lambda x: x[0] <= row[6] < x[1],
                                           slope_bins)
                        if len(slope_bin) > 0:
                            slope_bin = str(
                                slope_bin[0]).strip('[').strip(']').replace(
                                    ', ', '-')
                        else:
                            slope_bin = "NaN"
                        row[5] = slope_bin

                        cursor.updateRow(row)
                if is_gui:
                    msg = "{}: soils, land use, and slope fields added: {}".format(
                        watershed_name, helpers.format_time(start_time))
                    arcpy.AddMessage(msg)

                # Add land use code fields ---------------------------------------------
                code_field = 'code_' + land_use_LU_bin_field
                base_field = 'runoff_vol_' + runoff_coeff_field
                arcpy.AddField_management(intersect, code_field, "DOUBLE")
                arcpy.AddField_management(intersect, base_field, "DOUBLE")
                if is_gui:
                    msg = "{}: land use code and runoff volume fields added: {}".format(
                        watershed_name, helpers.format_time(start_time))
                    arcpy.AddMessage(msg)

                # Write in values for new fields --------------------------------------
                with arcpy.da.UpdateCursor(
                        intersect, (soils_bin_field, land_use_LU_bin_field,
                                    slope_bin_field, code_field)) as cursor:
                    for row in cursor:
                        # arcpy.AddMessage("{},{},{},{}".format(row[0],row[1],row[2],row[3]))
                        # TODO: Identify why NaNs exist
                        slpBin1 = int(
                            row[2].split('-')[0]) if row[2] != 'NaN' else 0
                        slpBinVal = [
                            k[2] for k in slope_bins_w_codes if k[0] == slpBin1
                        ][0]
                        row[3] = helpers.calculateCode(slpBinVal, row[0],
                                                       float(row[1]),
                                                       soils_bin_field)
                        cursor.updateRow(row)
                if is_gui:
                    msg = "{}: land use codes added: {}".format(
                        watershed_name, helpers.format_time(start_time))
                    arcpy.AddMessage(msg)

                # Join runoff coeff lookup table and calculate runoff volume
                arcpy.AddField_management(intersect, runoff_coeff_field,
                                          "Double")
                with arcpy.da.UpdateCursor(
                        intersect, (runoff_coeff_field, code_field)) as cursor:
                    for row in cursor:
                        row[0] = codes_to_coeff_lookup[row[1]]
                        cursor.updateRow(row)
                if is_gui:
                    msg = "{}: output fields added: {}".format(
                        watershed_name, helpers.format_time(start_time))
                    arcpy.AddMessage(msg)

                # Convert precipitation from mm to m and multiple by runoff vol.
                with arcpy.da.UpdateCursor(
                        in_table=intersect,
                        field_names=[
                            'SHAPE@AREA', runoff_coeff_field, base_field,
                            'precipitation_mean'
                        ],
                        where_clause='"{0}" is not null'.format(
                            runoff_coeff_field)) as cursor:
                    for row in cursor:
                        # convert ppt from mm to m and multiply by area and runoff coeff
                        row[2] = (row[3] / 1000.0) * row[0] * row[1]
                        cursor.updateRow(row)
                if is_gui:
                    msg = "{}: precipitation converted: {}".format(
                        watershed_name, helpers.format_time(start_time))
                    arcpy.AddMessage(msg)

                # Update statistics writer --------------------------------------------
                writer.add_fc_table(
                    os.path.join(workspace, out_file_name, watershed_name))
                if is_gui:
                    msg = "{}: statistics computed: {}\n".format(
                        watershed_name, helpers.format_time(start_time))
                    arcpy.AddMessage(msg)

                # Increment count -----------------------------------------------------
                cnt += 1

            except Exception as error:
                if is_gui:
                    msg = "{}: Error computing analysis: {}".format(
                        watershed_name, error)
                    arcpy.AddMessage(msg)
                watershed_errors.append((watershed_name, error))
                continue

    # Write stats to csv files and watersheds with errors
    writer.write_ws_stats_table(os.path.join(workspace, "results_wsStats.csv"))
    writer.write_lu_stats_table(os.path.join(workspace, "results_luStats.csv"))
    if is_gui:
        msg = "Analysis complete: {}".format(helpers.format_time(start_time))
        arcpy.AddMessage(msg)
        if len(watershed_errors) > 0:
            msg = "Errors encountered while computing analysis for the following watersheds:"
            arcpy.AddMessage(msg)
            for (watershed_name, error) in watershed_errors:
                arcpy.AddMessage(watershed_name)
        else:
            msg = "There were no errors during the analysis"
            arcpy.AddMessage(msg)
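The RWSM analysis above reports progress with helpers.format_time(start_time), where start_time comes from time.clock(). A rough sketch of such an elapsed-time formatter under that assumption (the snippet targets ArcGIS Desktop's Python 2.7, where time.clock() is still available; the project's actual helper is not shown):

import time

def format_time(start_time):
    # Assumption: returns the time elapsed since start_time as H:MM:SS.
    elapsed = int(time.clock() - start_time)
    hours, remainder = divmod(elapsed, 3600)
    minutes, seconds = divmod(remainder, 60)
    return "%d:%02d:%02d" % (hours, minutes, seconds)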
Example No. 12
def block(block_hash_or_height):
    try:
        the_block_height = int(block_hash_or_height)
    except ValueError:
        try:
            block_lookup = db.session.query(Blocks).filter_by(
                hash=block_hash_or_height.lower()).first()
            the_block_height = int(block_lookup.height)
        except (AttributeError, ValueError):
            return render_template('404.html',
                                   error="Not a valid block height/hash"), 404

    latest_block_height = int(
        db.session.query(Blocks).order_by(desc('height')).first().height)
    if 0 <= the_block_height <= latest_block_height:
        the_block = db.session.query(Blocks).filter_by(
            height=the_block_height).first()
        if the_block is not None:
            block_hash = the_block.hash
            if the_block_height != 0:
                previous_block_hash = the_block.prevhash
            else:
                previous_block_hash = None

            if the_block_height != latest_block_height:
                next_block_hash = the_block.nexthash
            else:
                next_block_hash = None

            version = the_block.version
            merkle_root = the_block.merkleroot
            the_time = the_block.time
            formatted_time = format_time(the_block.time)
            difficulty = the_block.difficulty
            bits = the_block.bits
            cumulative_difficulty = the_block.cumulative_difficulty
            nonce = the_block.nonce
            transactions = db.session.query(TXs).filter_by(
                block_height=the_block_height).all()
            value_out = the_block.value_out
            # TODO
            transaction_fees = 'PLACEHOLDER'

            return render_template(
                'block.html',
                block_hash=block_hash,
                previous_block_hash=previous_block_hash,
                next_block_hash=next_block_hash,
                block_height=the_block_height,
                version=version,
                merkle_root=merkle_root,
                time=the_time,
                formatted_time=formatted_time,
                difficulty=difficulty,
                bits=bits,
                cumulative_difficulty=cumulative_difficulty,
                nonce=nonce,
                the_transactions=transactions,
                value_out=value_out,
                transaction_fees=transaction_fees,
                # TODO
                average_coin_age='?')
        else:
            return render_template('404.html',
                                   error="Not a valid block height/hash"), 404
    else:
        return render_template('404.html',
                               error="Not a valid block height/hash"), 404