Exemplo n.º 1
0
    def read_data(fname):
        """Load a whitespace-delimited text file into a result namespace.

        Args:
            fname: path to the text file to load.

        Returns:
            A ``sample_text`` namespace with ``content`` (1-D numpy array
            of the file's words), ``len`` (word count) and ``sample`` (one
            randomly chosen word), or ``False`` when ``fname`` is not a
            file.
        """
        log.info("Processing text at path: {}".format(fname))
        if not os.path.isfile(fname):
            log.warn("{} is an invalid path".format(fname))
            return False

        # Bare class used as a result namespace (matches the other loaders).
        class sample_text:
            pass

        with open(fname) as f:
            # Single tokenizing pass; str.split() with no argument already
            # ignores leading/trailing whitespace, so the separate strip()
            # pass of the old implementation is folded in here.
            content = [line.split() for line in f]
        content = np.array(content)
        # Flatten per-line token lists into one 1-D word vector.
        sample_text.content = np.reshape(content, [
            -1,
        ])
        sample_text.len = sample_text.content.shape[0]
        sample_text.sample = sample_text.content[np.random.randint(
            0, sample_text.len)]
        # this should be red if lower than x and green if above y.
        log.debug("Sample text is {} words long.".format(sample_text.len))
        log.info("Sample word from text:\n\t{}".format(sample_text.sample))
        log.info("File Loaded successfully.")
        return sample_text
Exemplo n.º 2
0
    def read_data(self, fname=None, normalize_digits=True):
        """Build a word-frequency vocabulary from a text file and store it.

        Tokenizes ``fname`` with ``self.basic_tokenizer``, optionally
        collapsing digits to "0", then writes the frequency-sorted
        vocabulary to the ``idx2word`` / ``word2idx`` databases.

        Args:
            fname: path to the corpus; defaults to "text/test.txt".
            normalize_digits: when True, digits are replaced with "0"
                via ``self._DIGIT_RE`` before counting.

        Returns:
            True on success, False when ``fname`` is not a file.
        """
        if fname is None:
            fname = "text/test.txt"
        log.info("Processing text at path: {}".format(fname))
        if not os.path.isfile(fname):
            log.warn("{} is an invalid path".format(fname))
            return False

        # Count word frequencies, one line at a time.
        vocab = {}
        with open(fname) as f:
            for counter, line in enumerate(f, start=1):
                if counter % 100000 == 0:
                    print("Processing line #{}...".format(counter))
                tokens = self.basic_tokenizer(line)
                for w in tokens:
                    word = re.sub(self._DIGIT_RE, "0",
                                  w) if normalize_digits else w
                    vocab[word] = vocab.get(word, 0) + 1

        # Reserved tokens first, then words by descending frequency.
        vocab_list = self._START_VOCAB + sorted(
            vocab, key=vocab.get, reverse=True)
        log.info('>> Full Vocabulary Size : {}'.format(len(vocab_list)))

        # add words to database
        for index, word in enumerate(vocab_list):
            log.debug("adding word \"{}\" to database @ {}".format(
                word, index))
            self.idx2word.write_data(str(index), str(word))
            self.word2idx.write_data(str(word), str(index))
            # Read-back check verifies the round trip but doubles DB traffic.
            read_back = int(self.word2idx.read_data(str(word)))
            assert index == read_back

        # (Removed a dead `if False:` sanity-check block and an unused
        # local `sample_text` class from the original.)
        log.info("File Loaded successfully.")
        return True
Exemplo n.º 3
0
def run(tags):
    """Follow-mode main loop: watch the chain and follow matching authors.

    Streams comments forever. Any author (other than ourselves) of a
    top-level post carrying one of ``tags`` who we do not already follow
    is queued; queued follows are executed as account bandwidth allows.

    Args:
        tags: non-empty iterable of tag strings to watch for.

    Raises:
        ValueError: when ``tags`` is None or empty.
    """
    log.info("Follow mode activated", tags=tags)

    if tags is None or len(tags) < 1:
        raise ValueError("You must specify at least one tag")

    log.debug("initializing...")
    steem = Steem(keys=[cred.key])
    account = Account(cred.id, steem)
    chain = Blockchain(steem)
    log.debug("ready", steem=steem, account=account, blockchain=chain)

    log.info("Gathering our following list...")
    following = account.get_following()
    # Authors we have decided to follow but haven't followed yet
    # (waiting on bandwidth).
    pending = []
    log.info("Following list retrieved", count=len(following))

    log.info("Watching for new posts...")
    # Outer loop restarts the stream after a handled exception.
    while True:
        stream = map(Post, chain.stream(filter_by=['comment']))

        try:
            for post in stream:
                # Drain as much of the pending-follow queue as bandwidth
                # permits before examining the new post.
                count = len(pending)
                if count > 0:
                    copy = list(pending)
                    for i in range(count):
                        if have_bandwidth(steem, account):
                            user = copy[i]
                            log.info("following user", user=user)
                            steem.follow(user, account=cred.id)
                            # copy[] preserves order, so the head of
                            # pending is always the one just followed.
                            del pending[0]

                        else:
                            log.warn("Waiting for more bandwidth before following another user")
                            break


                if post.is_main_post():
                    log.debug("found a top-level post", author=post.author, tags=post.tags)

                    if post.author != cred.id:
                        for tag in tags:
                            if tag in post.tags:
                                # Appending to `following` too prevents
                                # queuing the same author twice.
                                if post.author not in following:
                                    pending.append(post.author)
                                    following.append(post.author)
                                    break

        except PostDoesNotExist as e:
            # Post vanished mid-stream; benign, keep streaming.
            log.debug("Post has vanished", exception=e)

        except RPCError as e:
            # Transient node trouble; loop around and re-open the stream.
            log.error("RPC problem while streaming posts", exception=e)
Exemplo n.º 4
0
 def main(self):
     """Smoke-test the database by writing a value and reading it back.

     Returns:
         True when key '4' reads back as 20, otherwise False (including
         when no database connection is available).
     """
     if self.database:
         log.debug("found that database")
         if self.write_data('4', '20'):
             log.debug("wrote that data")
         twenty = self.read_data('4')
         log.debug("read data test: {}".format(twenty))
         # Fix: `is 20` compared object identity and only worked via
         # CPython's small-int cache; value equality is intended.
         if int(twenty) == 20:
             return True
         log.warn("see here... see.")
         return False
     else:
         log.warn("Not logging into the database.")
     return False
Exemplo n.º 5
0
    def get_text_file(self, file_, trunk=True):
        """Load a token-to-vector text file into a result namespace.

        Each line is expected to look like ``<token> <v1> <v2> ...``: the
        first word is the token, the remainder its vector.

        Args:
            file_: path to the text file.
            trunk: unused; kept for interface compatibility.

        Returns:
            A ``sample_text`` namespace with ``content`` (flattened word
            array), ``nwords``, ``word_set``, ``uwords`` (sorted unique
            words) and ``token_to_vector``; False when ``file_`` is not
            a file.
        """
        if not os.path.isfile(file_):
            log.warn("{} is an invalid path".format(file_))
            return False

        class sample_text():
            pass

        msg = "Text Results:\n"
        with open(file_) as f:
            content = f.readlines()
        sample_text.all_content = content
        content = [x.strip() for x in content]
        print(len(content))
        content = [content[i].split() for i in range(len(content))]
        content = np.array(content)
        sample_text.content = np.reshape(content, [
            -1,
        ])
        print(content.shape[:])
        sample_text.nwords = 0
        sample_text.word_set = []
        sample_text.token_to_vector = {}

        for this_line in sample_text.all_content:
            this_line = this_line.strip()
            words_in_line = this_line.split(' ')
            # TOKEN is the first word in the line
            token = words_in_line[0]
            # VECTOR is the line relative to the token
            vector = words_in_line[1:]  # this line minus the token
            sample_text.token_to_vector[token] = vector
            for word in words_in_line:
                sample_text.nwords += 1
                # Fix: words were never collected into word_set, so the
                # unique-word count below always reported 0.
                sample_text.word_set.append(word)

        del sample_text.all_content  # free the raw lines
        msg += "Num Words: {}\n".format(sample_text.nwords)
        sample_text.uwords = sorted(list(set(sample_text.word_set)))
        msg += "Num Unique Words: {}\n".format(len(sample_text.uwords))
        msg += "Num of Sentences or Unique Vectors: {}\n".format(
            len(sample_text.token_to_vector))

        log.debug(msg)
        return sample_text
Exemplo n.º 6
0
    def main(self):
        """Test of connection settings.

        Writes '20' under key '4', reads it back, and compares by value.

        Returns:
            True when the round trip yields 20, otherwise False.
        """
        if self.database:
            log.debug("found that database")
            if self.write_data('4', '20'):
                log.debug("wrote that data")
            twenty = self.read_data('4')
            log.debug("read data test: {}, type: {}".format(
                twenty, type(twenty)))
            # Fix: `is 20` compared identity (CPython small-int cache
            # artifact); use value equality as the comment intended.
            if int(float(twenty)) == 20:
                return True

            log.warn("see here... see.")
            return False
        else:
            log.warn("Not logging into the database.")
        return False
Exemplo n.º 7
0
 def load_tf_model(self, folder=None):
     """Restore the latest checkpoint from ``folder`` into a new session.

     Replaces any existing session, imports the checkpoint's meta graph,
     initializes variables, then restores the saved weights. On success
     sets ``self.saver`` and ``self.model_loaded`` and returns True; on
     any failure logs a warning and returns False (best effort).

     Args:
         folder: checkpoint directory; defaults to ``self.logs_path``.
     """
     if folder is None: folder = self.logs_path
     log.info("Loading Model: {}".format("Model_Name"))
     # Close any session left over from a previous load before building
     # a new one.
     if self.sess:
         self.sess.close()
     try:
         self.sess = tf.InteractiveSession()
         checkpoint_file = tf.train.latest_checkpoint(folder)
         log.info("trying: {}".format(folder))
         # NOTE(review): when no checkpoint exists, latest_checkpoint
         # returns None and the concatenation below raises — handled by
         # the broad except as a "no model" result.
         saver = tf.train.import_meta_graph(checkpoint_file + ".meta")
         log.debug("loading modelfile {}".format(checkpoint_file))
         # Initialize first, then overwrite with the checkpointed values;
         # this order matters.
         self.sess.run(tf.global_variables_initializer())
         saver.restore(self.sess, checkpoint_file)
         log.info("model successfully Loaded: {}".format(checkpoint_file))
         self.saver = saver
         self.model_loaded = True
     except Exception as e:
         log.warn("This folder failed to produce a model {}\n{}".format(folder, e))
         return False
     return True
Exemplo n.º 8
0
    def process_network(
        self,
        sample_set,
        network,
    ):
        """Train ``network`` on ``sample_set``, checkpointing periodically.

        Runs ``self.train_iters`` optimization steps. Each step feeds a
        window of ``self.n_input`` word ids (looked up in
        ``self.database``) plus a one-hot label for the following word.
        Every 25 iterations the pooled accuracy/loss are logged and the
        model is checkpointed; a final checkpoint is written at the end.

        Args:
            sample_set: object exposing ``content`` (word sequence) and
                ``dict_len`` (vocabulary size).
            network: graph handle exposing the ops used below (optimizer,
                accuracy, cost, final_layer, global_step, merged,
                input_word, input_label).

        Returns:
            Tuple ``(loss_total, acc_total)`` from the last pooling window.
        """

        # DEFINES!!
        training_data = sample_set.content
        n_input = self.n_input
        vocab_size = sample_set.dict_len

        # start here
        start_time = time.time()
        session = self.sess
        writer = tf.summary.FileWriter(self.logs_path)
        _step = 0
        offset = random.randint(0, n_input + 1)
        end_offset = n_input + 1
        acc_total = 0
        loss_total = 0
        # NOTE(review): pooling is flushed every 25 iterations below but
        # averaged with display_step=10 — confirm which period is intended.
        display_step = 10
        summary = None  # last summary produced; stays None if no step succeeds
        pred_msg = ' "{}" *returns* "{}" *vs* "{}"\n'
        msg = "step: {0:}, offset: {1:}, acc_total: {2:.2f}, loss_total: {3:.2f}"
        log.debug("Starting the Train Session:")
        # start by adding the whole graph to the Tboard
        writer.add_graph(session.graph)

        for i in range(self.train_iters):
            # Generate a minibatch. Add some randomness on selection process.
            if offset > (len(training_data) - end_offset):
                offset = random.randint(0, self.n_input + 1)
            symbols_in_keys = []
            # Fix: the original reused `i` here, shadowing the iteration
            # counter and breaking the `i % 25` cadence below.
            for k in range(offset, offset + self.n_input):
                symbols_in_keys.append(
                    self.database.read_data(str(training_data[k])))
            symbols_in_keys = np.reshape(np.array(symbols_in_keys),
                                         [-1, n_input, 1])

            # One-hot encode the target word that follows the window;
            # unknown ids fall back to index 0.
            symbols_out_onehot = np.zeros([vocab_size], dtype=float)
            one_hot = self.database.read_data(
                str(training_data[offset + n_input]))
            if one_hot is None:
                one_hot = 0
            symbols_out_onehot[int(one_hot)] = 1.0
            symbols_out_onehot = np.reshape(symbols_out_onehot, [1, -1])

            feed_dict = {
                network.input_word: symbols_in_keys,
                network.input_label: symbols_out_onehot
            }

            try:
                _, acc, loss, onehot_pred, _step, summary = session.run(
                    [
                        network.optimizer, network.accuracy, network.cost,
                        network.final_layer, network.global_step,
                        network.merged
                    ],
                    feed_dict=feed_dict)

                log.debug("###WORKING {}!!####".format(_step))
                # pool data results
                loss_total += loss
                acc_total += acc
                if i % 25 == 0:
                    # acc pool
                    print("###WORKING2!!####")
                    acc_total = (acc_total * 100) / display_step
                    loss_total = loss_total / display_step
                    # gather datas
                    try:
                        symbols_in = [
                            training_data[j]
                            for j in range(offset, offset + n_input)
                        ]
                        symbols_out = training_data[offset + n_input]
                        symbols_out_pred = self.rev_dict.read_data(
                            int(
                                tf.argmax(onehot_pred,
                                          1).eval(session=session)))
                        # do save actions
                        log.info("Saving the Train Session:\n{}\n{}".format(
                            msg.format(_step, offset, acc_total, loss_total),
                            pred_msg.format(symbols_in, symbols_out,
                                            symbols_out_pred)))
                    except Exception as e:
                        # Fix: the original format string had three
                        # placeholders for two arguments and raised
                        # IndexError inside this handler.
                        log.warn("Bad Things are happening here: {}\n\t{}".
                                 format(elapsed(time.time() - start_time), e))
                    # Save Functions
                    self.saver.save(session,
                                    self.logs_path + self.filename,
                                    global_step=network.global_step)
                    writer.add_summary(summary, global_step=_step)
                    # reset the pooling counters
                    acc_total = 0
                    loss_total = 0
                # end of loop increments
                offset += (n_input + 1)
            except Exception as e:
                # Best-effort training: log the failed step and keep going.
                log.warn("BLowing it DUDE... {}\nError: {}".format(_step, e))
        # Save Functions
        self.saver.save(session,
                        self.logs_path + self.filename,
                        global_step=network.global_step)
        if summary is not None:
            # Guard: `summary` is unbound when every iteration failed.
            writer.add_summary(summary, global_step=_step)
        log.info("Optimization Finished!")
        log.debug("Elapsed time: {}".format(elapsed(time.time() - start_time)))
        # (An unreachable `session.close()` after this return was removed.)
        return (loss_total, acc_total)
Exemplo n.º 9
0
        #log.debug("Found correct_pred op: {}".format(params.correct_pred))
        params.accuracy = tf.get_collection_ref('accuracy')[0]
        #log.debug("Found accuracy op: {}".format(params.accuracy))
        params.cost = tf.get_collection_ref('cost')[0]
        #log.debug("Found cost op: {}".format(params.cost))
        params.optimizer = tf.get_collection_ref('optimizer')[0]
        #log.debug("Found optimizer op: {}".format(params.optimizer))
        params.init_op = tf.get_collection_ref('init_op')[0]
        # log.debug("Found init_op op: {}".format(params.init_op))
        # params.saver = tf.get_collection_ref('saver')[0]
        # log.debug("Found saver op: {}".format(params.saver))
        params.merged = tf.get_collection_ref('merged')[0]
        # log.debug("Found merged op: {}".format(params.merged))
        # params.config = tf.get_collection_ref('config')[0]
        params.test = "okay"
        self.params = params
        return params


if __name__ == '__main__':
    # Script entry point: clear the console, run the app, and exit with
    # a status message. Ctrl-C exits cleanly via the handler below.
    try:
        os.system('clear')
        app = App()
        # app.main returning truthy means the self-test passed.
        if app.main(sys.argv):
            sys.exit("PASSED: Thanks A lot for trying Alphagriffin.com")
        log.warn("Alldone! Alphagriffin.com")

    except KeyboardInterrupt:
        os.system('clear')
        sys.exit("AlphaGriffin.com")
Exemplo n.º 10
0
def process(commit, post):
    """Check a scheduled ("timely") post and republish it when due.

    The last line of the post body is expected to hold
    ``<date> <time> <tag>...`` in ``%Y-%m-%d %H:%M`` format. When that
    moment has passed, the body (minus the schedule line) is re-posted
    under the queued tags and the original post is edited into a pointer
    to the new one.

    Args:
        commit: steem commit interface used to create/edit posts.
        post: the draft post to examine.

    Returns:
        True when the post was boiled (republished), False when not yet
        due, None when the post lacks usable timely data.
    """
    log.debug("checking post", post=post.__dict__)

    lines = post.body.splitlines()
    if len(lines) < 2:
        log.warn("this post appears to be empty or lacking timely data", post=post)
        return None

    # Schedule data lives on the last line: "<date> <time> <tag> ..."
    timely = lines[-1].split(' ')
    if len(timely) < 3:
        log.warn("this post lacks timely data: <date> <time> <tag> ...", post=post)
        return None

    # NOTE(review): naive datetime — assumes the schedule is local time.
    when = datetime.strptime('{} {}'.format(timely[0], timely[1]), '%Y-%m-%d %H:%M')

    if datetime.now() >= when:
        log.info("This post is boiling!", post=post)

        tags = timely[2:]
        meta = {'app' : 'boiler/{}'.format(__version__)}
        # New permlink: original permlink prefixed with '-'.
        link = '-' + post.permlink

        # Drop the schedule line (and a closing code fence, if present).
        if lines[-2] == '```':
            body = "\n".join(lines[:-2])
        else:
            body = "\n".join(lines[:-1])

        # Append the bot-credit footer to the new post's body.
        body += "\n---"
        body += "\n<center>*This post made timely by:"
        body += "\n[![Alpha Griffin logo](http://alphagriffin.com/usr/include/ag/favicon/favicon-128.png)"
        body += "\nAlpha Griffin Boiler bot](https://github.com/AlphaGriffin/boiler)"
        body += "\nv" + __version__ + "*</center>"


        # Publish the boiled post under its new permlink.
        newpost = commit.post(
                permlink = link,
                title = post.title,
                author = post.author,
                body = body,
                tags = tags,
                json_metadata = meta,
                self_vote = True
                )
        log.debug("new post committed!", result=newpost)

        # Replace the original draft's body with a pointer to the new post.
        body = "This post has boiled! Find it now here:"
        body += "\n* https://steemit.com/@"+post.author+"/"+link
        body += "\n---"
        body += "\n<center>*Timely posts made possible by:"
        body += "\n[![Alpha Griffin logo](http://alphagriffin.com/usr/include/ag/favicon/favicon-128.png)"
        body += "\nAlpha Griffin Boiler bot](https://github.com/AlphaGriffin/boiler)"
        body += "\nv" + __version__ + "*</center>"

        meta['tags'] = [post.category, 'boiled']

        # Edit-in-place: same permlink plus reply_identifier targets the
        # original post rather than creating a new one.
        edited = commit.post(
                permlink = post.permlink,
                title = post.title,
                author = post.author,
                body = body,
                tags = meta['tags'],
                json_metadata = meta,
                reply_identifier = construct_identifier(post["parent_author"], post["parent_permlink"])
                )
        log.debug("original post edited!", result=edited)

        return True

    else:
        return False
Exemplo n.º 11
0
    def summarize(self, title, tags):
        """Compose and publish a market summary for the symbol/against pair.

        Pulls the latest ticker, compares against the previous snapshot
        (when one was recorded), renders a candlestick/line plot of the
        interval, uploads it to imgur, and posts the summary. In testing
        mode nothing is posted or uploaded; the plot goes to /tmp and the
        body to stdout.

        Args:
            title: base post title; movement text is appended to it.
            tags: tags for the post.

        Returns:
            True when the post succeeded and the snapshot files were
            updated, otherwise False.

        Raises:
            ValueError: unknown currency pair, empty/error chart data,
                summaries requested too frequently, or imgur failure.
        """
        log.info("Summarizing market...",
                 symbol=self.symbol,
                 against=self.against)

        if self.testing:
            log.info("TESTING MODE ENABLED")

        ticker = self.api.ticker()
        try:
            ticker = ticker[self.against + '_' + self.symbol]
        except KeyError as e:
            log.error("Currency pair not found in ticker data",
                      symbol=self.symbol,
                      against=self.against,
                      exception=e)
            raise ValueError("Currency pair not found in ticker data")

        tz = get_localzone()
        now = datetime.now(tz)
        nowstr = now.strftime('%Y-%m-%d %H:%M:%S %Z')
        log.debug("got ticker data", now=nowstr, ticker=ticker)

        # Fiat pairs render with a $ prefix and 2 decimals; crypto with 8.
        last = Decimal(ticker['last'])
        if self.against == 'USDT' or self.against == 'USD':
            symbol = '$'
            quant = Decimal('0.00')
        else:
            symbol = ''
            quant = Decimal('0.00000000')
        laststr = symbol + str(last.quantize(quant))
        log.debug("last trade", value=laststr)

        ath = None
        newath = False

        # NOTE(review): module-level `dir` (shadowing the builtin) is
        # presumably the data directory — confirm where it is defined.
        nowfile = path.join(
            dir, 'market.' + self.symbol + '-' + self.against + '.time')
        lastfile = path.join(
            dir, 'market.' + self.symbol + '-' + self.against + '.last')
        img_url = None

        if path.exists(nowfile) and path.exists(lastfile):
            prev = True

            with open(nowfile, 'r') as infile:
                prev_now = datetime.fromtimestamp(int(
                    infile.readline().strip()),
                                                  tz=tz)

            with open(lastfile, 'r') as infile:
                prev_last = Decimal(infile.readline().strip())

            prev_permlink = self.make_permlink(prev_now)
            prev_nowstr = prev_now.strftime('%Y-%m-%d %H:%M:%S %Z')

            # Absolute and percentage change since the previous snapshot;
            # negatives are rendered positive (direction is worded later).
            change_price = last - prev_last
            if change_price < Decimal('0'):
                change_pricestr = symbol + str(
                    change_price.copy_negate().quantize(quant))
            else:
                change_pricestr = symbol + str(change_price.quantize(quant))

            change_pct = (Decimal('100') * change_price / prev_last).quantize(
                Decimal('0.00'))
            if change_pct < Decimal('0'):
                change_pctstr = str(change_pct.copy_negate()) + '%'
            else:
                change_pctstr = str(change_pct) + '%'

            highest = last
            lowest = last

            # Black-background plot of the interval since the last snapshot.
            fig = plt.figure(figsize=(10, 7), facecolor='k')
            ax = fig.add_subplot(1, 1, 1)
            rect = ax.patch
            rect.set_facecolor('k')
            img_title = self.symbol + '-' + self.against + ' at ' + nowstr
            plt.title(img_title)
            ax.xaxis_date()
            plt.xticks(rotation=25)
            ax.xaxis.set_major_formatter(DateFormatter('%Y-%m-%d %H:%M'))

            # first graph 30-minute candlesticks
            log.info("Plotting 30-minute candlesticks...")

            data = self.api.chartData(pair=self.against + '_' + self.symbol,
                                      start=int(prev_now.strftime("%s")) + 1,
                                      period=1800)

            # Fix: original tested `len(data) < 0`, which can never be
            # true, so empty results were silently accepted.
            if len(data) == 0:
                raise ValueError("No data returned")
            elif len(data) == 1:
                try:
                    error = data['error']
                    log.error("Received error from API", error=error)
                    raise ValueError(
                        "Received error from API: {}".format(error))

                # Fix: a one-row *list* raises TypeError (not KeyError)
                # on the string index above; treat both as "no error".
                except (KeyError, TypeError):
                    if int(data[0]['date']) == 0:
                        raise ValueError(
                            "Too soon! You must wait at least 30 minutes between summaries for candlesticks."
                        )

            for row in data:
                high = Decimal(row['high'])
                if high > highest:
                    highest = high

                low = Decimal(row['low'])
                if low < lowest:
                    lowest = low

                time = datetime.fromtimestamp(int(row['date']))
                popen = Decimal(row['open'])
                close = Decimal(row['close'])

                # Green candle when closing at/above the open, else red.
                if close >= popen:
                    color = 'g'
                else:
                    color = 'r'

                # Candlestick: vertical high-low wick plus open/close ticks.
                vline = Line2D(xdata=(time, time),
                               ydata=(low, high),
                               linewidth=1.5,
                               color=color,
                               antialiased=False)
                oline = Line2D(xdata=(time, time),
                               ydata=(popen, popen),
                               linewidth=1,
                               color=color,
                               antialiased=False,
                               marker=TICKLEFT,
                               markersize=7)
                cline = Line2D(xdata=(time, time),
                               ydata=(close, close),
                               linewidth=1,
                               color=color,
                               antialiased=False,
                               marker=TICKRIGHT,
                               markersize=7)

                ax.add_line(vline)
                ax.add_line(oline)
                ax.add_line(cline)

            # then graph 5-minute lines
            log.info("Plotting 5-minute lines...")

            data = self.api.chartData(pair=self.against + '_' + self.symbol,
                                      start=int(prev_now.strftime("%s")) + 1,
                                      period=300)

            # Same empty/error handling fixes as the candlestick fetch.
            if len(data) == 0:
                raise ValueError("No data returned")
            elif len(data) == 1:
                try:
                    error = data['error']
                    log.error("Received error from API", error=error)
                    raise ValueError(
                        "Received error from API: {}".format(error))

                except (KeyError, TypeError):
                    if int(data[0]['date']) == 0:
                        raise ValueError(
                            "Too soon! You must wait at least 5 minutes between summaries."
                        )

            begin = None

            for row in data:
                high = Decimal(row['high'])
                if high > highest:
                    highest = high

                low = Decimal(row['low'])
                if low < lowest:
                    lowest = low

                time = int(row['date'])
                popen = Decimal(row['open'])
                close = Decimal(row['close'])

                if begin is None:
                    begin = popen

                # Yellow close-to-close segment spanning this 5-min bucket.
                line = Line2D(xdata=(datetime.fromtimestamp(time),
                                     datetime.fromtimestamp(time + 300)),
                              ydata=(begin, close),
                              linewidth=0.7,
                              color='#FFFF00',
                              antialiased=True)

                ax.add_line(line)
                begin = close

            higheststr = symbol + str(highest.quantize(quant))
            loweststr = symbol + str(lowest.quantize(quant))

            # All-time-high bookkeeping; file is only rewritten outside
            # testing mode.
            athfile = path.join(
                dir, 'market.' + self.symbol + '-' + self.against + '.ath')
            if path.exists(athfile):
                with open(athfile, 'r') as infile:
                    ath = Decimal(infile.readline().strip())

                if highest > ath:
                    ath = highest
                    newath = True

                    # Fix: original referenced undefined `testing` here,
                    # raising NameError whenever a new ATH was hit.
                    if not self.testing:
                        with open(athfile, 'w') as out:
                            out.write(str(ath))

            ax.xaxis.grid(True, color='#555555', linestyle='dotted')
            ax.yaxis.grid(True, color='#555555', linestyle='solid')
            plt.tight_layout()
            ax.autoscale_view()

            # save image to file or memory buffer
            if self.testing:
                imgfile = '/tmp/' + self.symbol + '-' + self.against + '.png'
                fig.savefig(imgfile)
                log.info("Market graph PNG saved", file=imgfile)
            else:
                img = io.BytesIO()
                fig.savefig(img, format='png')
                img.seek(0)

            plt.close(fig)

            # now upload result to imgur
            if not self.testing:
                log.info("Uploading plot to imgur...")

                img_b64 = base64.standard_b64encode(img.read())
                client = 'bbe2ecf93d88915'
                headers = {'Authorization': 'Client-ID ' + client}
                imgur_data = {'image': img_b64, 'title': img_title}
                req = Request(url='https://api.imgur.com/3/upload.json',
                              data=urlencode(imgur_data).encode('ASCII'),
                              headers=headers)
                resp = urlopen(req).read()
                resp = json.loads(resp)
                log.debug("Got response from imgur", resp=resp)

                if resp['success']:
                    img_url = resp['data']['link']
                    log.info("Image uploaded successfully", url=img_url)

                else:
                    log.error("Non-successful response from imgur", resp=resp)
                    raise ValueError("Non-successful response from imgur")

        else:
            # First run: no previous snapshot to compare against.
            prev = False

        # Compose the post body (markdown).
        body = "Market Summary for " + self.symbol
        body += "\n=="
        body += "\n* All prices in *" + self.against + "*"
        body += "\n---"
        body += "\n"
        if prev:
            if change_pct > Decimal('0'):
                body += "\nUp " + change_pctstr
                title += ": Up " + change_pctstr
            elif change_pct < Decimal('0'):
                body += "\nDown " + change_pctstr
                title += ": Down " + change_pctstr
            else:
                body += "\nFlat"
                title += ": Flat"
            if newath:
                body += " (New All Time High Achieved)"
                title += " -- New All Time High!"
            body += "\n-"
            body += "\n" + self.symbol + " **"
            if change_price > Decimal('0'):
                body += "gained " + change_pricestr
            elif change_price < Decimal('0'):
                body += "lost " + change_pricestr
            else:
                body += "had no change"
            body += "** since the [last market summary]"
            body += "(https://steemit.com/@" + account.id + "/" + prev_permlink + ")"
            if change_pct > Decimal('0'):
                body += ", a change of **" + change_pctstr + "**"
            elif change_pct < Decimal('0'):
                body += ", a change of **-" + change_pctstr + "**"
            body += "."
        else:
            body += "\n*This is the first market summary, so no previous comparison data is available.*"
        body += "\n"
        body += "\n* Last trade: *" + laststr + "*"
        if prev:
            body += "\n* Highest trade: *" + higheststr + "*"
            if newath:
                body += " (new all time high)"
            body += "\n* Lowest trade: *" + loweststr + "*"
            if img_url is not None:
                body += "\n"
                body += "\n[![market activity plot](" + img_url + ")](" + img_url + ")"
        body += "\n"
        body += "\n---"
        body += "\n"
        body += "\n* Snapshot taken at *" + nowstr + "*"
        if prev:
            body += "\n* Previous snapshot: *[" + prev_nowstr + "]"
            body += "(https://steemit.com/@" + account.id + "/" + prev_permlink + ")*"
        body += "\n* Quote data from [Poloniex](http://poloniex.com)"
        body += "\n"
        body += "\n<center>Happy trading... stay tuned for the next summary!</center>"
        body += "\n"
        body += "\n---"
        body += "\n<center>*This market summary produced automatically by:"
        body += "\n[![Alpha Griffin logo](http://alphagriffin.com/usr/include/ag/favicon/favicon-128.png)"
        body += "\nAlpha Griffin Boiler bot](https://github.com/AlphaGriffin/boiler)"
        body += "\nv" + __version__ + "*</center>"

        if self.testing:
            print(body)

        permlink = self.make_permlink(now)
        tries = 0
        post = None

        # Retry posting on transient RPC errors, up to self.max_tries.
        while tries < self.max_tries:
            try:
                log.info("Posting summary...",
                         permlink=permlink,
                         title=title,
                         last=laststr,
                         tags=tags)

                if self.testing:
                    log.warn("Not actually going to post (testing mode)")
                    break

                post = self.commit.post(permlink=permlink,
                                        title=title,
                                        author=account.id,
                                        body=body,
                                        tags=tags,
                                        self_vote=True)

                break

            except RPCError as e:
                log.warn(
                    "Got RPC error while posting, trying again in 1 minute...",
                    exception=e)
                tries += 1
                sleep(60)

        if post is not None:
            log.info("Summary posted successfully", post=post)

            # Record this snapshot only after a successful post so the
            # next run compares against it.
            with open(nowfile, 'w') as out:
                out.write(now.strftime("%s"))

            with open(lastfile, 'w') as out:
                out.write(str(last))

            return True

        else:
            if not self.testing:
                log.error("Failed to post summary")

            return False