Example #1
def send_one_off(email_type):
    with db_utils.request_context():
        for e in db_models.Presale.query.all():
            print(e.email)
            email_types.send_email_type(email_type, DEFAULT_SENDER, e.email)

        for e in db_models.EmailList.query.all():
            print(e.email)
            email_types.send_email_type(email_type, DEFAULT_SENDER, e.email)
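For context, a minimal sketch of what a helper like email_types.send_email_type could look like, assuming a plain SMTP backend; the template table, host, and all names below are illustrative assumptions, not the project's actual implementation.

import smtplib
from email.message import EmailMessage

# Hypothetical template registry; the real email_types module may differ.
TEMPLATES = {
    "announcement": ("Announcement", "Hello! We have news."),
}

def send_email_type(email_type, sender, recipient):
    subject, body = TEMPLATES[email_type]
    msg = EmailMessage()
    msg["Subject"] = subject
    msg["From"] = sender
    msg["To"] = recipient
    msg.set_content(body)
    with smtplib.SMTP("localhost") as smtp:  # assumed local mail relay
        smtp.send_message(msg)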
Example #2
def send_one_off(email_type):
    with db_utils.request_context():
        # the message log takes care of deduping emails that may appear in multiple tables
        for e in db_models.Presale.query.all():
            print(e.email)
            email_types.send_email_type(email_type, DEFAULT_SENDER, e.email)

        for e in db_models.EmailList.query.all():
            print(e.email)
            email_types.send_email_type(email_type, DEFAULT_SENDER, e.email)

        for e in db_models.Interest.query.all():
            print(e.email)
            email_types.send_email_type(email_type, DEFAULT_SENDER, e.email)
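The comment above leans on a message log for de-duplication. A minimal sketch of that idea, assuming a hypothetical MessageLog model keyed on (email, email_type); the real schema may differ:

def already_sent(session, email, email_type):
    # Skip a recipient if the log already records this (email, email_type) pair.
    return session.query(db_models.MessageLog).filter_by(
        email=email, email_type=email_type).first() is not None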

Example #3
        # (fragment: continues mid-loop over results["result"])
        update += 1
        instance = db_common.get_or_create(db.session,
                                           db_models.CirculatingSupply,
                                           snapshot_date=time_.fromtimestamp(
                                               result["timeStamp"]))
        instance.supply_amount = new_supply
        db.session.add(instance)

        print("%s %s") % (time_.fromtimestamp(result["timeStamp"]), new_supply)

    if do_it:
        db.session.commit()

    print "Have parsed %s/%s transactions" % (update, len(results["result"]))
    print "Circulating supply at the end of all txs: %s" % new_supply


def main(do_it):
    delete_bad_data(do_it)
    fill_missing_txs(do_it)


if __name__ == '__main__':
    with db_utils.request_context():
        parser = argparse.ArgumentParser()
        parser.add_argument('--do_it', action='store_true')
        args = parser.parse_args()
        main(args.do_it)
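The --do_it flag above is a dry-run-by-default gate: rows are staged on the session either way, but committed only when the flag is passed. A generic sketch of the same pattern (stage_changes is a hypothetical placeholder):

def run(session, do_it):
    stage_changes(session)    # hypothetical: add/update objects on the session
    if do_it:
        session.commit()      # persist the staged writes
    else:
        session.rollback()    # dry run: discard everything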
Example #4
def send_one_off(email_type):
    with db_utils.request_context():
        # the message log takes care of deduping emails that may appear in multiple tables
        for e in db_models.EmailList.query.filter_by(unsubscribed=False):
            log(e.email)
            email_types.send_email_type(email_type, DEFAULT_SENDER, e.email)
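The filter_by(unsubscribed=False) clause pushes the opt-out check into SQL rather than filtering in Python. An equivalent spelling with the explicit filter() form, assuming a standard Flask-SQLAlchemy model:

recipients = (db_models.EmailList.query
              .filter(db_models.EmailList.unsubscribed.is_(False))
              .all())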
Example #5
        # (fragment: tail of the pinning step)
        if failed_hashes:
            logging.warning("failed to pin hashes %s", failed_hashes)

    # unpin content that doesn't belong to a listing
    #
    # TODO(cuongdo): Add a grace period for unpinning, so that we don't
    # potentially unpin content that's associated with new listings. Note that
    # unpinning allows GC to *potentially* happen. Once that happens, it's a race
    # between the IPFS GC and the next run of this tool.
    logging.info("hashes to unpin: %s", hashes_to_unpin)
    if hashes_to_unpin and not dry_run:
        unpinned_hashes = set(
            ipfs_helper.unpin_hashes(*hashes_to_unpin)['Pins'])
        # anything we asked to unpin that the gateway didn't confirm has failed
        failed_hashes = hashes_to_unpin - unpinned_hashes
        if failed_hashes:
            logging.warning("failed to unpin hashes %s", failed_hashes)

    logging.info("finished")


if __name__ == '__main__':
    db_utils.request_context().push()
    parser = argparse.ArgumentParser(
        description="Pins content in the IPFS gateway if it's associated with " +
        "an Origin listing and unpins it if there's no associated listing.")
    parser.add_argument('--dry-run', action='store_true',
                        help="output changes but do not execute them")
    args = parser.parse_args()
    _scan_listings(args.dry_run)
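The failure check above is plain set arithmetic: whatever was requested but is missing from the gateway's Pins response is treated as failed. A standalone illustration with made-up hashes:

requested = {"hashA", "hashB", "hashC"}   # hashes_to_unpin
confirmed = {"hashA", "hashC"}            # e.g. set(response['Pins'])
failed = requested - confirmed
print(failed)                             # {'hashB'}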