Example #1
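# Example #1 derives an id by URL-safe base64-encoding the SHA-1 digest of the
# third argument, queues a 128x64 JPEG thumbnail of every *.jpg in the source
# directory with the visualization database (when get_database is available),
# and then donates each photo to the host directory.
# Assumed context, not shown in this excerpt: Python 2 with sys, os, glob,
# base64, hashlib, StringIO and PIL's Image imported, plus the project's
# get_database() and donate() helpers.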
def main():
    usage = 'usage: %s <host directory> <source directory> <id>'
    if len(sys.argv) != 4:
        raise ValueError(usage % sys.argv[0])

    directory = sys.argv[1]
    source_dir = sys.argv[2]
    id = base64.b64encode(hashlib.sha1(sys.argv[3]).digest(), '-_')

    if get_database is not None:
        vis_database = get_database()
    else:
        vis_database = None

    if vis_database is not None:
        filenames = glob.glob(os.path.join(source_dir, '*.jpg'))
        for filename in filenames:
            img = Image.open(filename)
            img.thumbnail((128, 64), Image.ANTIALIAS)
            outfile = StringIO.StringIO()
            img.save(outfile, 'JPEG')
            vis_database.enqueue_photo(filename,
                                       base64.b64encode(outfile.getvalue()))

    for filename in glob.glob(os.path.join(source_dir, '*.jpg')):
        donate(directory, filename, id, vis_database)
Example #2
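# Example #2: constructor that keeps references to the donation database, the
# killswitch and an optional estimate database, and opens a visualization
# database handle only when get_database has been imported.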
    def __init__(self, database, killswitch, estimate_db=None):
        super(DonatedVectorProvider, self).__init__()

        self._db = database
        self._killswitch = killswitch
        self._estimate_db = estimate_db

        if get_database is not None:
            self._vis_db = get_database()
        else:
            self._vis_db = None
Example #3
    def __init__(self, database, killswitch, estimate_db=None):
        super(DonatedVectorProvider, self).__init__()

        self._db = database
        self._killswitch = killswitch
        self._estimate_db = estimate_db

        if get_database is not None:
            self._vis_db = get_database()
        else:
            self._vis_db = None
Example #4
def main():
    usage = 'usage: %prog <photos directory>'
    parser = OptionParser(usage=usage)
    parser.set_defaults(always=False)
    parser.add_option('-a',
                      '--always-upload',
                      dest='always',
                      action='store_true',
                      help="Don't check if photos have already been uploaded")
    options, args = parser.parse_args()

    if len(args) != 1:
        parser.error('Must specify photos directory')

    directory = args[0]
    title = 'Yellowstone photos'
    tags = ['nature', 'vacation']

    flickr = auth_flickr()

    if get_database is not None:
        vis_database = get_database()
    else:
        vis_database = None

    while True:
        today = datetime.datetime.utcnow()

        if options.always or need_to_upload(flickr, tags):
            delete_photos(flickr, tags)

            if vis_database is not None:
                filenames = glob.glob(os.path.join(directory, '*.jpg'))
                for filename in filenames:
                    img = Image.open(filename)
                    img.thumbnail((128, 64), Image.ANTIALIAS)
                    outfile = StringIO.StringIO()
                    img.save(outfile, 'JPEG')
                    vis_database.enqueue_photo(
                        filename, base64.b64encode(outfile.getvalue()))

            address = base64.b64encode(format_address(today))
            for filename in glob.glob(os.path.join(directory, '*.jpg')):
                donate(flickr, filename, address, title, tags, vis_database)

        while today.day == datetime.datetime.utcnow().day:
            time.sleep(1)
Example #5
def main():
    usage = 'usage: %prog <photos directory>'
    parser = OptionParser(usage=usage)
    parser.set_defaults(always=False)
    parser.add_option('-a', '--always-upload', dest='always',
                      action='store_true',
                      help="Don't check if photos have already been uploaded")
    options, args = parser.parse_args()

    if len(args) != 1:
        parser.error('Must specify photos directory')

    directory = args[0]
    title = 'Yellowstone photos'
    tags = ['nature', 'vacation']

    flickr = auth_flickr()

    if get_database is not None:
        vis_database = get_database()
    else:
        vis_database = None

    while True:
        today = datetime.datetime.utcnow()

        if options.always or need_to_upload(flickr, tags):
            delete_photos(flickr, tags)

            if vis_database is not None:
                filenames = glob.glob(os.path.join(directory, '*.jpg'))
                for filename in filenames:
                    img = Image.open(filename)
                    img.thumbnail((128, 64), Image.ANTIALIAS)
                    outfile = StringIO.StringIO()
                    img.save(outfile, 'JPEG')
                    vis_database.enqueue_photo(filename, base64.b64encode(outfile.getvalue()))

            address = base64.b64encode(format_address(today))
            for filename in glob.glob(os.path.join(directory, '*.jpg')):
                donate(flickr, filename, address, title, tags, vis_database)

        while today.day == datetime.datetime.utcnow().day:
            time.sleep(1)
Example #6
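# Example #6: a small exercise script for the visualization database. It
# queues a dummy article, then a 64x64 JPEG thumbnail of every *.jpg in the
# directory named on the command line, and finally marks each photo as
# embedded and uploaded, sleeping one second between calls.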
def main():
    db = get_database()

    db.add_article_sender("Test article<br/>"*100);

    directory = sys.argv[1]
    filenames = glob.glob(os.path.join(directory, '*.jpg'))
    for filename in filenames:
        img = Image.open(filename)
        img.thumbnail((64, 64))
        outfile = StringIO.StringIO()
        img.save(outfile, 'JPEG')
        db.enqueue_photo(filename, base64.b64encode(outfile.getvalue()))
        time.sleep(1)

    for filename in filenames:
        db.embed_photo(filename)
        time.sleep(1)
        db.upload_photo(filename)
Example #7
def main():
    db = get_database()

    db.add_article_sender("Test article<br/>" * 100)

    directory = sys.argv[1]
    filenames = glob.glob(os.path.join(directory, '*.jpg'))
    for filename in filenames:
        img = Image.open(filename)
        img.thumbnail((64, 64))
        outfile = StringIO.StringIO()
        img.save(outfile, 'JPEG')
        db.enqueue_photo(filename, base64.b64encode(outfile.getvalue()))
        time.sleep(1)

    for filename in filenames:
        db.embed_photo(filename)
        time.sleep(1)
        db.upload_photo(filename)
Example #8
def main():
    usage = 'usage: %prog [options]'
    parser = OptionParser(usage=usage)
    parser.set_defaults(database='waiting_keys.sqlite', api_key=os.environ['COMMUNITY_FLICKR_API_KEY'], api_secret=os.environ['COMMUNITY_FLICKR_SECRET'])
    parser.add_option('-d', '--database', dest='database', action='store', type='string', help='Waiting keys database')
    parser.add_option('-k', '--flickr-api-key', dest='api_key', action='store', type='string', help='Flickr API key')
    parser.add_option('-s', '--flickr-secret', dest='api_secret', action='store', type='string', help='Flickr API secret')
    (options, args) = parser.parse_args()

    if len(args) != 0:
        parser.error('Invalid argument')

    conn = sqlite3.connect(options.database)
    conn.row_factory = sqlite3.Row
    conn.execute('''CREATE TABLE IF NOT EXISTS waiting
                    (key TEXT, title TEXT, token TEXT)''')
    conn.execute('''CREATE TABLE IF NOT EXISTS tags
                    (tag TEXT, waiting_id INTEGER)''')

    logger.info('Flickr upload daemon starting')

    if get_database is not None:
        vis_db = get_database()
    else:
        vis_db = None

    while True:
        keys = []
        cur = conn.execute('SELECT key FROM waiting')
        for row in cur:
            keys.append(row['key'])

        for key in keys:
            data = retrieve(DONATION_SERVER, key)
            if data != '':
                datafile = tempfile.NamedTemporaryFile(delete=False)
                datafile.write(data.data)
                datafile.close()

                cur = conn.execute('''SELECT rowid,* FROM waiting
                                      WHERE key = ?''', (key,))
                waiting_row = cur.fetchone()
                waiting_id = waiting_row['rowid']

                tags = []
                for row in conn.execute('SELECT tag FROM tags WHERE waiting_id = ?',
                                        (waiting_id,)):
                    tags.append(row['tag'])

                logger.info('Uploading photo %s for token %s', key, waiting_row['token'])

                flickr = flickrapi.FlickrAPI(options.api_key, options.api_secret, token=waiting_row['token'], store_token=False)

                try:
                    flickr.auth_checkToken()
                    flickr.upload(filename=datafile.name, title=str(waiting_row['title']), tags=str(' '.join(tags)))
                    logger.info('Uploading photo %s succeeded', key)
                    if vis_db is not None:
                        vis_db.upload_photo(key)
                except flickrapi.FlickrError:
                    logger.info('Uploading photo %s failed', key)
                    if vis_db is not None:
                        vis_db.remove_photo(key)

                conn.execute('DELETE FROM waiting WHERE rowid = ?', (waiting_id,))
                conn.execute('DELETE FROM tags WHERE waiting_id = ?', (waiting_id,))
                conn.commit()

                os.unlink(datafile.name)

        time.sleep(PAUSE_TIME)
Example #9
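# Example #9: Flickr upload daemon. It keeps SQLite tables of waiting keys and
# their tags, polls the donation server for each key, writes any returned data
# to a temporary file, uploads it to Flickr with the stored token, title and
# tags, records the outcome in the visualization database when one is
# available, and then deletes the rows before sleeping for PAUSE_TIME.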
def main():
    usage = 'usage: %prog [options]'
    parser = OptionParser(usage=usage)
    parser.set_defaults(database='waiting_keys.sqlite',
                        api_key=os.environ['COMMUNITY_FLICKR_API_KEY'],
                        api_secret=os.environ['COMMUNITY_FLICKR_SECRET'])
    parser.add_option('-d',
                      '--database',
                      dest='database',
                      action='store',
                      type='string',
                      help='Waiting keys database')
    parser.add_option('-k',
                      '--flickr-api-key',
                      dest='api_key',
                      action='store',
                      type='string',
                      help='Flickr API key')
    parser.add_option('-s',
                      '--flickr-secret',
                      dest='api_secret',
                      action='store',
                      type='string',
                      help='Flickr API secret')
    (options, args) = parser.parse_args()

    if len(args) != 0:
        parser.error('Invalid argument')

    conn = sqlite3.connect(options.database)
    conn.row_factory = sqlite3.Row
    conn.execute('''CREATE TABLE IF NOT EXISTS waiting
                    (key TEXT, title TEXT, token TEXT)''')
    conn.execute('''CREATE TABLE IF NOT EXISTS tags
                    (tag TEXT, waiting_id INTEGER)''')

    logger.info('Flickr upload daemon starting')

    if get_database is not None:
        vis_db = get_database()
    else:
        vis_db = None

    while True:
        keys = []
        cur = conn.execute('SELECT key FROM waiting')
        for row in cur:
            keys.append(row['key'])

        for key in keys:
            data = retrieve(DONATION_SERVER, key)
            if data != '':
                datafile = tempfile.NamedTemporaryFile(delete=False)
                datafile.write(data.data)
                datafile.close()

                cur = conn.execute(
                    '''SELECT rowid,* FROM waiting
                                      WHERE key = ?''', (key, ))
                waiting_row = cur.fetchone()
                waiting_id = waiting_row['rowid']

                tags = []
                for row in conn.execute(
                        'SELECT tag FROM tags WHERE waiting_id = ?',
                        (waiting_id,)):
                    tags.append(row['tag'])

                logger.info('Uploading photo %s for token %s', key,
                            waiting_row['token'])

                flickr = flickrapi.FlickrAPI(options.api_key,
                                             options.api_secret,
                                             token=waiting_row['token'],
                                             store_token=False)

                try:
                    flickr.auth_checkToken()
                    flickr.upload(filename=datafile.name,
                                  title=str(waiting_row['title']),
                                  tags=str(' '.join(tags)))
                    logger.info('Uploading photo %s succeeded', key)
                    if vis_db is not None:
                        vis_db.upload_photo(key)
                except flickrapi.FlickrError:
                    logger.info('Uploading photo %s failed', key)
                    if vis_db is not None:
                        vis_db.remove_photo(key)

                conn.execute('DELETE FROM waiting WHERE rowid = ?',
                             (waiting_id,))
                conn.execute('DELETE FROM tags WHERE waiting_id = ?',
                             (waiting_id,))
                conn.commit()

                os.unlink(datafile.name)

        time.sleep(PAUSE_TIME)
Example #10
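# Example #10: news publisher loop. Once per UTC day it formats an address for
# the date, reads the news payload from a file when one was given (falling
# back to fetching it), records the article with the visualization database
# when available, and spawns daemon threads that send the news either to a
# local directory or to the centralized and community channels; each thread
# gets a killswitch that is set once the day rolls over.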
def main():
    usage = "usage: %s [options] <db_dir>"
    parser = OptionParser(usage=usage)
    parser.set_defaults(send_ratio=100, local_dir=None, estimate_db="estimate_db", payload_size=32000)
    parser.add_option(
        "-r",
        "--send-ratio",
        dest="send_ratio",
        action="store",
        type="float",
        help="Ratio between data to send and total data length",
    )
    parser.add_option(
        "-d",
        "--local-dir",
        dest="local_dir",
        action="store",
        type="string",
        help="Local content host directory (for testing)",
    )
    parser.add_option(
        "-e",
        "--estimate-db",
        dest="estimate_db",
        action="store",
        type="string",
        help="Location of capacity estimation database",
    )
    parser.add_option(
        "-m",
        "--memoize-db",
        dest="memoize",
        action="store",
        type="string",
        help="Location of memoization database (advanced)",
    )
    parser.add_option(
        "-f",
        "--file",
        dest="filename",
        action="store",
        type="string",
        help="Read news from a file; default is to fetch from BBC",
    )
    parser.add_option(
        "-s", "--size", dest="payload_size", action="store", type="int", help="Maximum size of payload to publish"
    )
    (options, args) = parser.parse_args()

    estimate_db = shelve.open(options.estimate_db, writeback=True)

    if len(args) != 1:
        parser.error("Need to specify donation database directory")

    while True:
        today = datetime.datetime.utcnow()
        address = common.format_address(today)

        print "Publishing document %s" % address

        if options.filename is not None:
            try:
                data = open(options.filename, "r").read()
            except:
                data = get_news(today, options.payload_size)
                try:
                    open(options.filename, "w").write(data)
                except:
                    pass
        else:
            data = get_news(today, options.payload_size)

        if get_database is not None:
            vis_database = get_database()
            vis_database.add_article_sender(data)

        db_dir = args[0]

        thread_info = []

        # Local directory
        if options.local_dir is not None:
            killswitch = threading.Event()
            thread = threading.Thread(
                target=send_news_local,
                args=(
                    address,
                    data,
                    db_dir,
                    options.local_dir,
                    options.send_ratio,
                    killswitch,
                    estimate_db,
                    options.memoize,
                ),
            )
            thread.daemon = True
            thread.start()
            thread_info.append((thread, killswitch))
        else:
            tags = get_tags()

            # Centralized
            killswitch = threading.Event()
            thread = threading.Thread(
                target=send_news_centralized,
                args=(address, data, db_dir, tags, options.send_ratio, killswitch, estimate_db),
            )
            thread.daemon = True
            thread.start()
            thread_info.append((thread, killswitch))

            # Community
            killswitch = threading.Event()
            thread = threading.Thread(
                target=send_news_community,
                args=(address, data, db_dir, tags, options.send_ratio, killswitch, estimate_db),
            )
            thread.daemon = True
            thread.start()
            thread_info.append((thread, killswitch))

        while today.day == datetime.datetime.utcnow().day:
            time.sleep(1)

        for (thread, killswitch) in thread_info:
            if thread.is_alive():
                killswitch.set()
                thread.join()
Example #11
def main():
    usage = 'usage: %prog [options] <db_dir>'
    parser = OptionParser(usage=usage)
    parser.set_defaults(send_ratio=100,
                        local_dir=None,
                        estimate_db='estimate_db',
                        payload_size=32000)
    parser.add_option('-r',
                      '--send-ratio',
                      dest='send_ratio',
                      action='store',
                      type='float',
                      help='Ratio between data to send and total data length')
    parser.add_option('-d',
                      '--local-dir',
                      dest='local_dir',
                      action='store',
                      type='string',
                      help='Local content host directory (for testing)')
    parser.add_option('-e',
                      '--estimate-db',
                      dest='estimate_db',
                      action='store',
                      type='string',
                      help='Location of capacity estimation database')
    parser.add_option('-m',
                      '--memoize-db',
                      dest='memoize',
                      action='store',
                      type='string',
                      help='Location of memoization database (advanced)')
    parser.add_option(
        '-f',
        '--file',
        dest='filename',
        action='store',
        type='string',
        help='Read news from a file; default is to fetch from BBC')
    parser.add_option('-s',
                      '--size',
                      dest='payload_size',
                      action='store',
                      type='int',
                      help='Maximum size of payload to publish')
    (options, args) = parser.parse_args()

    estimate_db = shelve.open(options.estimate_db, writeback=True)

    if len(args) != 1:
        parser.error('Need to specify donation database directory')

    while True:
        today = datetime.datetime.utcnow()
        address = common.format_address(today)

        print 'Publishing document %s' % address

        if options.filename is not None:
            try:
                data = open(options.filename, 'r').read()
            except:
                data = get_news(today, options.payload_size)
                try:
                    open(options.filename, 'w').write(data)
                except:
                    pass
        else:
            data = get_news(today, options.payload_size)

        if get_database is not None:
            vis_database = get_database()
            vis_database.add_article_sender(data)

        db_dir = args[0]

        thread_info = []

        # Local directory
        if options.local_dir is not None:
            killswitch = threading.Event()
            thread = threading.Thread(target=send_news_local,
                                      args=(address, data, db_dir,
                                            options.local_dir,
                                            options.send_ratio, killswitch,
                                            estimate_db, options.memoize))
            thread.daemon = True
            thread.start()
            thread_info.append((thread, killswitch))
        else:
            tags = get_tags()

            # Centralized
            killswitch = threading.Event()
            thread = threading.Thread(target=send_news_centralized,
                                      args=(address, data, db_dir, tags,
                                            options.send_ratio, killswitch,
                                            estimate_db))
            thread.daemon = True
            thread.start()
            thread_info.append((thread, killswitch))

            # Community
            killswitch = threading.Event()
            thread = threading.Thread(target=send_news_community,
                                      args=(address, data, db_dir, tags,
                                            options.send_ratio, killswitch,
                                            estimate_db))
            thread.daemon = True
            thread.start()
            thread_info.append((thread, killswitch))

        while today.day == datetime.datetime.utcnow().day:
            time.sleep(1)

        for (thread, killswitch) in thread_info:
            if thread.is_alive():
                killswitch.set()
                thread.join()