Example 1
def run(*fileids):
    _redis = redis.client.Redis(settings.REDIS_HOST, settings.REDIS_PORT)
    connection = motor.MotorConnection().open_sync()
    db = connection.tiler

    try:
        cursor = db.images.find({'fileid': {'$in': fileids}})
        for document in (yield motor.Op(cursor.to_list)):
            pprint(document)
            yield motor.Op(db.images.update, {'_id': document['_id']},
                           {'$unset': {
                               'cdn_domain': 1
                           }})
            metadata_key = 'metadata:%s' % document['fileid']
            if _redis.get(metadata_key):
                print "Invalidated metadata cache"
                _redis.delete(metadata_key)
            lock_key = 'uploading:%s' % document['fileid']
            # lock it from AWS upload for 1 hour; note: the legacy redis-py
            # Redis.setex argument order is (name, value, time)
            _redis.setex(lock_key, time.time(), 60 * 60)

            upload_log = os.path.join(ROOT, 'static',
                                      'upload.%s.txt' % document['fileid'])
            if os.path.isfile(upload_log):
                os.remove(upload_log)

            print "\n"

    finally:
        IOLoop.instance().stop()
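These run() snippets drive Motor's callback API through Tornado generators: each yield motor.Op(...) suspends until the underlying operation calls back. In this style the function is normally wrapped with tornado.gen.engine and scheduled before the IOLoop starts; a minimal driver sketch under that assumption (the decorator and entry point are not part of the original):

import sys
from tornado import gen
from tornado.ioloop import IOLoop

@gen.engine
def run(*fileids):
    # ... one of the run() bodies shown in these examples ...
    IOLoop.instance().stop()

if __name__ == '__main__':
    run(*sys.argv[1:])         # schedules the generator on the loop
    IOLoop.instance().start()  # blocks until run() stops the loop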
Example 2
def run(*args):
    _redis = redis.client.Redis(settings.REDIS_HOST, settings.REDIS_PORT)
    connection = motor.MotorConnection().open_sync()
    db = connection.tiler

    try:
        cursor = (db.images.find())
        image = yield motor.Op(cursor.next_object)
        while image:
            _redis.incr('bytes_downloaded', image['size'])
            image = yield motor.Op(cursor.next_object)

    finally:
        IOLoop.instance().stop()
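The next_object/while loop above fetches one document at a time. Motor's cursors also offered a callback-based each(), the style used in Example 12 further down; a sketch of the same traversal that way, reusing the _redis and db objects from above:

def on_image(image, error):
    # each() calls back once per document, then once more with None
    if error:
        raise error
    elif image:
        _redis.incr('bytes_downloaded', image['size'])
    else:
        IOLoop.instance().stop()  # cursor exhausted

db.images.find().each(on_image)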
Example 3
def run(*fileids):
    connection = motor.MotorConnection().open_sync()
    db = connection.tiler
    cursor = db.images.find({'fileid': {'$in': fileids}})
    for document in (yield motor.Op(cursor.to_list)):
        print document
        fid = document['fileid']
        # fileid 'abcdef...' maps to the on-disk shard path 'a/bc/def...'
        image_split = '/'.join([fid[:1], fid[1:3], fid[3:]])
        for each in ('tiles', 'uploads', 'thumbnails'):
            d = os.path.join(HERE, '..', 'static', each, image_split)
            d = os.path.normpath(d)
            if os.path.isdir(d):
                print "DEL", d
                shutil.rmtree(d)
        yield motor.Op(db.images.remove, {'_id': document['_id']})
    IOLoop.instance().stop()
Example 4
def run(*fileids):
    _redis = redis.client.Redis(
        settings.REDIS_HOST,
        settings.REDIS_PORT
    )
    connection = motor.MotorConnection().open_sync()
    db = connection.tiler
    cursor = db.images.find({'fileid': {'$in': fileids}})
    for document in (yield motor.Op(cursor.to_list)):
        print document
        fid = document['fileid']
        # fileid 'abcdef...' maps to the on-disk shard path 'a/bc/def...'
        image_split = '/'.join([fid[:1], fid[1:3], fid[3:]])
        for each in ('tiles', 'uploads', 'thumbnails'):
            d = os.path.join(HERE, '..', 'static', each, image_split)
            d = os.path.normpath(d)
            if os.path.isdir(d):
                print "DEL", d
                shutil.rmtree(d)

        metadata_key = 'metadata:%s' % document['fileid']
        if _redis.get(metadata_key):
            print "Invalidated metadata cache"
            _redis.delete(metadata_key)
        lock_key = 'uploading:%s' % document['fileid']
        _redis.delete(lock_key)

        all_fileids_key = 'allfileids'
        _redis.delete(all_fileids_key)
        all_fileids_key += ':%s' % document['user']
        _redis.delete(all_fileids_key)

        cache_keys_key = 'thumbnail_grid:keys'
        for key in _redis.lrange(cache_keys_key, 0, -1):
            _redis.delete(key)
        _redis.delete(cache_keys_key)

        yield motor.Op(
            db.images.remove,
            {'_id': document['_id']}
        )

    IOLoop.instance().stop()
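Every delete above costs a separate Redis round trip. With redis-py's pipeline the per-document invalidation can be batched; a sketch, assuming the same _redis client and document loop:

        # Batch the cache invalidation for one document into a single
        # round trip. (The thumbnail_grid:keys list still needs an
        # lrange first.)
        pipe = _redis.pipeline()
        pipe.delete('metadata:%s' % document['fileid'])
        pipe.delete('uploading:%s' % document['fileid'])
        pipe.delete('allfileids')
        pipe.delete('allfileids:%s' % document['user'])
        pipe.execute()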
Example 5
def upload_original(fileid, extension, static_path, bucket_id):
    conn = S3Connection(settings.AWS_ACCESS_KEY, settings.AWS_SECRET_KEY)
    bucket = conn.lookup(bucket_id) or conn.create_bucket(bucket_id, location=Location.EU)

    db_connection = motor.MotorConnection().open_sync()
    db = db_connection[settings.DATABASE_NAME]

    original = find_original(fileid, static_path, extension)
    if original:
        relative_path = original.replace(static_path, '')
        k = Key(bucket)
        k.key = relative_path
        print "Uploading original", original
        s = os.stat(original)[stat.ST_SIZE]
        print "%.1fKb" % (s / 1024.)
        # reduced because I'm a cheapskate
        k.set_contents_from_filename(original, reduced_redundancy=True)
        print "Original uploaded"
    else:
        print "Original can't be found", repr(original)
Example 6
def run(*fileids):
    _redis = redis.client.Redis(settings.REDIS_HOST, settings.REDIS_PORT)
    connection = motor.MotorConnection().open_sync()
    db = connection.tiler

    try:
        cursor = db.images.find({'fileid': {'$in': fileids}})
        for document in (yield motor.Op(cursor.to_list)):
            pprint(document)
            yield motor.Op(db.images.update, {'_id': document['_id']},
                           {'$set': {
                               'featured': False
                           }})

            cache_keys_key = 'thumbnail_grid:keys'
            for key in _redis.lrange(cache_keys_key, 0, -1):
                _redis.delete(key)
            _redis.delete(cache_keys_key)

    finally:
        IOLoop.instance().stop()
Example 7
def run(*args):
    _redis = redis.client.Redis(settings.REDIS_HOST, settings.REDIS_PORT)
    connection = motor.MotorConnection().open_sync()
    db = connection.tiler

    yield motor.Op(db.images.remove, {'width': {'$exists': False}})

    try:
        cursor = (db.images.find({'featured': {'$exists': False}}))
        image = yield motor.Op(cursor.next_object)
        while image:
            yield motor.Op(db.images.update, {'_id': image['_id']},
                           {'$set': {
                               'featured': True
                           }})
            print image['fileid']
            image = yield motor.Op(cursor.next_object)

    finally:
        IOLoop.instance().stop()
Example 8
def run(*args):
    _redis = redis.client.Redis(settings.REDIS_HOST, settings.REDIS_PORT)
    connection = motor.MotorConnection().open_sync()
    db = connection.tiler

    try:
        cursor = (db.comments.find())
        comment = yield motor.Op(cursor.next_object)
        _fileids = {}
        while comment:
            if comment['image'] not in _fileids:
                image = yield motor.Op(db.images.find_one,
                                       {'_id': comment['image']})
                _fileids[comment['image']] = image['fileid']
            fileid = _fileids[comment['image']]
            print fileid
            _redis.hincrby('comments', fileid, 1)
            print _redis.hget('comments', fileid)
            #_redis.incr('bytes_downloaded', image['size'])
            comment = yield motor.Op(cursor.next_object)

    finally:
        IOLoop.instance().stop()
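The hincrby calls accumulate one Redis hash of comment counts keyed by fileid, so the whole tally reads back in a single call; a sketch:

# Dump the per-file comment counters accumulated above.
for fileid, count in _redis.hgetall('comments').items():
    print fileid, count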
Example 9
try:
    import motor
except ImportError:
    print >> sys.stderr, ("Can't import motor.\n\n"
                          " Motor is an experimental async driver for"
                          " MongoDB, get it by cloning\n"
                          " git://github.com/ajdavis/mongo-python-driver.git"
                          " and switching to branch 'motor',\n"
                          " then put the mongo-python-driver directory"
                          " on your PYTHONPATH\n\n")

    raise

if __name__ == "__main__":
    opts = options.options()

    # TODO: Mongo connection options
    db = motor.MotorConnection().open_sync().motorblog
    cache.startup(db)

    if opts.ensure_indexes:
        logging.info('Ensuring indexes...')
        indexes.ensure_indexes(db)
        logging.info('    done.')

    base_url = opts.base_url

    class U(tornado.web.URLSpec):
        def __init__(self, pattern, *args, **kwargs):
            """Include base_url in pattern"""
            super(U, self).__init__(
                '/' + base_url.strip('/') + '/' + pattern.lstrip('/'), *args,
                **kwargs)
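The U subclass simply prefixes every route pattern with the configured base_url. A hypothetical usage sketch (FeedHandler and the 'blog' base URL are assumptions, not from the original):

    # With opts.base_url == 'blog', this spec matches '/blog/feed/'
    # even though the handler only declares 'feed/'.
    application = tornado.web.Application([
        U('feed/', FeedHandler, name='feed'),
    ])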
Example 10
try:
    import motor
except ImportError:
    print >> sys.stderr, ("Can't import motor.\n\n"
                          " Motor is an experimental async driver for"
                          " MongoDB, get it by cloning\n"
                          " git://github.com/ajdavis/mongo-python-driver.git"
                          " and switching to branch 'motor',\n"
                          " then put the mongo-python-driver directory"
                          " on your PYTHONPATH\n\n")

    raise

if __name__ == "__main__":

    mongodb_uri = os.environ.get('MONGOLAB_URI', 'mongodb://localhost:27017')

    db = motor.MotorConnection(mongodb_uri).open_sync().agg
    static_path = 'static'

    application = tornado.web.Application([
        URLSpec(r"/static/(.+)",
                StaticFileHandler, {"path": static_path},
                name='static'),
        URLSpec(r"/answer/(?P<quiz_id>.+)", AnswerHandler, name='answer'),
        URLSpec(r"/(?P<quiz_id>[0-9]+)?", MainHandler, name='main'),
        URLSpec(r"/example/?", ExampleHandler, name='example'),
    ],
                                          db=db,
                                          template_path='templates',
                                          debug=True)

    http_server = httpserver.HTTPServer(application)
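The snippet stops after constructing the HTTPServer; the usual continuation binds a port and starts the loop. A sketch (reading PORT from the environment is an assumption, suggested by the MONGOLAB_URI lookup above):

    http_server.listen(int(os.environ.get('PORT', 8888)))
    tornado.ioloop.IOLoop.instance().start()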
Example 11
def serve(listenuri, mongodburi):

    #TODO add RedisURI
    #TODO add Static Path setting
    static_path = os.path.join(os.path.dirname(__file__), "static")
    static_path_dict = dict(path=static_path)
    
    template_path = os.path.join(os.path.dirname(__file__), "templates")

    inbound_path = os.path.join(os.path.dirname(__file__), "../inbound")
        
    motordb = motor.MotorConnection(mongodburi).open_sync().ace
    mongodb = pymongo.Connection(mongodburi).ace
    tornadoredisdb = tornadoredis.Client()
    redisdb = redis.Redis()
    
    application = tornado.web.Application(
        handlers = [
            tornado.web.URLSpec(r"/static/(.*)", tornado.web.StaticFileHandler, static_path_dict),
            tornado.web.URLSpec(r"/(stylesheets/.*)", tornado.web.StaticFileHandler, static_path_dict),
            tornado.web.URLSpec(r"/(images/.*)", tornado.web.StaticFileHandler, static_path_dict),
            tornado.web.URLSpec(r"/(javascripts/.*)", tornado.web.StaticFileHandler, static_path_dict),
            tornado.web.URLSpec(r"/(favicon.ico)$", tornado.web.StaticFileHandler, static_path_dict),
            tornado.web.URLSpec(r"/(robots.txt)$", tornado.web.StaticFileHandler, static_path_dict),
            #tornado.web.URLSpec(r'/login$', AuthLoginHandler, name='login'),
            #tornado.web.URLSpec(r'/logout$', AuthLogoutHandler, name='logout'),
            #tornado.web.URLSpec(r'/dashboard/(?P<search_oid_str_or_request>\w{24})$', DashboardHandler, name='dashboard+search'),
            #tornado.web.URLSpec(r'/dashboard/(?P<search_oid_str_or_request>\w+)$', DashboardHandler, name='dashboard+newsearch'),
            #tornado.web.URLSpec(r'/dashboard$', DashboardHandler, name='dashboard'),
            #tornado.web.URLSpec(r'/search/(?P<search_oid_str>\w{24})/(?P<collection_oid_str>\w{24})$', SearchCollectionHandler, name='search+collection'),
            #tornado.web.URLSpec(r'/search/(?P<search_oid_str>\w{24})$', SearchAllHandler, name='search'),
            #tornado.web.URLSpec(r'/export/csv/(?P<search_oid_str>\w{24})/(?P<collection_oid_str>\w{24})$', ExportCSVSearchCollectionHandler, name='export+search+collection+csv'),
            #tornado.web.URLSpec(r'/api/document/batch$', APIDocumentBatchHandler),
            #tornado.web.URLSpec(r'/api/document/insert$', APIDocumentInsertHandler),
            #tornado.web.URLSpec(r'/api/document/link$', APIDocumentLinkHandler),
            #tornado.web.URLSpec(r"/$", tornado.web.RedirectHandler, kwargs=dict(url='/dashboard')), #Temporary pending main advert/news page
            tornado.web.URLSpec(r"/jsontest$", JSONTestHandler),
            tornado.web.URLSpec(r"/region/(.*)", RegionHandler),
            tornado.web.URLSpec(r"/purchase$", PurchaseHandler),
            tornado.web.URLSpec(r"/login$", LoginHandler, name='login'),
            tornado.web.URLSpec(r"/logout$", LogoutHandler, name='logout'),
            tornado.web.URLSpec(r"/$", MainHandler, name='index'),
            tornado.web.URLSpec(r"/(.*)", PageErrorHandler, kwargs=dict(error=404)),
        ],
        **{          
            'template_path': template_path,
            'static_path': static_path,
            'inbound_path': inbound_path,
            'cookie_secret': 'cookiemonster',
            'xsrf_cookies': True,
            'debug': True, #FIXME
            'login_url': '/login',
            'mongodb': mongodb,
            'motordb': motordb,
            'tornadoredisdb': tornadoredisdb,
            'redisdb': redisdb,
            'siteuser': bson.ObjectId('50bb047f17a78f9c422b45da'),
            'cache': {
                'region': {
                    'key': 'aceregion',
                    'stamp': None,
                    'array': [],
                    'sectioned': {},
                    'map': {
                        '_id': {},
                        'slug': {},
                    },
                },
            },
            'pycket': {
                'engine': 'redis',
                'storage': {
                    'host': 'localhost',
                    'port': 6379,
                    'db_sessions': 10,
                    'db_notifications': 11,
                },
                'cookies': {
                    'expires_days': 120,
                },
            },
        }
    )


    server = tornado.httpserver.HTTPServer(application, xheaders=True)
    server.listen(8080)

    tornado.ioloop.IOLoop.instance().start()
Example 12
class MessagesHandler(tornado.web.RequestHandler):
    @tornado.web.asynchronous
    def get(self):
        """Display all messages
        """
        self.write('<a href="/compose">Compose a message</a><br>')
        self.write('<ul>')
        db = self.settings['db']
        db.messages.find().sort([('_id', -1)]).each(self._got_message)

    def _got_message(self, message, error):
        if error:
            raise tornado.web.HTTPError(500, error)
        elif message:
            self.write('<li>%s</li>' % message['msg'])
        else:
            # Iteration complete
            self.write('</ul>')
            self.finish()


db = motor.MotorConnection().open_sync().test

application = tornado.web.Application([(r'/compose', NewMessageHandler),
                                       (r'/', MessagesHandler)],
                                      db=db)

print 'Listening on http://localhost:8888'
application.listen(8888)
tornado.ioloop.IOLoop.instance().start()
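The application maps /compose to a NewMessageHandler that the snippet does not show; a hypothetical sketch in the same callback style:

class NewMessageHandler(tornado.web.RequestHandler):
    def get(self):
        self.write('<form method="post">'
                   '<input name="msg"><input type="submit"></form>')

    @tornado.web.asynchronous
    def post(self):
        # Insert the message, then redirect once Motor calls back.
        db = self.settings['db']
        db.messages.insert({'msg': self.get_argument('msg')},
                           callback=self._on_insert)

    def _on_insert(self, result, error):
        if error:
            raise tornado.web.HTTPError(500, error)
        self.redirect('/')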
Example 13
def db(self):
    if not self._db_connection:
        self._db_connection = motor.MotorConnection().open_sync()
    return self._db_connection[settings.DATABASE_NAME]
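This is a method excerpted from some class; a sketch of the likely surrounding context, assuming it is exposed as a lazily initialized property on a shared base handler:

class BaseHandler(tornado.web.RequestHandler):
    _db_connection = None  # opened on first access, then reused

    @property
    def db(self):
        if not self._db_connection:
            self._db_connection = motor.MotorConnection().open_sync()
        return self._db_connection[settings.DATABASE_NAME]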
Example 14
def upload_all_tiles(fileid,
                     static_path,
                     bucket_id,
                     max_count=0,
                     only_if_no_cdn_domain=False,
                     replace=True):
    #print "upload_all_tiles", fileid, static_path, bucket_id, max_count, only_if_no_cdn_domain, replace
    log_file = os.path.join(static_path, 'upload.%s.txt' % fileid)

    conn = connect_to_s3()
    bucket = conn.lookup(bucket_id) or conn.create_bucket(bucket_id,
                                                          location=Location.EU)
    #bucket.set_acl('public-read')

    db_connection = motor.MotorConnection().open_sync()
    db = db_connection[settings.DATABASE_NAME]

    document = yield motor.Op(db.images.find_one, {'fileid': fileid})
    if not document:
        logging.warning("Image %r does not exist" % fileid)
        IOLoop.instance().stop()
        return

    if document.get('cdn_domain'):
        if only_if_no_cdn_domain:
            IOLoop.instance().stop()
            return
        else:
            warnings.warn("%s already has a cdn_domain (%s)" %
                          (fileid, document['cdn_domain']))

    try:
        count = 0
        all_done = True
        all_tiles = list(find_all_tiles(fileid, static_path))
        random.shuffle(all_tiles)
        #if len(all_tiles) > max_count:
        #    total = max_count
        #else:
        #    total = len(all_tiles)
        total = len(all_tiles)
        aggressive_headers = get_aggressive_headers()
        for each in all_tiles:
            # load which ones we've done every time to prevent
            # parallel workers uploading the same file more than once
            try:
                done = [x.strip() for x in open(log_file) if x.strip()]
            except IOError:
                done = []
            if each not in done:
                done.append(each)
                relative_path = each.replace(static_path, '')
                k = Key(bucket)
                k.key = relative_path
                # docs:
                # http://boto.cloudhackers.com/en/latest/ref/s3.html#boto.s3.\
                #   key.Key.set_contents_from_filename
                print "uploading", relative_path,
                try:
                    count_done = set(x.strip() for x in open(log_file))
                except IOError:
                    count_done = set()
                print "(%d of %d)" % (len(count_done), total)
                k.set_contents_from_filename(
                    each,
                    replace=replace,
                    reduced_redundancy=True,
                    headers=aggressive_headers,
                )
                k.make_public()
                open(log_file, 'a').write(each + '\n')
                count += 1
                if max_count > 0 and count >= max_count:
                    print "STOPPING @", count
                    all_done = False
                    break

        if all_done:
            data = {'cdn_domain': settings.DEFAULT_CDN_TILER_DOMAIN}
            print "Updating document finally"
            yield motor.Op(db.images.update, {'_id': document['_id']},
                           {'$set': data})
            # invalidate some redis keys
            _redis = redis.client.Redis(settings.REDIS_HOST,
                                        settings.REDIS_PORT)
            lock_key = 'uploading:%s' % fileid
            _redis.delete(lock_key)
            metadata_key = 'metadata:%s' % fileid
            # make it expire in a minute
            data = _redis.get(metadata_key)
            if data:
                # this gives all workers a chance to finish
                # any leftover jobs such as optimizations
                _redis.setex(metadata_key, data, 60)

    finally:
        print "# done", count
        IOLoop.instance().stop()
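get_aggressive_headers() is called but not defined in the snippet; since tiles are immutable once uploaded, it presumably returns long-lived caching headers. A hypothetical sketch:

def get_aggressive_headers():
    # Far-future caching suits tile files that never change once uploaded.
    return {'Cache-Control': 'public, max-age=31536000'}  # one year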
Example 15
def main(args):
    start = time.time()

    opts = options.options()
    destination_url = '/' + opts.base_url.lstrip('/')
    parts = urlparse(args.source_url)
    source_base_url = urljoin('%s://%s' % (parts[0], parts[1]),
                              parts[2].split('/xmlrpc.php')[0])

    print 'Base URL', source_base_url

    db = pymongo.Connection(safe=True).motorblog
    motordb = motor.MotorConnection().open_sync().motorblog
    if args.wipe:
        print 'Wiping motorblog database'
        db.connection.drop_database('motorblog')
        print 'Creating capped collection "events"'
        create_events_collection(motordb)
        print 'Recreating indexes'
        ensure_indexes(db)

    source = Blog(args.source_url,
                  args.source_username,
                  args.source_password,
                  use_cache=not args.refresh,
                  verbose=args.verbose)
    print 'Getting media library'

    media_library = set([m['link'] for m in source.get_media_library()])

    print '    %s assets\n' % len(media_library)

    print 'Getting posts and pages'
    post_structs = source.get_recent_posts(args.nposts)
    print '    %s posts' % len(post_structs)
    page_structs = source.get_pages()
    print '    %s pages' % len(page_structs)
    print

    for structs, type in [
        (post_structs, 'post'),
        (page_structs, 'page'),
    ]:
        print '%sS' % type.upper()
        for struct in structs:
            categories = struct.pop('categories', [])
            struct['description'] = wordpress_to_markdown(
                struct, media_library, db, destination_url, source_base_url)

            post = Post.from_metaweblog(struct, type)

            print '%-34s %s' % (post.title, post.status.upper())
            for category_name in categories:
                doc = db.categories.find_one({'name': category_name})
                if doc:
                    category = Category(**doc)
                else:
                    category = Category(name=category_name,
                                        slug=slugify(category_name))
                    category.id = db.categories.insert(category.to_python())
                print '    %-30s %s' % (category_name,
                                        ' NEW' if not doc else '')

                post.categories.append(category)

            db.posts.insert(post.to_python())

        print '\nFinished %s %ss' % (len(structs), type)

    print 'Posting "categories_changed" event'
    db.events.insert(
        {
            'ts': datetime.datetime.utcnow(),
            'name': 'categories_changed'
        },
        manipulate=False)  # No need to add _id

    print '\nFinished in %.2f seconds' % (time.time() - start)
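The import relies on a slugify helper that is not shown; a hypothetical minimal version:

import re

def slugify(text):
    # Lowercase, strip punctuation, and collapse whitespace to hyphens.
    text = re.sub(r'[^\w\s-]', '', text).strip().lower()
    return re.sub(r'[-\s]+', '-', text)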