Example #1
class CouchDBBackend(object):
    @property
    def db(self):
        return self.server[self.db_name]

    def __init__(self, config):
        settings = config.registry.settings

        self.config = config
        self.server = Server(settings['backend.db_host'])
        self.db_name = os.environ.get('DB_NAME', settings['backend.db_name'])

        self.create_db_if_not_exist()
        self.sync_views()
        self.config.add_subscriber(self.add_db_to_request, NewRequest)

    def delete_db(self):
        del self.server[self.db_name]

    def create_db_if_not_exist(self):
        try:
            self.server.create(self.db_name)
            logger.debug('Creating and using db "%s"' % self.db_name)
        except PreconditionFailed:
            logger.debug('Using db "%s".' % self.db_name)

    def sync_views(self):
        ViewDefinition.sync_many(self.db, docs)

    def add_db_to_request(self, event):
        event.request.db = Database(self.db)
Example #2
    def _open_couch_db(self, dbname):
        server = Server(url=self._couch_url)
        try:
            server[dbname]
        except ResourceNotFound:
            server.create(dbname)
        return server[dbname]
Example #3
class CouchDBBackend(object):

    @classmethod
    def load_from_config(cls, config):
        settings = config.registry.settings
        generator = config.maybe_dotted(settings['daybed.id_generator'])
        return CouchDBBackend(
            host=settings['backend.db_host'],
            db_name=os.environ.get('DB_NAME', settings['backend.db_name']),
            id_generator=generator(config),
        )

    def __init__(self, host, db_name, id_generator):
        self.server = Server(host)
        self.db_name = db_name

        try:
            self.create_db_if_not_exist()
        except socket.error as e:
            raise CouchDBBackendConnectionError(
                "Unable to connect to the CouchDB server: %s - %s" % (host, e))

        self._db = self.server[self.db_name]
        self.sync_views()

    def delete_db(self):
        del self.server[self.db_name]

    def create_db_if_not_exist(self):
        try:
            self.server.create(self.db_name)
            logger.info('Creating and using db "%s"' % self.db_name)
        except PreconditionFailed:
            logger.info('Using db "%s".' % self.db_name)

    def sync_views(self):
        ViewDefinition.sync_many(self.server[self.db_name], docs)

    def __get_raw_user_token(self, user_id):
        try:
            return views.usertokens(self._db, key=user_id).rows[0].value
        except IndexError:
            raise UserIdNotFound(user_id)

    def get_user_token(self, user_id):
        """Returns the information associated with a user token"""
        usertoken = dict(**self.__get_raw_user_token(user_id))
        return usertoken['token']

    def add_token(self, user_id, token):
        # Check that the token doesn't already exist.
        try:
            self.__get_raw_user_token(user_id)
            raise UserIdAlreadyExist(user_id)
        except UserIdNotFound:
            pass

        doc = dict(token=token, user_id=user_id, type='usertoken')
        self._db.save(doc)
Example #4
def connector(collection):
    # Make a server connection
    svr = Server()
    if collection not in svr:
        return svr.create(collection)
    else:
        return svr[collection]
Example #5
def couch_connect():
    server = Server()
    try:
        db = server.create('feedme')
    except:
        db = server['feedme']
    return db
Example #6
def connector(collection):
  # Make a server connection
  svr = Server()
  if collection not in svr:
    return svr.create(collection)
  else:
    return svr[collection]
Example #7
    def get_db(self, db_uri, db_name):
        server = Server(db_uri)
        try:
            db = server[db_name]
        except ResourceNotFound:
            db = server.create(db_name)
        return db
Example #8
def initDatabase():
    server = Server('http://localhost:5984')
    try:
        # Drop any existing database so the crawler starts fresh.
        del server['lazycrawler']
    except Exception:
        # Nothing to delete on the first run.
        pass
    try:
        db = server.create('lazycrawler')
    except Exception:
        db = server['lazycrawler']
    return db
Example #9
def Create_Couchdb_Instance(dtbs_name):
    
    server = Server("http://127.0.0.1:5984")
    try:
        db = server.create(dtbs_name)
    except Exception:
        db = server[dtbs_name]
    return db  
Example #10
    def saveDocument(self):
        server = Server()
        server.resource.credentials = ('secret', 'aedca29aed23e103f27f8e29dd6ec473')
        # create database
        db = server.create("new.job")
        # associate NewJob to the db
        NewJob.set_db(db)
        self.save()
Example #11
File: db.py Project: rndD/boxus
class DB(object):

    server = None
    sensors = None
    readings = None
    devices = None

    config = {
        # TODO Server credentials are currently not used
        'server': {
            'host': 'localhost',
            'port': 5984
        },
        'schema': {
            'sensors_db': 'sensors',
            'readings_db': 'readings',
            'devices_db': 'devices'
        }
    }

    def __init__(self, config_path=None):
        self.server = Server()

        if config_path:
            self.config = yaml.load(open(config_path, 'r').read())

        if not all(db_name in self.server
                   for db_name in self.config['schema'].values()):
            self.setup()

        self.connect()

    def connect(self):
        for db_name, attr_name in [['sensors_db', 'sensors'],
                                   ['readings_db', 'readings'],
                                   ['devices_db', 'devices']]:
            setattr(self, attr_name,
                    self.server[self.config['schema'][db_name]])

    def setup(self):
        for db_name in ['sensors_db', 'readings_db', 'devices_db']:
            if self.config['schema'][db_name] not in self.server:
                self.server.create(self.config['schema'][db_name])
Example #12
def init_boards():
    """

    :rtype : object
    """
    server = Server()
    try:
        db = server.create('boards')
    except Exception:
        db = server['boards']
    return db
Example #13
def init_boards():
    """

    :rtype : object
    """
    server = Server()
    try:
        db = server.create("boards")
    except Exception:
        db = server["boards"]
    return db
Example #14
    def __init__(self, db_uri):
        local.application = self

        server = Server(db_uri)
        try:
            db = server.create("urls")
        except Exception:
            db = server["urls"]
        self.dispatch = SharedDataMiddleware(self.dispatch, {"/static": STATIC_PATH})

        URL.db = db
Example #15
def main(global_config, **settings):
    """ This function returns a Pyramid WSGI application.
    """
    authn_policy = AuthTktAuthenticationPolicy(
        'sosecret', hashalg='sha512')
    authz_policy = ACLAuthorizationPolicy()
    config = Configurator(settings=settings, root_factory=RootFactory)
    config.set_authentication_policy(authn_policy)
    config.set_authorization_policy(authz_policy)
    dbserver = Server(url = settings['couchdb.url'])
    if settings['couchdb.dbname'] not in dbserver:
        dbserver.create(settings['couchdb.dbname'])
    config.registry.settings['couchdb.server'] = dbserver
    config.add_renderer(".html", "pyramid.mako_templating.renderer_factory")
    config.add_static_view('static', 'static', cache_max_age=3600)
    config.add_route('home', '/')
    config.add_route('item', '/item/*traverse', factory=Container)
    config.add_route('tag', '/tag/*traverse', factory=Tags)
    config.scan()
    return config.make_wsgi_app()
Example #16
    def __init__(self, db_uri):
        local.application = self

        server = Server(db_uri)
        try:
            db = server.create("urls")
        except Exception:
            db = server["urls"]
        self.dispatch = SharedDataMiddleware(self.dispatch,
                                             {"/static": STATIC_PATH})

        URL.db = db
Example #17
    def __init__(self):

        from couchdb.client import Server

        couch = Server('http://localhost:5984')
        print (couch)
        try:
            self.db = couch.create('run-layer')
        except:
            self.db = couch['run-layer']

        print (self.db)
Example #18
    def __init__(self, db_uri):
        local.application = self

        server = Server(db_uri)
        try:
            db = server.create('urls')
        except:
            db = server['urls']
        self.dispatch = SharedDataMiddleware(self.dispatch, {
            '/static':    STATIC_PATH
        })

        URL.db = db
Example #19
class Connection(object):
    """
        Connect to CouchDB according to params in the settings.py file
        and store that internally.

        Access is made with this class because it's a singleton.
    """

    _inst = None

    def __new__(cls, address=None, port=None, db_name=None,  *args, **kwargs):
        """
            Ensure we have only one instance for the connection.
        """
        if not cls._inst:
            cls._inst = object.__new__(cls, *args, **kwargs)
            cls._inst.connect(address, port, db_name)
        return cls._inst


    def close(self):
        Connection._inst = None


    def connect(self, address=None, port=None, db_name=None):
        """
            Connect to the CouchDB server and work on the database mentioned in
            the settings.
        """
        address = address or settings.SERVER_ADDRESS

        self.url = "%s:%s/" % (address.rstrip("/"), port or settings.SERVER_PORT)
        self.server = Server(self.url)

        db_name = db_name or settings.DATABASE_NAME
        try:
            self.db = self.server.create(db_name)
        except PreconditionFailed:
            self.db = self.server[db_name]


    def __unicode__(self):
        return u"Connected on %s - working on %s" % (self.url, self.db.name)

    def __str__(self):
        return unicode(self)

    def __repr__(self):
        return repr(self.db)
Example #20
class CouchDBBackend(object):
    def db(self):
        return Database(self.server[self.db_name], self._generate_id)

    def __init__(self, config):
        settings = config.registry.settings

        self.config = config
        self.server = Server(settings['backend.db_host'])
        self.db_name = os.environ.get('DB_NAME', settings['backend.db_name'])

        # model id generator
        generator = config.maybe_dotted(settings['daybed.id_generator'])
        self._generate_id = generator(config)

        try:
            self.create_db_if_not_exist()
        except socket.error as e:
            raise CouchDBBackendConnectionError(
                "Unable to connect to the CouchDB server: %s - %s" % (
                    settings['backend.db_host'], e))

        self.sync_views()

    def delete_db(self):
        del self.server[self.db_name]

    def create_db_if_not_exist(self):
        try:
            self.server.create(self.db_name)
            logger.info('Creating and using db "%s"' % self.db_name)
        except PreconditionFailed:
            logger.info('Using db "%s".' % self.db_name)

    def sync_views(self):
        ViewDefinition.sync_many(self.server[self.db_name], docs)
Example #21
def main(global_config, **settings):
    auth_token = MozillaTokenLibAuthenticationPolicy(secret='what_makes_so_secret', hashmod=hashlib.sha256, callback=groupfinder, timeout=86400)
    auth_permission = ACLAuthorizationPolicy()
    config = Configurator(settings=settings,
                          root_factory=RootFactory)
    config.set_authentication_policy(auth_token)
    config.set_authorization_policy(auth_permission)
    db_server = Server(url=settings['CouchDB.url'])
    es_server = ElasticSearch(settings['ES.url'])
    upyun_server = UpYun(settings['upyun.space'], settings['upyun.username'], settings['upyun.password'], timeout=30, endpoint=ED_AUTO)
    if settings['CouchDB.db_name'] not in db_server:
        db_server.create(settings['CouchDB.db_name'])
    config.registry.settings['CouchDB.server'] = db_server
    config.registry.settings['ES.server'] = es_server
    config.registry.settings['UpYun.server'] = upyun_server
    config.add_static_view('static', 'static', cache_max_age=3600)
    config.include("cornice")
    config.add_subscriber(add_couchdb_to_request, NewRequest)
    config.add_route('home', '/')
    config.scan("api.views.restapi")
    config.scan("api.views.display")
    requests_log = logging.getLogger("requests")
    requests_log.setLevel(logging.WARNING)
    return config.make_wsgi_app()
Example #22
class TestCouchDBBackend(BackendTestBase, TestCase):

    def setUp(self):
        self.server = Server('http://localhost:5984')
        self.db_name = 'test_%s' % uuid4()
        self.server.create(self.db_name)

        db = self.server[self.db_name]
        ViewDefinition.sync_many(db, couchdb_views)
        self.db = CouchDBDatabase(db, lambda: six.text_type(uuid4()))
        super(TestCouchDBBackend, self).setUp()

    def tearDown(self):
        del self.server[self.db_name]

    def test_server_unreachable(self):
        config = mock.Mock()
        config.registry = mock.Mock()
        config.registry.settings = defaultdict(str)
        config.registry.settings['backend.db_host'] = 'http://unreachable/'
        config.registry.settings['backend.db_name'] = 'daybed'

        with self.assertRaises(CouchDBBackendConnectionError):
            CouchDBBackend(config)
Example #23
class CouchProvider(object):
    def __init__(self, uri=None, database=None):
        print '%s|%s' % (uri, database)
        self.server = Server(uri)

        try:
            # python-couchdb will create the database or raise an error if it
            # already exists
            self.database = self.server.create(database)
        except:
            self.database = self.server[database]

    def fetch(self, uuid, type=None):
        doc = self.database[uuid]
        if doc['type'] == type:
            return doc
        else:
            raise DocumentNotFoundException(uuid)
Example #24
def fire_em_all(skip, limit):
    collection = 'pantip'
    svr = Server()
    if collection not in svr:
        src = svr.create(collection)
    else:
        src = svr[collection]

    # Iterate through the collection and fire
    n = 0
    n_processed = 0
    for _id in src:
        n += 1
        rec = src.get(_id)
        if n < skip: continue

        if n_processed > limit:
            print(colored('Out of ammo!', 'red'))
            return

        # Fire a single request
        print(colored('Firing #{0}'.format(_id)))
        fire_request(rec)
        n_processed += 1
Example #25
def fire_em_all(skip,limit):
	collection = 'pantip'
	svr = Server()
	if collection not in svr:
		src = svr.create(collection)
	else:
		src = svr[collection]

	# Iterate through the collection and fire
	n = 0
	n_processed = 0
	for _id in src:
		n += 1
		rec = src.get(_id)
		if n<skip: continue

		if n_processed>limit:
			print(colored('Out of ammo!','red'))
			return 

		# Fire a single request
		print(colored('Firing #{0}'.format(_id)))
		fire_request(rec)
		n_processed += 1
Example #26
class CouchDBBackend(object):
    @classmethod
    def load_from_config(cls, config):
        settings = config.registry.settings

        generator = config.maybe_dotted(settings['daybed.id_generator'])
        return CouchDBBackend(host=settings['backend.db_host'],
                              db_name=os.environ.get(
                                  'DB_NAME', settings['backend.db_name']),
                              id_generator=generator(config))

    def __init__(self, host, db_name, id_generator):
        self.server = Server(host)
        self.db_name = db_name

        try:
            self.create_db_if_not_exist()
        except socket.error as e:
            raise CouchDBBackendConnectionError(
                "Unable to connect to the CouchDB server: %s - %s" % (host, e))

        self._db = self.server[self.db_name]
        self.sync_views()
        self._generate_id = id_generator

    def delete_db(self):
        del self.server[self.db_name]

    def create_db_if_not_exist(self):
        try:
            self.server.create(self.db_name)
            logger.info('Creating and using db "%s"' % self.db_name)
        except (PreconditionFailed, Unauthorized):
            logger.info('Using db "%s".' % self.db_name)

    def sync_views(self):
        ViewDefinition.sync_many(self.server[self.db_name], docs)

    def get_models(self, principals):
        principals = list(set(principals))
        models = {}
        for result in views.models(self._db, keys=principals).rows:
            doc = result.value
            _id = doc["_id"]
            models[_id] = {
                "id": _id,
                "title": doc["definition"].get("title", _id),
                "description": doc["definition"].get("description", "")
            }
        return list(models.values())

    def __get_raw_model(self, model_id):
        try:
            doc = views.model_definitions(self._db, key=model_id).rows[0]
            return doc.value
        except IndexError:
            raise backend_exceptions.ModelNotFound(model_id)

    def get_model_definition(self, model_id):
        return self.__get_raw_model(model_id)['definition']

    def __get_raw_records(self, model_id):
        # Make sure the model exists.
        self.__get_raw_model(model_id)
        return views.records(self._db, key=model_id).rows

    def get_records(self, model_id, raw_records=None):
        return [
            r["record"]
            for r in self.get_records_with_authors(model_id, raw_records)
        ]

    def get_records_with_authors(self, model_id, raw_records=None):
        if raw_records is None:
            raw_records = self.__get_raw_records(model_id)
        records = []
        for item in raw_records:
            item.value['record']['id'] = item.value['_id'].split('-')[1]
            records.append({
                "authors": item.value['authors'],
                "record": item.value['record']
            })
        return records

    def __get_raw_record(self, model_id, record_id):
        key = u'-'.join((model_id, record_id))
        try:
            return views.records_all(self._db, key=key).rows[0].value
        except IndexError:
            raise backend_exceptions.RecordNotFound(u'(%s, %s)' %
                                                    (model_id, record_id))

    def _model_exists(self, model_id):
        try:
            self.__get_raw_model(model_id)
            return True
        except backend_exceptions.ModelNotFound:
            return False

    def _record_exists(self, model_id, record_id):
        try:
            self.__get_raw_record(model_id, record_id)
            return True
        except backend_exceptions.RecordNotFound:
            return False

    def get_record(self, model_id, record_id):
        doc = self.__get_raw_record(model_id, record_id)
        record = doc['record']
        record['id'] = record_id
        return record

    def get_record_authors(self, model_id, record_id):
        doc = self.__get_raw_record(model_id, record_id)
        return doc['authors']

    def put_model(self, definition, permissions, model_id=None):
        if model_id is None:
            model_id = self._generate_id(key_exist=self._model_exists)

        try:
            doc = self.__get_raw_model(model_id)
        except backend_exceptions.ModelNotFound:
            doc = {'_id': model_id, 'type': 'definition'}
        doc['definition'] = definition
        doc['permissions'] = permissions

        definition_id, _ = self._db.save(doc)
        return definition_id

    def put_record(self, model_id, record, authors, record_id=None):
        doc = {
            'type': 'record',
            'authors': authors,
            'model_id': model_id,
            'record': record
        }

        if record_id is not None:
            try:
                old_doc = self.__get_raw_record(model_id, record_id)
            except backend_exceptions.RecordNotFound:
                doc['_id'] = '-'.join((model_id, record_id))
            else:
                authors = list(set(authors) | set(old_doc['authors']))
                doc['authors'] = authors
                old_doc.update(doc)
                doc = old_doc
        else:
            key_exist = functools.partial(self._record_exists, model_id)
            record_id = self._generate_id(key_exist=key_exist)
            doc['_id'] = '-'.join((model_id, record_id))

        self._db.save(doc)
        return record_id

    def delete_record(self, model_id, record_id):
        doc = self.__get_raw_record(model_id, record_id)
        if doc:
            self._db.delete(doc)
        return doc

    def delete_records(self, model_id):
        results = self.__get_raw_records(model_id)
        for result in results:
            self._db.delete(result.value)
        return self.get_records(model_id, raw_records=results)

    def delete_model(self, model_id):
        """DELETE ALL THE THINGS"""

        # Delete the associated data if any.
        records = self.delete_records(model_id)

        try:
            doc = views.model_definitions(self._db, key=model_id).rows[0].value
        except IndexError:
            raise backend_exceptions.ModelNotFound(model_id)

        # Delete the model definition if it exists.
        self._db.delete(doc)
        return {
            "definition": doc["definition"],
            "permissions": doc["permissions"],
            "records": records
        }

    def __get_raw_token(self, credentials_id):
        try:
            return views.tokens(self._db, key=credentials_id).rows[0].value
        except IndexError:
            raise backend_exceptions.CredentialsNotFound(credentials_id)

    def get_token(self, credentials_id):
        """Returns the information associated with a credentials_id"""
        credentials = dict(**self.__get_raw_token(credentials_id))
        return credentials['token']

    def get_credentials_key(self, credentials_id):
        """Retrieves a token by its id"""
        credentials = dict(**self.__get_raw_token(credentials_id))
        return credentials['credentials']['key']

    def store_credentials(self, token, credentials):
        # Check that the token doesn't already exist.
        assert 'id' in credentials and 'key' in credentials
        try:
            self.__get_raw_token(credentials['id'])
            raise backend_exceptions.CredentialsAlreadyExist(credentials['id'])
        except backend_exceptions.CredentialsNotFound:
            pass

        doc = dict(token=token, credentials=credentials, type='token')
        self._db.save(doc)

    def get_model_permissions(self, model_id):
        doc = self.__get_raw_model(model_id)
        return doc['permissions']
Example #27
# You should have received a copy of the GNU Lesser General Public
# License along with this library.  If not, see <http://www.gnu.org/licenses/>.
#
##

import couchdb
from couchdb.client import Server

#This connects to the couchdb server
server = Server('http://127.0.0.1:9999/')
print 'Connection established'

try:
	#This creates a database in couchdb called gamedb
	#Database names must be lowercase, but table/document names do not need to be
	db = server.create('gamedb')
	print 'database created'
except:
	#In the case the database already exists we will delete it and start a new one
	del server['gamedb']
	db = server.create('gamedb')
	print 'database deleted and created'

#Creating JSON object data: db is your database, while 'character#' is the document id you will be assigning.
#The dictionary holds a field you are adding and its value. IE: uid is the field and 1 is the value.
#Python will automatically change this to a JSON document for you when it writes to the database
db['character1'] = dict(uid=1, level=12, name='py')
db['character2'] = dict(uid=2, level=25, name='thon')
db['character3'] = dict(uid=3, level=30, name='three')

#this is how you build a query, the query is exported javascript
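#(The actual query is missing from this excerpt; below is a minimal sketch, not
# part of the original file. couchdb-python can run an ad-hoc JavaScript map
# function through db.query(); the field names follow the character documents
# created above.)
map_fun = '''function(doc) {
    if (doc.level > 20)
        emit(doc.name, doc.level);
}'''
for row in db.query(map_fun):
    print row.key, row.value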
Example #28
class Database():
    """The Database class incorperates functions you wish to ask
    the database"""

    jsdir = CONFIG.get('couchdb', 'javascript_directory')

    __DESIGN_VIEWS_PATHS_MAP = \
        file(jsdir + '/design_views_paths_map.js').read()
    __DESIGN_VIEWS_SHASUMS_MAP = \
        file(jsdir + '/design_views_shasums_map.js').read()
    __DESIGN_VIEWS_FORMATS_MAP = \
        file(jsdir + '/design_views_formats_map.js').read()
    __DESIGN_VIEWS_FORMATS_REDUCE = '_sum'
    __DESIGN_VIEWS_SOUND_MAP = \
        file(jsdir + '/design_views_sound_map.js').read()
    __DESIGN_VIEWS_VIDEO_MAP = \
        file(jsdir + '/design_views_sound_map.js').read()
    __DESIGN_FULLTEXT_ARTIST_INDEX = \
        file(jsdir + '/design_fulltext_artist_index.js').read()
    __DESIGN_FULLTEXT_EVERYTHING_INDEX = \
        file(jsdir + '/design_fulltext_artist_index.js').read()

    def create_views(self):
        """creates views and saves them to the database"""
        LOG.info('creating views')
        views = {
            '_id': '_design/views',
            'language': 'javascript',
            'views': {
                'shasums': {
                    'map': self.__DESIGN_VIEWS_SHASUMS_MAP
                },
                'paths': {
                    'map': self.__DESIGN_VIEWS_PATHS_MAP
                },
                'formats': {
                    'map': self.__DESIGN_VIEWS_FORMATS_MAP,
                    'reduce': self.__DESIGN_VIEWS_FORMATS_REDUCE
                },
                'sound': {
                    'map': self.__DESIGN_VIEWS_SOUND_MAP
                },
                'video': {
                    'map': self.__DESIGN_VIEWS_VIDEO_MAP
                }
            },
            'fulltext': {
                'artist': {
                    'index': self.__DESIGN_FULLTEXT_ARTIST_INDEX
                },
                'everything': {
                    'index': self.__DESIGN_FULLTEXT_EVERYTHING_INDEX
                }
            }
        }
        self.database.create(views)

    def __init__(self):
        """Initialises a new connection to couch and
        creates a new mis database if nonexistent"""
        LOG.info('initialising database')
        database_name = CONFIG.get('couchdb', 'database')
        host = CONFIG.get('couchdb', 'hostname')
        port = CONFIG.get('couchdb', 'port')
        database_uri = 'http://' + host + ':' + port + '/'
        self.server = Server(database_uri)
        try:
            # This statement appears to do nothing, but is used to
            # make sure the database is reachable.
            self.server.version
        except AttributeError as error:
            if not test_tcp_connection(host, int(port)):
                LOG.critical("couchdb cannot be reached at " + database_uri)
                exit(1)
            else:
                LOG.error('unknown AttributeError thrown')
                raise error
        try:
            LOG.debug('opening database')
            self.database = self.server[database_name]
        except (ResourceNotFound):
            LOG.info('creating database')
            # The database didn't exist. Lets create it.
            self.database = self.server.create(database_name)
            self.create_views()

    def iterate_all_files(self):
        """With a big database, this is probably a bad idea. this
iterates through every single document."""
        for entry in self.database:
            yield (entry)

    def get_document(self, shasum):
        """extracts a (full) document from the database using the
shasum as an identifier"""
        assert shasum is not None
        assert shasum != ''
        LOG.debug('getting document')
        result = None
        try:
            result = self.database[shasum]
            # make sure it actually exists
        except (ResourceNotFound) as error:
            LOG.error("don't have that document, doesn't exist:" + str(error))
        return result

    def add_userdata(self, shasum, data):
        """Adds userdata to the database shasum"""
        LOG.debug('add userdata')
        shasum = unicode(shasum)
        user = getuser()
        node = platform_node()
        user_key = node + ':' + user
        userdata = {}
        if not self.file_exists(shasum):
            LOG.error('trying to add userdata to nonexistent file' + shasum)
            return None
        entry = self.database[shasum]
        userdatalist = {}
        if 'userdata' in entry:
            userdatalist = entry['userdata']
        if user_key in userdatalist:
            userdata = userdatalist[user_key]
        userdata.update(data)
        userdatalist[user_key] = userdata
        entry['userdata'] = userdatalist
        self.database[shasum] = entry

    def add_data(self, shasum, name, data):
        """adds data to a record"""
        assert shasum is not None
        shasum = unicode(shasum)
        name = unicode(name)
        LOG.debug('adding data')
        if self.file_exists(shasum):
            mis_file = self.database[shasum]
            if name not in mis_file or mis_file[name] != data:
                LOG.info(shasum + " info " + name + " has changed, updating")
                mis_file[name] = data
                self.database[shasum] = mis_file
        else:  # create when nonexistent
            LOG.info(shasum + " info " + name + " added")
            entry = {'_id': shasum, name: data}
            self.database.create(entry)

    def add_path(self, shasum, node, path):
        """Adds a path to the database"""
        assert shasum is not None
        shasum = unicode(shasum)
        node = unicode(node)
        path = unicode(path)
        LOG.debug('adding path ' + path + " to " + shasum)
        path_info = {'node': node, 'path': path}
        if self.file_exists(shasum):
            mis_file = self.database[shasum]
            mis_file['paths'].append(path_info)
            self.database[mis_file['_id']] = mis_file
        else:  # create when nonexistent
            entry = {'_id': shasum, 'paths': [path_info]}
            self.database.create(entry)

    def file_exists(self, shasum):
        """Checks if a file (shasum) exists in the database, and
        returns the entry when found"""
        assert shasum is not None
        shasum = unicode(shasum)
        result = None
        LOG.debug('checking if file exists: ' + shasum)
        try:
            # Note: the following line triggers the
            # ResourceNotFound if the sha is not known, this is
            # the way we catch whether it exists.
            self.database[shasum]  # pylint: disable-msg=W0104
            result = shasum
        except ResourceNotFound:
            LOG.debug('trying to find nonexistent entry ' + shasum)
        return result

    def path_exists(self, path, node=None):
        """Checks whether a certain path exists and returns True
        or False"""
        if node is None:
            node = platform_node()
        node = unicode(node)
        path = unicode(path)
        LOG.debug('path exists: ' + node + ':' + path)
        result = None
        key = [node, path]
        results = self.database.view('views/paths', key=key)
        if (len(results) > 0):
            result = results.rows[0]['value']
        return result
Example #29
#!/usr/bin/python

import urllib2
import uuid
import feedparser
from couchdb.client import Server

server = Server()
try:
    db = server.create('cuke')
except:
    db = server['cuke']

feeds = ['http://freemusicarchive.org/interesting.atom']
# for more music, include the 'recent' feed
#feeds.append('http://freemusicarchive.org/recent.atom')

for f in feeds:
    feed = feedparser.parse(f)
    for track in feed.entries:
        track_doc = {}
        track_doc['type'] = 'track'
        if track.enclosures:
            track_link = track.enclosures[0].href
            track_doc['stream'] = track_link
            track_uuid = uuid.uuid5(uuid.NAMESPACE_URL, str(track_link)).hex
            track_doc['_id'] = track_uuid
            track_doc['artist_name'] = track.author
            title_split = track.title.split(":")
            track_doc['album_name'] = title_split[1].strip()
            track_doc['name'] = title_split[2].strip()
Example #30
    def clear(self):
        s = Server()
        del s['python-tests']
        db = s.create('python-tests')
Example #31
class Store(object):
    """
        ERS store
    """
    def __init__(self, url=DEFAULT_STORE_ADMIN_URI, **client_opts):
        self.logger = logging.getLogger('ers-store')
        self._server = Server(url=url, **client_opts)

        self.db_names = {'public': ERS_PUBLIC_DB,
                'private': ERS_PRIVATE_DB,
                'cache': ERS_CACHE_DB,}

        # Add aggregate functions
        # for method_name in ('docs_by_entity', 'by_property', 'by_property_value'):
        #    self.add_aggregate(method_name)

        # Check the status of the databases
        self._ers_dbs = {}
        self._repair()

    def __getitem__(self, dbname):
        return self._ers_dbs[dbname]
        # return ERSDatabase(self._db_uri(dbname), server=self)

    def __iter__(self):
        for dbname in self.all_dbs():
            yield self._ers_dbs[dbname]
            # yield ERSDatabase(self._db_uri(dbname), server=self)

    @classmethod
    def add_aggregate(cls, method_name):
        """
        """
        def aggregate(self, *args, **kwargs):
            return chain(*[getitem(self[db_name], method_name)(*args, **kwargs).iterator()
                            for db_name in ALL_DBS])
        aggregate.__doc__ = """Calls method {}() of all databases in the store and returns an iterator over combined results""".format(method_name)
        aggregate.__name__ = method_name
        setattr(cls, method_name, aggregate)

    def reset(self):
        for db_name in ALL_DBS:
            try:
                del self._server[db_name]
            except http.ResourceNotFound:
                pass
        self._repair()

    def by_property_value(self, property, value=None):
        results = []
        for db in self._ers_dbs.itervalues():
            for res in db.by_property_value(property, value):
                results.append(res)
        return results

    def get_ers_db(self, dbname, **params):
        """
        Try to return an ERSDatabase object for dbname.
        """
        return self._ers_dbs[dbname]

    def info(self):
        return self._server.config()['couchdb']

    def _repair(self):
        # Authenticate with the local store
        # user, password = auth
        try:
            state_db = self._server[ERS_STATE_DB]
        except http.ResourceNotFound:
            state_db = self._server.create(ERS_STATE_DB)
        if not '_local/state' in state_db:
            state_db.save(state_doc())
        if not '_design/index' in state_db:
            state_db.save(index_doc())
        self._ers_dbs[ERS_STATE_DB] = ERSDatabase(state_db)

        for dbname in ALL_DBS:
            # Recreate database if needed
            try:
                db = self._server[dbname]
            except http.ResourceNotFound:
                db = self._server.create(dbname)

            # Create index design doc if needed
            if not '_design/index' in db:
                db.save(index_doc())

            ## Create state doc in the public database if needed
            #if dbname == ERS_PUBLIC_DB:
            #    if not '_local/state' in db:
            #        db.save(state_doc())

            # Save the ERSDatabase object
            self._ers_dbs[dbname] = ERSDatabase(db)
Example #32
class Model:
    def __init__(self):
        try:
            self.__server = Server()
        except:
            print 'cannot connect to CouchDB: %s' % (settings.c['db_url'])

        self.__db = {}
        self.__db_name = settings.c['db_name']
        DEBUG.p(self.__db_name.items())
        for (k, v) in self.__db_name.items():
            try:
                self.__db[v] = self.__server.create(v)
            except:
                self.__db[v] = self.__server[v]

    def create(self, dbname):
        return self.__server.create(dbname)

    def delete(self, dbname):
        try:
            self.__server.delete(dbname)
        except:
            print 'database %s does not exist!!!' % (dbname)

    def clearDB(self, dbname):
        try:
            self.__server.delete(dbname)
        except:
            print 'database %s does not exist!!!' % (dbname)

        self.__db[dbname] = self.__server.create(dbname)

    def getAllDoc(self):
        return self.__db[self.__db_name['cover']]

    def addOneDoc(self, doc):
        keyStr = '%s:%s' % (doc['artist'], doc['album_name'])
        doc['_id'] = hashlib.md5(keyStr.encode('ascii', 'ignore')).hexdigest()
        print '_id:%s' % (doc['_id'])
        doc_id, doc_rev = self.__db[self.__db_name['cover']].save(doc)
        print 'addOneDoc done'

    def delOneDoc(self, doc):
        self.__db[self.__db_name['cover']].delete(doc)

    def getById(self, artist, albumName):
        keyStr = '%s:%s' % (artist, albumName)
        _id = hashlib.md5(keyStr.encode('ascii', 'ignore')).hexdigest()
        print _id
        album = self.__db[self.__db_name['cover']].get(_id)
        if album:
            return dict(album)
        return album
        #map_fun = '''function(doc) {
        #     if (doc._id == '%s')
        #         emit(doc, null);
        #         }''' % (_id)
        #albums = self.__db[self.__db_name['cover']].query(map_fun)
        #for album in albums:
        #    return album.key
        #return None

    def getById2(self, id):
        print id
        album = self.__db[self.__db_name['cover']].get(id)
        if album:
            return dict(album)
        return album

    def getByKeyValue(self, key, value):
        map_fun = '''function(doc) {
             if (doc.%s == '%s')
                 emit(doc, null);
                 }''' % (key, value)
        albums = self.__db[self.__db_name['cover']].query(map_fun)
        for album in albums:
            return album.key
        return None

    def getByArtist(self, artist):
        map_fun = '''function(doc) {
             if (doc.artist == '%s')
                 emit(doc, null);
                 }''' % (artist)
        albums = self.__db[self.__db_name['cover']].query(map_fun)
        for album in albums:
            return album.key
        return None

    def getByAbbumName(self, albumName):
        map_fun = '''function(doc) {
             if (doc.album_name == '%s')
                 emit(doc, null);
                 }''' % (albumName)
        albums = self.__db[self.__db_name['cover']].query(map_fun)
        for album in albums:
            return album.key
        return None

    def getByCopyRight(self, copyRight):
        map_fun = '''function(doc) {
             if (doc.copy_right == '%s')
                 emit(doc, null);
                 }''' % (copyRight)
        albums = self.__db[self.__db_name['cover']].query(map_fun)
        for album in albums:
            return album.key
        return None

    def getRandom(self, seed=11111):
        db = self.getAllDoc()
        tenNews = []
        for id in db:
            rd = random.randint(0, seed)
            if rd != 1:
                continue
            if len(tenNews) < 10:
                #print db[id]
                tenNews.append(db[id])
        return tenNews
Example #33
class CouchDBBackend(object):

    @classmethod
    def load_from_config(cls, config):
        settings = config.registry.settings
        generator = config.maybe_dotted(settings['daybed.id_generator'])
        return CouchDBBackend(
            host=settings['backend.db_host'],
            db_name=os.environ.get('DB_NAME', settings['backend.db_name']),
            id_generator=generator(config),
        )

    def __init__(self, host, db_name, id_generator):
        self.server = Server(host)
        self.db_name = db_name

        try:
            self.create_db_if_not_exist()
        except socket.error as e:
            raise CouchDBBackendConnectionError(
                "Unable to connect to the CouchDB server: %s - %s" % (host, e))

        self._db = self.server[self.db_name]
        self.sync_views()

    def delete_db(self):
        del self.server[self.db_name]

    def create_db_if_not_exist(self):
        try:
            self.server.create(self.db_name)
            logger.info('Creating and using db "%s"' % self.db_name)
        except PreconditionFailed:
            logger.info('Using db "%s".' % self.db_name)

    def sync_views(self):
        ViewDefinition.sync_many(self.server[self.db_name], docs)

    def __get_raw_user_token(self, user_id):
        try:
            return views.usertokens(self._db, key=user_id).rows[0].value
        except IndexError:
            raise UserTokenNotFound(user_id)

    def get_user_token(self, user_id):
        """Returns the information associated with a user token"""
        usertoken = dict(**self.__get_raw_user_token(user_id))
        return usertoken['token']

    def add_token(self, user_id, token):
        # Check that the token doesn't already exist.
        try:
            self.__get_raw_user_token(user_id)
            raise UserTokenAlreadyExist(user_id)
        except UserTokenNotFound:
            pass

        doc = dict(token=token, user_id=user_id, type='usertoken')
        self._db.save(doc)

    def __get_raw_state(self, session_id):
        try:
            return views.states(self._db, key=session_id).rows[0].value
        except IndexError:
            raise StateNotFound(session_id)

    def get_state(self, session_id):
        """Retrives the session_id state."""
        state_doc = dict(**self.__get_raw_state(session_id))
        return state_doc['state']

    def set_state(self, session_id):
        """Set a session_id state."""
        try:
            doc = self.__get_raw_state(session_id)
        except StateNotFound:
            doc = {"type": "fxa_oauth_states", "session_id": session_id}

        doc['state'] = uuid.uuid4().hex
        self._db.save(doc)
        return doc['state']

    def get_or_set_state(self, session_id):
        """Retrieves or creates a state for the session_id"""
        try:
            return self.get_state(session_id)
        except StateNotFound:
            return self.set_state(session_id)

    def __get_raw_redirect_uri(self, state):
        try:
            return views.redirect_uris(self._db, key=state).rows[0].value
        except IndexError:
            raise RedirectURINotFound(state)

    def get_redirect_uri(self, state):
        """Retrieves the redirect_uri associated with a state."""
        redirect_uri_doc = dict(**self.__get_raw_redirect_uri(state))
        return redirect_uri_doc['redirect_uri']

    def set_redirect_uri(self, state, redirect_uri):
        """Set a redirect_uri for a state."""
        try:
            doc = self.__get_raw_redirect_uri(state)
        except RedirectURINotFound:
            doc = {"type": "fxa_oauth_redirect_uri", "state": state}
        doc['redirect_uri'] = redirect_uri
        self._db.save(doc)
    def __get_raw_oauth_access_token(self, session_id):
        try:
            return views.access_tokens(self._db, key=session_id).rows[0].value
        except IndexError:
            raise OAuthAccessTokenNotFound(session_id)

    def get_oauth_access_token(self, session_id):
        """Retrives the session_id oauth_access_token."""
        token_doc = dict(**self.__get_raw_oauth_access_tokens(session_id))
        return token_doc['access_token']

    def set_oauth_access_token(self, session_id, access_token):
        """Set the session_id oauth_access_token."""
        try:
            doc = self.__get_raw_oauth_access_token(session_id)
        except OAuthAccessTokenNotFound:
            doc = {"type": "fxa_oauth_access_tokens", "session_id": session_id}

        doc['access_token'] = access_token
        self._db.save(doc)
Example #34
File: __init__.py Project: nex3/mdb
class Database:
    def __init__(self, server, name):
        self.server = Server(server)
        if name in self.server:
            self.db = self.server[name]
        else:
            self.db = self.server.create(name)
            self._load_views()
        self.view = self.db.view('_view/update/mtime')

    def add(self, path):
        song = self._song_for(os.path.realpath(path))
        if song is None: return

        song = self._dict_for(song)
        doc = self._doc_for(song)
        if doc: song["_rev"] = doc.value["_rev"]
        self.db[song["_id"]] = song

    def add_many(self, paths):
        paths = map(os.path.realpath, paths)
        def updated_file(path):
            doc = self._doc_for(path)
            if not doc: return True
            return util.mtime(path) > doc.value["mtime"]

        songs = filter(None, map(self._song_for, filter(updated_file, paths)))
        if not songs: return
        songs = map(self._dict_for, songs)
        for song in songs:
            doc = self._doc_for(song)
            if not doc: continue
            song["_rev"] = doc.value["_rev"]
        self.db.update(songs)

    def remove(self, path):
        del self.db[_id(path)]

    def remove_docs(self, songs):
        for song in songs:
            song["_deleted"] = True
        self.db.update(songs)

    def update(self, doc, path):
        song = self._song_for(path)
        if song is None: return

        song = self._dict_for(song)
        song["_rev"] = doc["_rev"]
        self.db[song["_id"]] = song

    def docs_beneath(self, path):
        path = util.qdecode(path).split(os.path.sep)
        return [row.value for row in self.db.view('_view/tree/by-path', startkey=path, endkey=path + [{}])]

    def _song_for(self, path):
        try: return MusicFile(path)
        except IOError: return None

    def _dict_for(self, song):
        d = {}
        for tag in SAVED_METATAGS + song.realkeys():
            val = song(tag)
            if val:
                if isinstance(val, basestring):
                    if tag in QUOTED_TAGS:
                        val = util.qdecode(val)
                    else:
                        val = util.fsdecode(val)
                    if not tag in SINGLETON_TAGS:
                        val = val.split("\n")
                d[tag] = val
        for tag, default in DEFAULTS.items():
            if not tag in song: song[tag] = default
        d["~path"] = d["~filename"].split(os.path.sep)
        # CouchDB doesn't like apostrophes in keys for some reason...
        d["_id"] = _id(song.key)
        return d

    def _doc_for(self, song):
        docs = list(self.view[_id(song) if isinstance(song, basestring) else song["_id"]])
        if not docs: return None
        return docs[0]

    def _load_views(self):
        for view in glob.glob(util.data_path('views', '*.json')):
            name = os.path.basename(view)[0:-5]
            f = open(view)
            # Can't use __setitem__ 'cause it assumes content is a hash
            self.db.resource.put('_design/' + name, content=f.read())
            f.close()
Example #35
File: debie.py Project: rocel/stage
        f = feedparser.parse(
            'http://crondia.blogspot.com/feeds/posts/default?alt=rss')
        for entry in f.entries:
            if entry.title in as_cronicas:
                unicode('"cronica velha:", entry.title', 'UTF-8', 'REPLACE')
                continue

            unicode('"cronica NOVA:", entry.title', 'UTF-8', 'REPLACE')
            uma_cronica = Cronica(titulo=entry.title, texto=entry.description)
            uma_cronica.store(cronica)


server = Server('http://localhost:5984')

if not 'manchete' in server:
    manchete = server.create('manchete')
if not 'noticia' in server:
    noticia = server.create('noticia')
if not 'receita' in server:
    receita = server.create('receita')
if not 'artigo' in server:
    artigo = server.create('artigo')
if not 'resenha' in server:
    resenha = server.create('resenha')
if not 'horoscopo' in server:
    horoscopo = server.create('horoscopo')
if not 'cronica' in server:
    cronica = server.create('cronica')
else:
    manchete = server['manchete']
    noticia = server['noticia']
Example #36
from couchdb.client import Server
import json
import fnmatch

server = Server('http://*****:*****@172.26.133.237:5984')

if not 'parties_data' in server:
    db = server.create('parties_data')
else:
    db = server['parties_data']

with open('../../../../../grouped_election_data(1).json') as jsonfile:
    data = json.load(jsonfile)
    jsonfile.close()
    result = {}

    feature_list = data['features']

    for row in feature_list:
        data = json.loads(json.dumps(row))
        coordinate = str(
            data['geometry']['coordinates'])  # _id has to be a string
        property_dict = {
            i: j
            for i, j in data['properties'].items() if j != None
        }  # take out null
        pattern = '*_percent'
        doc = {}
        for item in property_dict:
            if fnmatch.fnmatch(item, pattern):
                party_name = item.replace('_percent', '')
Example #37
class CouchDBBackend(object):

    @classmethod
    def load_from_config(cls, config):
        settings = config.registry.settings

        generator = config.maybe_dotted(settings['daybed.id_generator'])
        return CouchDBBackend(
            host=settings['backend.db_host'],
            db_name=os.environ.get('DB_NAME', settings['backend.db_name']),
            id_generator=generator(config)
        )

    def __init__(self, host, db_name, id_generator):
        self.server = Server(host)
        self.db_name = db_name

        try:
            self.create_db_if_not_exist()
        except socket.error as e:
            raise CouchDBBackendConnectionError(
                "Unable to connect to the CouchDB server: %s - %s" % (host, e))

        self._db = self.server[self.db_name]
        self.sync_views()
        self._generate_id = id_generator

    def delete_db(self):
        del self.server[self.db_name]

    def create_db_if_not_exist(self):
        try:
            self.server.create(self.db_name)
            logger.info('Creating and using db "%s"' % self.db_name)
        except (PreconditionFailed, Unauthorized):
            logger.info('Using db "%s".' % self.db_name)

    def sync_views(self):
        ViewDefinition.sync_many(self.server[self.db_name], docs)

    def get_models(self, principals):
        principals = list(set(principals))
        models = {}
        for result in views.models(self._db, keys=principals).rows:
            doc = result.value
            _id = doc["_id"]
            models[_id] = {
                "id": _id,
                "title": doc["definition"].get("title", _id),
                "description": doc["definition"].get("description", "")
            }
        return list(models.values())

    def __get_raw_model(self, model_id):
        try:
            doc = views.model_definitions(self._db, key=model_id).rows[0]
            return doc.value
        except IndexError:
            raise backend_exceptions.ModelNotFound(model_id)

    def get_model_definition(self, model_id):
        return self.__get_raw_model(model_id)['definition']

    def __get_raw_records(self, model_id):
        # Make sure the model exists.
        self.__get_raw_model(model_id)
        return views.records(self._db, key=model_id).rows

    def get_records(self, model_id, raw_records=None):
        return [r["record"] for r in
                self.get_records_with_authors(model_id, raw_records)]

    def get_records_with_authors(self, model_id, raw_records=None):
        if raw_records is None:
            raw_records = self.__get_raw_records(model_id)
        records = []
        for item in raw_records:
            item.value['record']['id'] = item.value['_id'].split('-')[1]
            records.append({"authors": item.value['authors'],
                            "record": item.value['record']})
        return records

    def __get_raw_record(self, model_id, record_id):
        key = u'-'.join((model_id, record_id))
        try:
            return views.records_all(self._db, key=key).rows[0].value
        except IndexError:
            raise backend_exceptions.RecordNotFound(
                u'(%s, %s)' % (model_id, record_id)
            )

    def _model_exists(self, model_id):
        try:
            self.__get_raw_model(model_id)
            return True
        except backend_exceptions.ModelNotFound:
            return False

    def _record_exists(self, model_id, record_id):
        try:
            self.__get_raw_record(model_id, record_id)
            return True
        except backend_exceptions.RecordNotFound:
            return False

    def get_record(self, model_id, record_id):
        doc = self.__get_raw_record(model_id, record_id)
        record = doc['record']
        record['id'] = record_id
        return record

    def get_record_authors(self, model_id, record_id):
        doc = self.__get_raw_record(model_id, record_id)
        return doc['authors']

    def put_model(self, definition, permissions, model_id=None):
        if model_id is None:
            model_id = self._generate_id(key_exist=self._model_exists)

        try:
            doc = self.__get_raw_model(model_id)
        except backend_exceptions.ModelNotFound:
            doc = {'_id': model_id,
                   'type': 'definition'}
        doc['definition'] = definition
        doc['permissions'] = permissions

        definition_id, _ = self._db.save(doc)
        return definition_id

    def put_record(self, model_id, record, authors, record_id=None):
        doc = {
            'type': 'record',
            'authors': authors,
            'model_id': model_id,
            'record': record}

        if record_id is not None:
            try:
                old_doc = self.__get_raw_record(model_id, record_id)
            except backend_exceptions.RecordNotFound:
                doc['_id'] = '-'.join((model_id, record_id))
            else:
                authors = list(set(authors) | set(old_doc['authors']))
                doc['authors'] = authors
                old_doc.update(doc)
                doc = old_doc
        else:
            key_exist = functools.partial(self._record_exists, model_id)
            record_id = self._generate_id(key_exist=key_exist)
            doc['_id'] = '-'.join((model_id, record_id))

        self._db.save(doc)
        return record_id

    def delete_record(self, model_id, record_id):
        doc = self.__get_raw_record(model_id, record_id)
        if doc:
            self._db.delete(doc)
        return doc

    def delete_records(self, model_id):
        results = self.__get_raw_records(model_id)
        for result in results:
            self._db.delete(result.value)
        return self.get_records(model_id, raw_records=results)

    def delete_model(self, model_id):
        """DELETE ALL THE THINGS"""

        # Delete the associated data if any.
        records = self.delete_records(model_id)

        try:
            doc = views.model_definitions(self._db, key=model_id).rows[0].value
        except IndexError:
            raise backend_exceptions.ModelNotFound(model_id)

        # Delete the model definition if it exists.
        self._db.delete(doc)
        return {"definition": doc["definition"],
                "permissions": doc["permissions"],
                "records": records}

    def __get_raw_token(self, credentials_id):
        try:
            return views.tokens(self._db, key=credentials_id).rows[0].value
        except IndexError:
            raise backend_exceptions.CredentialsNotFound(credentials_id)

    def get_token(self, credentials_id):
        """Returns the information associated with a credentials_id"""
        credentials = dict(**self.__get_raw_token(credentials_id))
        return credentials['token']

    def get_credentials_key(self, credentials_id):
        """Retrieves a token by its id"""
        credentials = dict(**self.__get_raw_token(credentials_id))
        return credentials['credentials']['key']

    def store_credentials(self, token, credentials):
        # Check that the token doesn't already exist.
        assert 'id' in credentials and 'key' in credentials
        try:
            self.__get_raw_token(credentials['id'])
            raise backend_exceptions.CredentialsAlreadyExist(credentials['id'])
        except backend_exceptions.CredentialsNotFound:
            pass

        doc = dict(token=token, credentials=credentials, type='token')
        self._db.save(doc)

    def get_model_permissions(self, model_id):
        doc = self.__get_raw_model(model_id)
        return doc['permissions']
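
A brief, hedged usage sketch of the model/record API defined above; `backend` stands for an already-initialised instance of this CouchDB backend, and the definition, permissions and author names are purely illustrative.

# Hypothetical walkthrough of the API above; `backend` is assumed to be a
# fully initialised instance of this CouchDB backend.
definition = {'title': 'todo', 'fields': [{'name': 'item', 'type': 'string'}]}
permissions = {'read_definition': ['alice'], 'create_record': ['alice']}

model_id = backend.put_model(definition, permissions)

# put_record generates a record id when none is given; when updating an
# existing record it merges the new authors with the stored ones.
record_id = backend.put_record(model_id, {'item': 'write docs'}, ['alice'])
backend.put_record(model_id, {'item': 'write more docs'}, ['bob'], record_id)

# delete_model removes the definition together with all of its records.
deleted = backend.delete_model(model_id)
print(deleted['records'])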
Ejemplo n.º 38
0
parser.add_argument('--year', type=int, default=2018)
parser.add_argument('--monthlength', type=int, default=12)
parser.add_argument('--end', type=str, default='2018,12,31')

args = parser.parse_args()
# parse command-line arguments
url = 'http://couchdb.socmedia.bigtwitter.cloud.edu.au/twitter/_design/twitter/_view/summary'
BATCHSIZE = args.batch
# this region is located around Sydney

start_key = '[\"' + args.location + '\",' + args.start + ']'
end_key = '[\"' + args.location + '\",' + args.end + ']'
month = args.monthlength
serverName = args.location + str(args.year)
try:
    db = secure_remote_server.create(serverName)
except:
    print('database already exists, reusing it')
    db = secure_remote_server[serverName]
params = {
    'include_docs': 'true',
    'reduce': 'false',
    'start_key': start_key,
    'end_key': end_key,
    "skip": "0",
    "limit": str(BATCHSIZE)
}
TOTALSIZE = args.total_forMonth

count = 1
tweetlist = []
while count <= month:
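
The snippet above is truncated at the monthly loop. Below is a minimal, hedged sketch of how the batched view fetch could proceed, assuming the `requests` library and placeholder basic-auth credentials; it simply pages through the view with `skip`/`limit` until no rows remain and copies the documents into the target database.

import requests

auth = ('username', 'password')  # placeholder credentials, not from the original
skip = 0
while True:
    params['skip'] = str(skip)
    response = requests.get(url, params=params, auth=auth)
    rows = response.json().get('rows', [])
    if not rows:
        break
    for row in rows:
        doc = row['doc']
        doc.pop('_rev', None)  # drop the source revision before re-saving
        tweetlist.append(doc)
    skip += BATCHSIZE

for tweet in tweetlist:
    db.save(tweet)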
Ejemplo n.º 39
0
class Model:
    def __init__(self):
        try:
            self.__server = Server()
        except:
            print 'cannot connect to CouchDB: %s' % (settings.c['db_url'])

        self.__db = {} 
        self.__db_name = settings.c['db_name']
        DEBUG.p(self.__db_name.items())
        for (k, v) in self.__db_name.items():
            try:
                self.__db[v] = self.__server.create(v)
            except:
                self.__db[v] = self.__server[v]

    def create(self, dbname):
        return self.__server.create(dbname) 

    def delete(self, dbname):
        try:
           self.__server.delete(dbname)
        except:
           print 'database %s does not exist!!!' % (dbname)
        
    def clearDB(self, dbname):
        try:
           self.__server.delete(dbname)
        except:
           print 'database %s does not exist!!!' % (dbname)

        self.__db[dbname] = self.__server.create(dbname)

    def getAllDoc(self):
        return self.__db[self.__db_name['cover']] 
        
    def addOneDoc(self, doc):
        keyStr = '%s:%s'%(doc['artist'], doc['album_name']) 
        doc['_id'] = hashlib.md5(keyStr.encode('ascii', 'ignore')).hexdigest()
        print '_id:%s'%(doc['_id'])
        doc_id, doc_rev = self.__db[self.__db_name['cover']].save(doc)
        print 'addOneDoc done'

    def delOneDoc(self, doc):
        self.__db[self.__db_name['cover']].delete(doc)
    
    def getById(self, artist, albumName):
        keyStr = '%s:%s'%(artist, albumName) 
        _id = hashlib.md5(keyStr.encode('ascii', 'ignore')).hexdigest()
        print _id
        album = self.__db[self.__db_name['cover']].get(_id)
        if album:
           return dict(album)
        return album
        #map_fun = '''function(doc) {
        #     if (doc._id == '%s')
        #         emit(doc, null);
        #         }''' % (_id)
        #albums = self.__db[self.__db_name['cover']].query(map_fun)
        #for album in albums: 
        #    return album.key 
        #return None

    def getById2(self, id):
        print id
        album = self.__db[self.__db_name['cover']].get(id)
        if album:
           return dict(album)
        return album

    def getByKeyValue(self, key, value):
        map_fun = '''function(doc) {
             if (doc.%s == '%s')
                 emit(doc, null);
                 }''' % (key, value)
        albums = self.__db[self.__db_name['cover']].query(map_fun)
        for album in albums: 
            return album.key 
        return None

    def getByArtist(self, artist):
        map_fun = '''function(doc) {
             if (doc.artist == '%s')
                 emit(doc, null);
                 }''' % (artist)
        albums = self.__db[self.__db_name['cover']].query(map_fun)
        for album in albums: 
            return album.key 
        return None

    def getByAbbumName(self, albumName):
        map_fun = '''function(doc) {
             if (doc.album_name == '%s')
                 emit(doc, null);
                 }''' % (albumName)
        albums = self.__db[self.__db_name['cover']].query(map_fun)
        for album in albums: 
            return album.key 
        return None

    def getByCopyRight(self, copyRight):
        map_fun = '''function(doc) {
             if (doc.copy_right == '%s')
                 emit(doc, null);
                 }''' % (copyRight)
        albums = self.__db[self.__db_name['cover']].query(map_fun)
        for album in albums: 
            return album.key 
        return None

    def getRandom(self, seed = 11111):
        db = self.getAllDoc()    
        tenNews = [] 
        for id in db:
            rd = random.randint(0, seed)
            if rd != 1:
               continue
            if len(tenNews) < 10:
               #print db[id]
               tenNews.append(db[id])
        return tenNews
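
A short, hedged usage sketch of the Model wrapper above, assuming the module's settings point at a reachable CouchDB; the album document is invented. Note that document ids are derived from an md5 of 'artist:album_name', so addOneDoc and getById must agree on those two fields.

model = Model()

doc = {'artist': 'Miles Davis',
       'album_name': 'Kind of Blue',
       'copy_right': 'Columbia'}
model.addOneDoc(doc)

# Lookups go through the same md5('artist:album_name') id scheme.
album = model.getById('Miles Davis', 'Kind of Blue')
print(album)

# Ad-hoc lookups fall back to temporary views built from a JS map function.
print(model.getByKeyValue('copy_right', 'Columbia'))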
Ejemplo n.º 40
0
class CouchDatabase(ObjectStoreDatabase):
    """
    A U1DB backend that uses Couch as its persistence layer.
    """

    U1DB_TRANSACTION_LOG_KEY = '_transaction_log'
    U1DB_CONFLICTS_KEY = '_conflicts'
    U1DB_OTHER_GENERATIONS_KEY = '_other_generations'
    U1DB_INDEXES_KEY = '_indexes'
    U1DB_REPLICA_UID_KEY = '_replica_uid'

    U1DB_DATA_KEYS = [
        U1DB_TRANSACTION_LOG_KEY,
        U1DB_CONFLICTS_KEY,
        U1DB_OTHER_GENERATIONS_KEY,
        U1DB_INDEXES_KEY,
        U1DB_REPLICA_UID_KEY,
    ]

    COUCH_ID_KEY = '_id'
    COUCH_REV_KEY = '_rev'
    COUCH_U1DB_ATTACHMENT_KEY = 'u1db_json'
    COUCH_U1DB_REV_KEY = 'u1db_rev'

    # the following map describes information about methods usage of
    # properties that have to persist on the underlying database. The format
    # of the map is assumed to be:
    #
    #     {
    #         'property_name': [
    #             ('property_load_method_name', 'property_dump_method_name'),
    #             [('method_1_name', bool),
    #              ...
    #              ('method_N_name', bool)]],
    #         ...
    #     }
    #
    # where the booleans indicate if the property should be stored after
    # each method execution (i.e. if the method alters the property). Property
    # load/dump methods will be run after/before properties are read/written
    # to the underlying db.
    PERSISTENCY_MAP = {
        U1DB_TRANSACTION_LOG_KEY: [
            ('_load_transaction_log_from_json', None),
            [('_get_transaction_log', False),
             ('_get_generation', False),
             ('_get_generation_info', False),
             ('_get_trans_id_for_gen', False),
             ('whats_changed', False),
             ('_put_and_update_indexes', True)]],
        U1DB_CONFLICTS_KEY: [
            (None, None),
            [('_has_conflicts', False),
             ('get_doc_conflicts', False),
             ('_prune_conflicts', False),
             ('resolve_doc', False),
             ('_replace_conflicts', True),
             ('_force_doc_sync_conflict', True)]],
        U1DB_OTHER_GENERATIONS_KEY: [
            ('_load_other_generations_from_json', None),
            [('_get_replica_gen_and_trans_id', False),
             ('_do_set_replica_gen_and_trans_id', True)]],
        U1DB_INDEXES_KEY: [
            ('_load_indexes_from_json', '_dump_indexes_as_json'),
            [('list_indexes', False),
             ('get_from_index', False),
             ('get_range_from_index', False),
             ('get_index_keys', False),
             ('_put_and_update_indexes', True),
             ('create_index', True),
             ('delete_index', True)]],
        U1DB_REPLICA_UID_KEY: [
            (None, None),
            [('_allocate_doc_rev', False),
             ('_put_doc_if_newer', False),
             ('_ensure_maximal_rev', False),
             ('_prune_conflicts', False),
             ('_set_replica_uid', True)]]}

    @classmethod
    def open_database(cls, url, create):
        """
        Open a U1DB database using CouchDB as backend.

        @param url: the url of the database replica
        @type url: str
        @param create: should the replica be created if it does not exist?
        @type create: bool

        @return: the database instance
        @rtype: CouchDatabase
        """
        # get database from url
        m = re.match('(^https?://[^/]+)/(.+)$', url)
        if not m:
            raise InvalidURLError
        url = m.group(1)
        dbname = m.group(2)
        server = Server(url=url)
        try:
            server[dbname]
        except ResourceNotFound:
            if not create:
                raise DatabaseDoesNotExist()
        return cls(url, dbname)

    def __init__(self, url, dbname, replica_uid=None, full_commit=True,
                 session=None):
        """
        Create a new Couch data container.

        @param url: the url of the couch database
        @type url: str
        @param dbname: the database name
        @type dbname: str
        @param replica_uid: an optional unique replica identifier
        @type replica_uid: str
        @param full_commit: turn on the X-Couch-Full-Commit header
        @type full_commit: bool
        @param session: an http.Session instance or None for a default session
        @type session: http.Session
        """
        # save params
        self._url = url
        self._full_commit = full_commit
        self._session = session
        # configure couch
        self._server = Server(url=self._url,
                              full_commit=self._full_commit,
                              session=self._session)
        self._dbname = dbname
        # this will ensure that transaction and sync logs exist and are
        # up-to-date.
        try:
            self._database = self._server[self._dbname]
        except ResourceNotFound:
            self._server.create(self._dbname)
            self._database = self._server[self._dbname]
        ObjectStoreDatabase.__init__(self, replica_uid=replica_uid)

    #-------------------------------------------------------------------------
    # methods from Database
    #-------------------------------------------------------------------------

    def _get_doc(self, doc_id, check_for_conflicts=False):
        """
        Get just the document content, without fancy handling.

        @param doc_id: The unique document identifier
        @type doc_id: str
        @param check_for_conflicts: If set to True, the document will be
            checked for conflicts and its has_conflicts flag set accordingly.
        @type check_for_conflicts: bool

        @return: a Document object.
        @rtype: u1db.Document
        """
        cdoc = self._database.get(doc_id)
        if cdoc is None:
            return None
        has_conflicts = False
        if check_for_conflicts:
            has_conflicts = self._has_conflicts(doc_id)
        doc = self._factory(
            doc_id=doc_id,
            rev=cdoc[self.COUCH_U1DB_REV_KEY],
            has_conflicts=has_conflicts)
        contents = self._database.get_attachment(
            cdoc,
            self.COUCH_U1DB_ATTACHMENT_KEY)
        if contents:
            doc.content = json.loads(contents.read())
        else:
            doc.make_tombstone()
        return doc

    def get_all_docs(self, include_deleted=False):
        """
        Get the JSON content for all documents in the database.

        @param include_deleted: If set to True, deleted documents will be
            returned with empty content. Otherwise deleted documents will not
            be included in the results.
        @type include_deleted: bool

        @return: (generation, [Document])
            The current generation of the database, followed by a list of all
            the documents in the database.
        @rtype: tuple
        """
        generation = self._get_generation()
        results = []
        for doc_id in self._database:
            if doc_id.startswith(self.U1DB_DATA_DOC_ID_PREFIX):
                continue
            doc = self._get_doc(doc_id, check_for_conflicts=True)
            if doc.content is None and not include_deleted:
                continue
            results.append(doc)
        return (generation, results)

    def _put_doc(self, doc):
        """
        Update a document.

        This is called every time we just want to do a raw put on the db (i.e.
        without index updates, document constraint checks, and conflict
        checks).

        @param doc: The document to update.
        @type doc: u1db.Document

        @return: The new revision identifier for the document.
        @rtype: str
        """
        # prepare couch's Document
        cdoc = CouchDocument()
        cdoc[self.COUCH_ID_KEY] = doc.doc_id
        # we have to guarantee that couch's _rev is consistent
        old_cdoc = self._database.get(doc.doc_id)
        if old_cdoc is not None:
            cdoc[self.COUCH_REV_KEY] = old_cdoc[self.COUCH_REV_KEY]
        # store u1db's rev
        cdoc[self.COUCH_U1DB_REV_KEY] = doc.rev
        # save doc in db
        self._database.save(cdoc)
        # store u1db's content as json string
        if not doc.is_tombstone():
            self._database.put_attachment(
                cdoc, doc.get_json(),
                filename=self.COUCH_U1DB_ATTACHMENT_KEY)
        else:
            self._database.delete_attachment(
                cdoc,
                self.COUCH_U1DB_ATTACHMENT_KEY)

    def get_sync_target(self):
        """
        Return a SyncTarget object, for another u1db to synchronize with.

        @return: The sync target.
        @rtype: CouchSyncTarget
        """
        return CouchSyncTarget(self)

    def create_index(self, index_name, *index_expressions):
        """
        Create a named index, which can then be queried for future lookups.

        @param index_name: A unique name which can be used as a key prefix.
        @param index_expressions: Index expressions defining the index
            information.
        """
        if index_name in self._indexes:
            if self._indexes[index_name]._definition == list(
                    index_expressions):
                return
            raise errors.IndexNameTakenError
        index = InMemoryIndex(index_name, list(index_expressions))
        for doc_id in self._database:
            if doc_id.startswith(self.U1DB_DATA_DOC_ID_PREFIX):
                continue  # skip special files
            doc = self._get_doc(doc_id)
            if doc.content is not None:
                index.add_json(doc_id, doc.get_json())
        self._indexes[index_name] = index

    def close(self):
        """
        Release any resources associated with this database.

        @return: True if db was successfully closed.
        @rtype: bool
        """
        # TODO: fix this method so the connection is properly closed and
        # test_close (+tearDown, which deletes the db) works without problems.
        self._url = None
        self._full_commit = None
        self._session = None
        #self._server = None
        self._database = None
        return True

    def sync(self, url, creds=None, autocreate=True):
        """
        Synchronize documents with remote replica exposed at url.

        @param url: The url of the target replica to sync with.
        @type url: str
        @param creds: optional dictionary giving credentials.
            to authorize the operation with the server.
        @type creds: dict
        @param autocreate: Ask the target to create the db if non-existent.
        @type autocreate: bool

        @return: The local generation before the synchronisation was performed.
        @rtype: int
        """
        return Synchronizer(self, CouchSyncTarget(url, creds=creds)).sync(
            autocreate=autocreate)

    #-------------------------------------------------------------------------
    # methods from ObjectStoreDatabase
    #-------------------------------------------------------------------------

    def _init_u1db_data(self):
        """
        Initialize u1db configuration data on backend storage.

        A U1DB database needs to keep track of all database transactions,
        document conflicts, the generation of other replicas it has seen,
        indexes created by users and so on.

        In this implementation, all this information is stored in special
        documents stored in the underlying database with a doc_id prefix equal
        to U1DB_DATA_DOC_ID_PREFIX. Those document ids are reserved: put_doc(),
        get_doc() and delete_doc() will not allow documents with such a doc_id
        prefix to be accessed or modified.
        """
        for key in self.U1DB_DATA_KEYS:
            doc_id = '%s%s' % (self.U1DB_DATA_DOC_ID_PREFIX, key)
            doc = self._get_doc(doc_id)
            if doc is None:
                doc = self._factory(doc_id)
                doc.content = {'content': getattr(self, key)}
                self._put_doc(doc)

    #-------------------------------------------------------------------------
    # Couch specific methods
    #-------------------------------------------------------------------------

    INDEX_NAME_KEY = 'name'
    INDEX_DEFINITION_KEY = 'definition'
    INDEX_VALUES_KEY = 'values'

    def delete_database(self):
        """
        Delete a U1DB CouchDB database.
        """
        del(self._server[self._dbname])

    def _dump_indexes_as_json(self):
        """
        Dump index definitions as JSON.
        """
        indexes = {}
        for name, idx in self._indexes.iteritems():
            indexes[name] = {}
            for attr in [self.INDEX_NAME_KEY, self.INDEX_DEFINITION_KEY,
                         self.INDEX_VALUES_KEY]:
                indexes[name][attr] = getattr(idx, '_' + attr)
        return indexes

    def _load_indexes_from_json(self, indexes):
        """
        Load index definitions from stored JSON.

        @param indexes: A JSON representation of indexes as
            [('index-name', ['field', 'field2', ...]), ...].
        @type indexes: str
        """
        self._indexes = {}
        for name, idx_dict in indexes.iteritems():
            idx = InMemoryIndex(name, idx_dict[self.INDEX_DEFINITION_KEY])
            idx._values = idx_dict[self.INDEX_VALUES_KEY]
            self._indexes[name] = idx

    def _load_transaction_log_from_json(self, transaction_log):
        """
        Load transaction log from stored JSON.

        @param transaction_log: A JSON representation of transaction_log as
            [('generation', 'transaction_id'), ...].
        @type transaction_log: list
        """
        self._transaction_log = []
        for gen, trans_id in transaction_log:
            self._transaction_log.append((gen, trans_id))

    def _load_other_generations_from_json(self, other_generations):
        """
        Load other generations from stored JSON.

        @param other_generations: A JSON representation of other_generations
            as {'replica_uid': ('generation', 'transaction_id'), ...}.
        @type other_generations: dict
        """
        self._other_generations = {}
        for replica_uid, [gen, trans_id] in other_generations.iteritems():
            self._other_generations[replica_uid] = (gen, trans_id)
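
A small, hedged sketch of how this backend might be opened and exercised; the URL and database name are placeholders, and a running CouchDB plus the u1db dependencies are assumed.

# Hypothetical usage of the U1DB-over-CouchDB backend defined above.
db = CouchDatabase.open_database('http://localhost:5984/u1db_example',
                                 create=True)

# create_index and get_all_docs are part of the u1db Database interface that
# this class implements on top of CouchDB documents and attachments.
db.create_index('by-tag', 'tag')
generation, docs = db.get_all_docs()
print('%d docs at generation %d' % (len(docs), generation))

db.delete_database()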
Ejemplo n.º 41
0
class CouchDatabase(ObjectStoreDatabase):
    """A U1DB backend that uses Couch as its persistence layer."""
    @classmethod
    def open_database(cls, url, create):
        """Open a U1DB database using CouchDB as backend."""
        # get database from url
        m = re.match('(^https?://[^/]+)/(.+)$', url)
        if not m:
            raise InvalidURLError
        url = m.group(1)
        dbname = m.group(2)
        server = Server(url=url)
        try:
            server[dbname]
        except ResourceNotFound:
            if not create:
                raise DatabaseDoesNotExist()
        return cls(url, dbname)

    def __init__(self,
                 url,
                 database,
                 replica_uid=None,
                 full_commit=True,
                 session=None):
        """Create a new Couch data container."""
        self._url = url
        self._full_commit = full_commit
        self._session = session
        self._server = Server(url=self._url,
                              full_commit=self._full_commit,
                              session=self._session)
        self._dbname = database
        # this will ensure that transaction and sync logs exist and are
        # up-to-date.
        try:
            self._database = self._server[database]
        except ResourceNotFound:
            self._server.create(database)
            self._database = self._server[database]
        super(CouchDatabase, self).__init__(replica_uid=replica_uid,
                                            document_factory=LeapDocument)

    #-------------------------------------------------------------------------
    # methods from Database
    #-------------------------------------------------------------------------

    def _get_doc(self, doc_id, check_for_conflicts=False):
        """Get just the document content, without fancy handling."""
        cdoc = self._database.get(doc_id)
        if cdoc is None:
            return None
        has_conflicts = False
        if check_for_conflicts:
            has_conflicts = self._has_conflicts(doc_id)
        doc = self._factory(doc_id=doc_id,
                            rev=cdoc['u1db_rev'],
                            has_conflicts=has_conflicts)
        contents = self._database.get_attachment(cdoc, 'u1db_json')
        if contents:
            doc.content = json.loads(contents.getvalue())
        else:
            doc.make_tombstone()
        return doc

    def get_all_docs(self, include_deleted=False):
        """Get the JSON content for all documents in the database."""
        generation = self._get_generation()
        results = []
        for doc_id in self._database:
            if doc_id == self.U1DB_DATA_DOC_ID:
                continue
            doc = self._get_doc(doc_id, check_for_conflicts=True)
            if doc.content is None and not include_deleted:
                continue
            results.append(doc)
        return (generation, results)

    def _put_doc(self, doc):
        """Store document in database."""
        # prepare couch's Document
        cdoc = CouchDocument()
        cdoc['_id'] = doc.doc_id
        # we have to guarantee that couch's _rev is consistent
        old_cdoc = self._database.get(doc.doc_id)
        if old_cdoc is not None:
            cdoc['_rev'] = old_cdoc['_rev']
        # store u1db's rev
        cdoc['u1db_rev'] = doc.rev
        # save doc in db
        self._database.save(cdoc)
        # store u1db's content as json string
        if not doc.is_tombstone():
            self._database.put_attachment(cdoc,
                                          doc.get_json(),
                                          filename='u1db_json')
        else:
            self._database.delete_attachment(cdoc, 'u1db_json')

    def get_sync_target(self):
        """
        Return a SyncTarget object, for another u1db to synchronize with.
        """
        return CouchSyncTarget(self)

    def create_index(self, index_name, *index_expressions):
        """
        Create a named index, which can then be queried for future lookups.
        """
        if index_name in self._indexes:
            if self._indexes[index_name]._definition == list(
                    index_expressions):
                return
            raise errors.IndexNameTakenError
        index = InMemoryIndex(index_name, list(index_expressions))
        for doc_id in self._database:
            if doc_id == self.U1DB_DATA_DOC_ID:
                continue
            doc = self._get_doc(doc_id)
            if doc.content is not None:
                index.add_json(doc_id, doc.get_json())
        self._indexes[index_name] = index
        # save data in object store
        self._store_u1db_data()

    def close(self):
        """Release any resources associated with this database."""
        # TODO: fix this method so the connection is properly closed and
        # test_close (+tearDown, which deletes the db) works without problems.
        self._url = None
        self._full_commit = None
        self._session = None
        #self._server = None
        self._database = None
        return True

    def sync(self, url, creds=None, autocreate=True):
        """Synchronize documents with remote replica exposed at url."""
        from u1db.sync import Synchronizer
        return Synchronizer(self, CouchSyncTarget(
            url, creds=creds)).sync(autocreate=autocreate)

    #-------------------------------------------------------------------------
    # methods from ObjectStoreDatabase
    #-------------------------------------------------------------------------

    def _init_u1db_data(self):
        if self._replica_uid is None:
            self._replica_uid = uuid.uuid4().hex
        doc = self._factory(doc_id=self.U1DB_DATA_DOC_ID)
        doc.content = {
            'transaction_log': [],
            'conflicts': b64encode(json.dumps({})),
            'other_generations': {},
            'indexes': b64encode(json.dumps({})),
            'replica_uid': self._replica_uid
        }
        self._put_doc(doc)

    def _fetch_u1db_data(self):
        # retrieve u1db data from couch db
        cdoc = self._database.get(self.U1DB_DATA_DOC_ID)
        jsonstr = self._database.get_attachment(cdoc, 'u1db_json').getvalue()
        content = json.loads(jsonstr)
        # set u1db database info
        #self._sync_log = content['sync_log']
        self._transaction_log = content['transaction_log']
        self._conflicts = json.loads(b64decode(content['conflicts']))
        self._other_generations = content['other_generations']
        self._indexes = self._load_indexes_from_json(
            b64decode(content['indexes']))
        self._replica_uid = content['replica_uid']
        # save couch _rev
        self._couch_rev = cdoc['_rev']

    def _store_u1db_data(self):
        doc = self._factory(doc_id=self.U1DB_DATA_DOC_ID)
        doc.content = {
            'transaction_log': self._transaction_log,
            # Here, the b64 encode ensures that document content
            # does not cause strange behaviour in couchdb because
            # of encoding.
            'conflicts': b64encode(json.dumps(self._conflicts)),
            'other_generations': self._other_generations,
            'indexes': b64encode(self._dump_indexes_as_json()),
            'replica_uid': self._replica_uid,
            '_rev': self._couch_rev
        }
        self._put_doc(doc)

    #-------------------------------------------------------------------------
    # Couch specific methods
    #-------------------------------------------------------------------------

    def delete_database(self):
        """Delete a U1DB CouchDB database."""
        del (self._server[self._dbname])

    def _dump_indexes_as_json(self):
        indexes = {}
        for name, idx in self._indexes.iteritems():
            indexes[name] = {}
            for attr in ['name', 'definition', 'values']:
                indexes[name][attr] = getattr(idx, '_' + attr)
        return json.dumps(indexes)

    def _load_indexes_from_json(self, indexes):
        # avoid shadowing the builtin dict
        loaded = {}
        for name, idx_dict in json.loads(indexes).iteritems():
            idx = InMemoryIndex(name, idx_dict['definition'])
            idx._values = idx_dict['values']
            loaded[name] = idx
        return loaded
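
The conflicts and index payloads above are JSON-encoded and then base64-wrapped before being written into the u1db data document, so CouchDB only ever sees an opaque string. A minimal, standalone round trip of that encoding, independent of the class:

import json
from base64 import b64encode, b64decode

conflicts = {'doc-1': [{'rev': '2-abc', 'content': None}]}

# store: JSON first, then base64, so the payload survives as a plain string
stored = b64encode(json.dumps(conflicts).encode('utf-8'))

# load: undo the two steps in reverse order
restored = json.loads(b64decode(stored).decode('utf-8'))
assert restored == conflicts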
Ejemplo n.º 42
0
import json
import time

from couchdb import Server
from couchdb.http import PreconditionFailed
from couchdb.mapping import Document, IntegerField, TextField

# chat with the couchdb server

class Tweet(Document):
	id = IntegerField()
	from_user = TextField()
	text = TextField()
	
# TODO optimize for speed... very verbose
j = open('geotagged_tweets_from_haiti.json')
# read the file as one JSON object per line
jsonobjects = [line for line in j]

l = [json.loads(obj) for obj in jsonobjects]

server = Server('http://127.0.0.1:5984')
try:
    db = server.create('tweetgeneral')
except PreconditionFailed:
    # the database already exists: drop it and start from scratch
    del server['tweetgeneral']
    db = server.create('tweetgeneral')
	
# for each_tweet in l:
# 	tweet = Tweet(id=each_tweet['id'],from_user=each_tweet['from_user'],text=each_tweet['text'])
# 	tweet.store(db)
# 	
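
The commented-out loop above sketches the intended import; a hedged version of that loop using the Tweet mapping defined earlier is shown below (field names follow the snippet, not the current Twitter API).

for each_tweet in l:
    tweet = Tweet(id=each_tweet['id'],
                  from_user=each_tweet['from_user'],
                  text=each_tweet['text'])
    tweet.store(db)

print('%d tweets stored' % len(l))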


Ejemplo n.º 43
0
Archivo: database.py Proyecto: nido/mis
class Database():
    """The Database class incorperates functions you wish to ask
    the database"""

    jsdir = CONFIG.get('couchdb', 'javascript_directory')

    __DESIGN_VIEWS_PATHS_MAP = \
        file(jsdir + '/design_views_paths_map.js').read()
    __DESIGN_VIEWS_SHASUMS_MAP = \
        file(jsdir + '/design_views_shasums_map.js').read()
    __DESIGN_VIEWS_FORMATS_MAP = \
        file(jsdir + '/design_views_formats_map.js').read()
    __DESIGN_VIEWS_FORMATS_REDUCE = '_sum'
    __DESIGN_VIEWS_SOUND_MAP = \
        file(jsdir + '/design_views_sound_map.js').read()
    __DESIGN_VIEWS_VIDEO_MAP = \
        file(jsdir + '/design_views_sound_map.js').read()
    __DESIGN_FULLTEXT_ARTIST_INDEX = \
        file(jsdir + '/design_fulltext_artist_index.js').read()
    __DESIGN_FULLTEXT_EVERYTHING_INDEX = \
        file(jsdir + '/design_fulltext_artist_index.js').read()

    def create_views(self):
        """creates views and saves them to the database"""
        LOG.info('creating views')
        views = {'_id': '_design/views',
                 'language': 'javascript',
                 'views': {
                     'shasums': {'map': self.__DESIGN_VIEWS_SHASUMS_MAP},
                     'paths': {'map': self.__DESIGN_VIEWS_PATHS_MAP},
                     'formats': {
                         'map': self.__DESIGN_VIEWS_FORMATS_MAP,
                         'reduce': self.__DESIGN_VIEWS_FORMATS_REDUCE},
                     'sound': {'map': self.__DESIGN_VIEWS_SOUND_MAP},
                     'video': {'map': self.__DESIGN_VIEWS_VIDEO_MAP}
                 },
                 'fulltext': {
                 'artist': {'index': self.__DESIGN_FULLTEXT_ARTIST_INDEX},
                 'everything': {'index':
                                self.__DESIGN_FULLTEXT_EVERYTHING_INDEX}
                 }}
        self.database.create(views)

    def __init__(self):
        """Initialises a new connection to couch and
        creates a new mis databse if nonexistent"""
        LOG.info('initialising database')
        database_name = CONFIG.get('couchdb', 'database')
        host = CONFIG.get('couchdb', 'hostname')
        port = CONFIG.get('couchdb', 'port')
        database_uri = 'http://' + host + ':' + port + '/'
        self.server = Server(database_uri)
        try:
            # This statement appears to do nothing, but is used to
            # make sure the database is reachable.
            self.server.version
        except AttributeError as error:
            if not test_tcp_connection(host, int(port)):
                LOG.critical("couchdb cannot be reached at " + database_uri)
                exit(1)
            else:
                LOG.error('unknown AttributeError thrown')
                raise error
        try:
            LOG.debug('opening database')
            self.database = self.server[database_name]
        except(ResourceNotFound):
            LOG.info('creating database')
            # The database didn't exist. Lets create it.
            self.database = self.server.create(database_name)
            self.create_views()

    def iterate_all_files(self):
        """With a big database, this is probably a bad idea. this
iterates through every single document."""
        for entry in self.database:
            yield(entry)

    def get_document(self, shasum):
        """extracts a (full) document from the database using the
shasum as an identifier"""
        assert shasum is not None
        assert shasum != ''
        LOG.debug('getting document')
        result = None
        try:
            result = self.database[shasum]
            # make sure it actually exists
        except (ResourceNotFound) as error:
            LOG.error("don't have that document, doesn't exist:" +
                      str(error))
        return result

    def add_userdata(self, shasum, data):
        """Adds userdata to the database shasum"""
        LOG.debug('add userdata')
        shasum = unicode(shasum)
        user = getuser()
        node = platform_node()
        user_key = node + ':' + user
        userdata = {}
        if not self.file_exists(shasum):
            LOG.error('trying to add userdata to nonexistent file ' + shasum)
            return None
        entry = self.database[shasum]
        userdatalist = {}
        if 'userdata' in entry:
            userdatalist = entry['userdata']
        if user_key in userdatalist:
            userdata = userdatalist[user_key]
        userdata.update(data)
        userdatalist[user_key] = userdata
        entry['userdata'] = userdatalist
        self.database[shasum] = entry

    def add_data(self, shasum, name, data):
        """adds data to a record"""
        assert shasum is not None
        shasum = unicode(shasum)
        name = unicode(name)
        LOG.debug('adding data')
        if self.file_exists(shasum):
            mis_file = self.database[shasum]
            if name not in mis_file or mis_file[name] != data:
                LOG.info(shasum + " info " + name + " has changed, updating")
                mis_file[name] = data
                self.database[shasum] = mis_file
        else:  # create when nonexistent
            LOG.info(shasum + " info " + name + " added")
            entry = {'_id': shasum, name: data}
            self.database.create(entry)

    def add_path(self, shasum, node, path):
        """Adds a path to the database"""
        assert shasum is not None
        shasum = unicode(shasum)
        node = unicode(node)
        path = unicode(path)
        LOG.debug('adding path ' + path + " to " + shasum)
        path_info = {'node': node, 'path': path}
        if self.file_exists(shasum):
            mis_file = self.database[shasum]
            mis_file['paths'].append(path_info)
            self.database[mis_file['_id']] = mis_file
        else:  # create when nonexistent
            entry = {'_id': shasum, 'paths': [path_info]}
            self.database.create(entry)

    def file_exists(self, shasum):
        """Checks if a file (shasum) exists in the database, and
        returns the entry when found"""
        assert shasum is not None
        shasum = unicode(shasum)
        result = None
        LOG.debug('checking if file exists: ' + shasum)
        try:
            # Note: the following line triggers the
            # ResourceNotFound if the sha is not known, this is
            # the way we catch whether it exists.
            self.database[shasum]  # pylint: disable-msg=W0104
            result = shasum
        except ResourceNotFound:
            LOG.debug('trying to find nonexistent entry ' + shasum)
        return result

    def path_exists(self, path, node=None):
        """Checks whether a certain path exists and returns True
        or False"""
        if node is None:
            node = platform_node()
        node = unicode(node)
        path = unicode(path)
        LOG.debug('path exists: ' + node + ':' + path)
        result = None
        key = [node, path]
        results = self.database.view('views/paths', key=key)
        if(len(results) > 0):
            result = results.rows[0]['value']
        return result
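
A small, hedged usage sketch of the Database wrapper above, assuming the mis configuration (CONFIG, the JavaScript view files and a reachable CouchDB) is in place; the shasum and path are placeholders.

db = Database()

shasum = u'0' * 40  # placeholder identifier standing in for a real sha sum
db.add_path(shasum, u'mynode', u'/music/album/track01.flac')
db.add_data(shasum, u'format', u'flac')
db.add_userdata(shasum, {u'rating': 5})

print(db.get_document(shasum))
print(db.path_exists(u'/music/album/track01.flac', node=u'mynode'))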