Example #1
def safe_fetch(item):
    '''Fetch model or return None if unauthorized'''
    try:
        resp = yield item.fetch()
        defer.returnValue(resp)
    except Unauthorized:
        return
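All of the snippets on this page rely on Twisted's @defer.inlineCallbacks generator pattern: yield suspends the function until a Deferred fires, and defer.returnValue() hands the result back to the caller. The decorators themselves are mostly stripped from the listings. Below is a minimal, self-contained sketch of how safe_fetch would be decorated and driven; the FakeItem class and the Unauthorized exception defined here are illustrative stand-ins, not part of feat's API.

from twisted.internet import defer, reactor


class Unauthorized(Exception):
    # stand-in for the Unauthorized error caught by safe_fetch
    pass


class FakeItem(object):
    # illustrative model item whose fetch() returns an already-fired Deferred
    def fetch(self):
        return defer.succeed({'name': 'example'})


@defer.inlineCallbacks
def safe_fetch(item):
    '''Fetch model or return None if unauthorized'''
    try:
        resp = yield item.fetch()
        defer.returnValue(resp)
    except Unauthorized:
        return


@defer.inlineCallbacks
def main():
    model = yield safe_fetch(FakeItem())
    print model  # prints {'name': 'example'}
    reactor.stop()


if __name__ == '__main__':
    reactor.callWhenRunning(main)
    reactor.run()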
Example #2
    def _render_array(self, model, limit, context):
        tree = list()
        flattened = list()
        columns = list()

        if not context.models or model != context.models[-1]:
            # this works around the fact that the write() method is passed
            # the context of the current model instead of the parent
            context = context.descend(model)
        yield self._build_tree(tree, model, limit, context)
        self._flatten_tree(flattened, columns, dict(), tree[0], limit)

        headers = [html.tags.th()(x) for x, _ in columns]
        table = html.tags.table()(
            html.tags.thead()(*headers))
        tbody = html.tags.tbody()
        table.content.append(tbody)

        for row in flattened:
            tr = html.tags.tr()
            for column in columns:
                td = html.tags.td()
                value = row.get(column)
                if value:
                    item, cur_context = value
                    td.append(self._format_attribute_item(item, cur_context))
                tr.append(td)

            tbody.append(tr)

        defer.returnValue(table)
Example #3
File: client.py Project: f3at/feat
    def save_document(self, doc):
        assert IDocument.providedBy(doc) or isinstance(doc, dict), repr(doc)
        try:
            self._lock_notifications()

            serialized = self._serializer.convert(doc)
            if IDocument.providedBy(doc):
                following_attachments = dict(
                    (name, attachment) for name, attachment
                    in doc.get_attachments().iteritems()
                    if not attachment.saved)
                doc_id = doc.doc_id
            else:
                following_attachments = dict()
                doc_id = doc.get('_id')
            resp = yield self._database.save_doc(serialized, doc_id,
                                                 following_attachments)
            self._update_id_and_rev(resp, doc)
            for attachment in following_attachments.itervalues():
                attachment.set_saved()

            # now process all the documents which have been registered to
            # be saved together with this document
            if IDocument.providedBy(doc):
                while doc.links.to_save:
                    to_link, linker_roles, linkee_roles = (
                        doc.links.to_save.pop(0))
                    to_link.links.create(doc=doc, linker_roles=linker_roles,
                                         linkee_roles=linkee_roles)
                    yield self.save_document(to_link)

            defer.returnValue(doc)
        finally:
            self._unlock_notifications()
Example #4
def configure_replicator_database(host, port, username=None, password=None):
    """
    Connects to the database, checks the version and creates the
    design document used by feat (if it doesn't exist).

    @returns: IDatabaseConnection bound to _replicator database
    """
    database = driver.Database(host, port, '_replicator', username, password)
    connection = database.get_connection()
    version = yield database.get_version()
    if version < (1, 1, 0):
        database.disconnect()
        raise ValueError("Found couchdb version %r. "
                         "_replicator database has been introduced in 1.1.0." %
                         (version, ))
    design_docs = view.DesignDocument.generate_from_views([Replications])
    for doc in design_docs:
        try:
            doc2 = yield connection.get_document(doc.doc_id)
            if doc.views != doc2.views or doc.filters != doc2.filters:
                doc.rev = doc2.rev
                yield connection.save_document(doc)

        except NotFoundError:
            yield connection.save_document(doc)
    defer.returnValue(connection)
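A sketch of how the helper above might be called from another @defer.inlineCallbacks function during service startup, assuming configure_replicator_database is importable; the host, port and function name setup_replicator are illustrative assumptions, not feat's actual startup code.

from twisted.internet import defer


@defer.inlineCallbacks
def setup_replicator(host='localhost', port=5984):
    # illustrative caller; credentials would be passed in a real setup
    connection = yield configure_replicator_database(host, port)
    # connection is an IDatabaseConnection bound to the _replicator
    # database, ready for query_view()/save_document() calls
    defer.returnValue(connection)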
Example #5
File: query.py Project: f3at/feat
def select_ids(connection, query, skip=0, limit=None, include_responses=False):
    temp, responses = yield _get_query_response(connection, query)

    total_count = len(temp)
    if limit is not None:
        stop = skip + limit
    else:
        stop = None

    name, direction = query.sorting
    index = first(v.entries for k, v in responses.iteritems()
                  if k.field == name)

    if direction == Direction.DESC:
        index = reversed(index)

    if query.aggregate:
        # we have to copy the collection because _get_sorted_slice()
        # treats it as a buffer and modifies its content
        aggregate_index = set(temp)

    r = Result(_get_sorted_slice(index, temp, skip, stop))
    r.total_count = total_count

    # count reductions for aggregated fields based on the view index
    if query.aggregate:
        r.aggregations = list()
        for handler, field in query.aggregate:
            value_index = first(v for k, v in responses.iteritems()
                                if k.field == field)
            r.aggregations.append(handler(
                x for x in value_iterator(aggregate_index, value_index)))
    if include_responses:
        defer.returnValue((r, responses))
    else:
        defer.returnValue(r)
Example #6
 def _get_agents_at(self, host):
     agents = [host]
     hosted_recp = yield host.get_hosted_recipients()
     for recp in hosted_recp:
         medium = yield self.driver.find_agent(recp)
         agents += [medium.get_agent()]
     defer.returnValue(agents)
Example #7
File: query.py Project: f3at/feat
def values(connection, query, field, unique=True):
    if field not in query.fields:
        raise ValueError("%r doesn't have %s field defined" % (type(query), field))
    query.include_value.append(field)
    query.reset()  # ensures the field condition gets included

    temp, responses = yield _get_query_response(connection, query)
    index = first(v for k, v in responses.iteritems() if k.field == field)
    if not index.includes_values:
        raise ValueError(
            "The query controller of %s field "
            "of %s query is not marked to "
            "keep the value in the cache. You have to enable "
            "it to make query.value() work." % (field, query.name)
        )
    if unique:
        resp = set()
        for x in temp:
            resp.add(index.get_value(x))
        defer.returnValue(list(resp))
    else:
        resp = list()
        for x in temp:
            resp.append(index.get_value(x))
        defer.returnValue(resp)
Example #8
    def testMisconfiguredPostgresFallbackToSqlite(self):
        try:
            import txpostgres
        except ImportError:
            raise SkipTest('txpostgres package is missing')
        postgres = ('postgres://%s:%s@%s/%s' %
                    ('user', 'password', 'localhost', 'name'))
        tmpfile = self._get_tmp_file()
        sqlite = 'sqlite://' + tmpfile

        connstrs = [postgres, sqlite]
        agency_stub = AgencyStub()
        jour = journaler.Journaler(
            on_switch_writer_cb=agency_stub.on_switch_writer)
        jour.set_connection_strings(connstrs)
        jour.insert_entry(**self._generate_entry())

        @defer.inlineCallbacks
        def check():
            w = jour._writer
            self.log('writer is %r', w)
            if isinstance(w, journaler.SqliteWriter):
                try:
                    num = yield self._get_number_of_entries(jour, 1)
                    self.log('num is %d', num)
                    defer.returnValue(num == 1)
                except FailTest, e:
                    self.log('assertion failure: %r', e)
                    defer.returnValue(False)
                defer.returnValue(True)
            defer.returnValue(False)
Example #9
 def _format_attribute_item(self, item, context):
     model = yield safe_fetch(item)
     if not IModel.providedBy(model):
         defer.returnValue("")
     result = yield self._format_attribute(model, context.descend(model),
                                           context, html_links(item))
     defer.returnValue(result)
Example #10
 def assertState(self, _, state):
     self.assertEqual(state, self.manager._get_medium().state)
     if state not in (contracts.ContractState.completed,
                      contracts.ContractState.terminated, ):
         self.assertFailure(self.finished, protocols.ProtocolFailed)
         yield self.finished
     defer.returnValue(self.manager)
Example #11
def _get_query_response(connection, query):
    cache = connection.get_query_cache()
    responses = dict()
    for subquery in query.get_basic_queries():
        # subquery -> list of doc ids
        responses[subquery] = yield cache.query(
            connection, query.factory, subquery)
    defer.returnValue((_calculate_query_response(responses, query), responses))
Example #12
File: common.py Project: f3at/feat
 def model_descend(self, model, *path):
     i = model
     for part in path:
         i = yield i.fetch_item(part)
         if i is None:
             return
         i = yield i.fetch()
     defer.returnValue(i)
Example #13
def get_replication_status(rconnection, source):
    database = rconnection.database
    if not isinstance(database, driver.Database):
        raise TypeError("This procedure would work only for driver connected"
                        " to the real database. It uses public methods which"
                        " are not the part of IDatabaseDriver interface")

    version = yield rconnection.database.get_version()
    if version < (1, 2, 0):
        raise ValueError("CouchDB 1.2.0 required, found %r" % (version, ))

    active_tasks = yield database.couchdb_call(
        database.couchdb.get, '/_active_tasks')
    # In CouchDB versions >= 1.2.2 the replication_id is suffixed with the
    # string literal '+continuous'. Here we cut it off.
    for task in active_tasks:
        if (task.get('type') == 'replication' and
            task.get('replication_id', '').endswith('+continuous')):
            task['replication_id'] = task['replication_id'].replace(
                '+continuous', '')
    active_tasks = dict((x['replication_id'], x) for x in active_tasks
                        if (x['type'] == 'replication' and
                            'replication_id' in x))

    replications = yield rconnection.query_view(Replications,
                                                key=('source', source),
                                                include_docs=True)

    # target -> [(checkpointed_source_seq, continuous, status, replication_id)]
    result = dict()
    for replication in replications:
        target = replication['target']
        result.setdefault(target, list())

        r_id = replication['_replication_id']
        r_state = replication.get('_replication_state')
        r_continuous = replication.get('continuous', False)
        if r_state == 'completed':
            seq = replication['_replication_stats']['checkpointed_source_seq']
            result[target].append((seq, False, 'completed', r_id))
        elif r_state == 'triggered' and r_continuous:
            task = active_tasks.get(r_id)
            if not task:
                result[target].append((0, True, 'task_missing', r_id))
            else:
                seq = task['checkpointed_source_seq']
                result[target].append((seq, True, 'running', r_id))
        else:
            result[target].append((0, r_continuous, r_state, r_id))

    # Sort the results so that the first row for each target
    # is the one with the highest update_seq, i.e. the most
    # recent one. Secondary sorting promotes continuous
    # replication over one-time replication.
    for rows in result.itervalues():
        rows.sort(key=lambda x: (x[0], x[1]), reverse=True)

    defer.returnValue(result)
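A quick illustration of the final sort in get_replication_status, using made-up rows: the key (checkpointed_source_seq, continuous) combined with reverse=True puts the highest sequence first and, on equal sequences, ranks continuous replications above one-time ones.

rows = [(100, False, 'completed', 'repl-a'),
        (250, False, 'completed', 'repl-b'),
        (250, True, 'running', 'repl-c')]
rows.sort(key=lambda x: (x[0], x[1]), reverse=True)
# rows is now:
# [(250, True, 'running', 'repl-c'),
#  (250, False, 'completed', 'repl-b'),
#  (100, False, 'completed', 'repl-a')]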
Example #14
 def run_and_configure_db(self):
     yield self.db_process.restart()
     c = self.db_process.get_config()
     db_host, db_port, db_name = c['host'], c['port'], 'test'
     db = database.Database(db_host, db_port, db_name)
     self.db = db.get_connection()
     yield dbtools.create_db(self.db)
     yield dbtools.push_initial_data(self.db)
     defer.returnValue((db_host, db_port, db_name, ))
Example #15
 def get_hosted_recipients(self, state):
     result = list()
     partners = self.query_partners('all')
     for partner in partners:
         agent_id = partner.recipient.key
         hosted = yield state.medium.check_if_hosted(agent_id)
         if hosted:
             result.append(partner.recipient)
     defer.returnValue(result)
Example #16
 def list_slaves(self):
     '''Print information about the slave agencies.'''
     resp = []
     for slave_id, slave in self._broker.slaves.iteritems():
         resp += ["#### Slave %s ####" % slave_id]
         table = yield slave.callRemote('list_agents')
         resp += [table]
         resp += [""]  # blank separator line between slaves
     defer.returnValue("\n".join(resp))
Example #17
 def query_partners(self, agent):
     '''
     Return the ShardAgent instances of the partners that are
     neighbours of the given ShardAgent.
     '''
     result = []
     for p in agent.query_partners('neighbours'):
         ag = yield self.driver.find_agent(p.recipient.key)
         result.append(ag.get_agent())
     defer.returnValue(result)
Example #18
def _check_conflict(connection, doc_id):
    try:
        raw_doc = yield connection.get_document(doc_id, raw=True,
                                                conflicts=True)
    except NotFoundError:
        in_conflict = False
        raw_doc = {'_id': doc_id, '_deleted': True}
    else:
        in_conflict = '_conflicts' in raw_doc
    defer.returnValue((in_conflict, raw_doc))
Example #19
File: query.py Project: f3at/feat
def select(connection, query, skip=0, limit=None, include_responses=False):
    res, responses = yield select_ids(connection, query, skip, limit,
                                      include_responses=True)
    temp = yield connection.bulk_get(res)
    res.update(temp)

    if query.include_value:
        yield include_values(res, responses, query)
    if include_responses:
        defer.returnValue((res, responses))
    else:
        defer.returnValue(res)
Example #20
 def start_shard(self):
     a_id = str(uuid.uuid1())
     script = format_block("""
     agency = spawn_agency(start_host=False)
     desc = descriptor_factory('shard_agent', shard='%(shard)s')
     agency.start_agent(desc, run_startup=False)
     agent = _.get_agent()
     agent.look_for_neighbours()
     """) % dict(shard=a_id)
     yield self.process(script)
     defer.returnValue(self.get_local('agent'))
Example #21
def select_ids(connection, query, skip=0, limit=None):
    temp, responses = yield _get_query_response(connection, query)
    if query.sorting:
        temp = sorted(temp, key=_generate_sort_key(responses, query.sorting))
    else:
        temp = list(temp)
    if limit is not None:
        stop = skip + limit
    else:
        stop = None
    defer.returnValue(temp[slice(skip, stop)])
Example #22
 def run_and_configure_db(self):
     yield self.db_process.restart()
     c = self.db_process.get_config()
     db_host, db_port, db_name = c['host'], c['port'], 'test'
     db = database.Database(db_host, db_port, db_name)
     self.db = db.get_connection()
     yield tools.create_db(self.db)
     # disable nagios integration for the purpose of this test
     from feat.agents.alert import alert_agent
     alert_config = alert_agent.AlertAgentConfiguration(enabled=False)
     feat.initial_data(alert_config)
     yield tools.push_initial_data(self.db)
     defer.returnValue((db_host, db_port, db_name, ))
Example #23
 def start_host(self, join_shard=True):
     script = format_block("""
      desc = descriptor_factory('host_agent')
      agency = spawn_agency(start_host=False)
      medium = agency.start_agent(desc, run_startup=False)
      agent = medium.get_agent()
     """)
     yield self.process(script)
     agent = self.get_local('agent')
     if join_shard:
         yield agent.start_join_shard_manager()
     yield self.wait_for_idle(20)
     defer.returnValue(agent)
Example #24
    def wait_for_alert(self, timeout=10):

        def check():
            try:
                alert = self.browser.switch_to_alert()
                # accessing the alert text raises NoAlertPresentException
                # if no alert is currently displayed
                alert.text
                return True
            except exceptions.NoAlertPresentException:
                return False

        yield self.wait_for(check, timeout)

        defer.returnValue(self.browser.switch_to_alert())
Example #25
 def find_agent(self, agent_id):
     """
     Return the medium of the agent with the given agent_id if it is
     running in the simulation.
     """
     try:
         recp = IRecipient(agent_id)
         agent_id = recp.key
     except TypeError:
         pass
     agency = self.find_agency(agent_id)
     if agency:
         agent = yield agency.find_agent(agent_id)
         defer.returnValue(agent)
Example #26
    def save_document(self, doc):
        doc = IDocument(doc)

        serialized = self._serializer.convert(doc)
        resp = yield self._database.save_doc(serialized, doc.doc_id)
        self._update_id_and_rev(resp, doc)

        for name, attachment in doc.get_attachments().iteritems():
            if not attachment.saved:
                resp = yield self._database.save_attachment(
                    doc.doc_id, doc.rev, attachment)
                self._update_id_and_rev(resp, doc)
                attachment.set_saved()
        defer.returnValue(doc)
Example #27
 def locate_agent(self, recp):
     '''locate_agent(recp): Return (host, port, should_redirect) tuple.
     '''
     if recipient.IRecipient.providedBy(recp):
         agent_id = recp.key
     else:
         agent_id = recp
     found = yield self.find_agent(agent_id)
     if isinstance(found, agency.AgencyAgent):
         host = self.get_hostname()
         port = self.gateway_port
         defer.returnValue((host, port, False, ))
     elif isinstance(found, broker.AgentReference):
         host = self.get_hostname()
         port = yield found.reference.callRemote('get_gateway_port')
         defer.returnValue((host, port, True, ))
     else:  # None
         # lazy import: don't load the descriptor before feat is loaded
         from feat.utils import locate
         db = self._database.get_connection()
         host = yield locate.locate(db, agent_id)
         port = self.config.gateway.port
         if host is None:
             defer.returnValue(None)
         else:
             defer.returnValue((host, port, True, ))
Example #28
File: agency.py Project: f3at/feat
    def locate_agent(self, recp):
        """locate_agent(recp): Return (host, port, should_redirect) tuple.
        """
        if recipient.IRecipient.providedBy(recp):
            agent_id = recp.key
        else:
            agent_id = recp
        found = yield self.find_agent(agent_id)
        if isinstance(found, agency.AgencyAgent):
            host = self.get_hostname()
            port = self.gateway_port
            defer.returnValue((host, port, False))
        elif isinstance(found, broker.AgentReference):
            host = self.get_hostname()
            port = yield found.reference.callRemote("get_gateway_port")
            defer.returnValue((host, port, True))
        else:  # None
            # lazy import: don't load the descriptor before feat is loaded
            from feat.utils import locate

            db = self._database.get_connection()
            host = yield locate.locate(db, agent_id)
            port = self.config.gateway.port
            if host is None or (self._broker.is_master() and host == self.get_hostname()):
                # Second condition reflects the situation when the agent
                # has its descriptor in the database but is not running.
                # It breaks the infinite redirect loop.
                defer.returnValue(None)
            else:
                defer.returnValue((host, port, True))
Example #29
 def _get_number_of_entries(self, jour, num):
     if isinstance(jour, journaler.Journaler):
         writer = jour._writer
     else:
         writer = jour
     histories = yield writer.get_histories()
     self.assertIsInstance(histories, list)
     self.assertTrue(len(histories) > 0)
     if num > 0:
         self.assertIsInstance(histories[0], journaler.History)
         entries = yield writer.get_entries(histories[0])
         self.assertIsInstance(entries, list)
         defer.returnValue(len(entries))
     else:
         defer.returnValue(0)
Example #30
File: api.py Project: f3at/feat
    def locate_agent(self, agent_id):
        db = self.db()
        agency = self.source

        medium = agency.get_agent(agent_id)
        if medium is not None:
            defer.returnValue(medium.get_agent())

        host = yield locate.locate(db, agent_id)

        if host is None:
            return
        port = self.source.config['gateway']['port']
        res = reference.Absolute((host, port), "apps", "dns", "servers",
                                 agent_id)
        defer.returnValue(res)