Example #1
    def claim_sites(self, n=1):
        result = (
            self.rr.table('sites').get_all(r.args(
                r.db(self.rr.dbname).table('sites', read_mode='majority')
                .between(
                    ['ACTIVE', r.minval], ['ACTIVE', r.maxval],
                    index='sites_last_disclaimed')
                .order_by(r.desc('claimed'), 'last_disclaimed')
                .fold(
                    {}, lambda acc, site: acc.merge(
                        r.branch(
                            site.has_fields('job_id'),
                            r.object(
                                site['job_id'].coerce_to('string'),
                                acc[site['job_id'].coerce_to('string')].default(0).add(1)),
                            {})),
                    emit=lambda acc, site, new_acc: r.branch(
                        r.and_(
                            r.or_(
                                site['claimed'].not_(),
                                site['last_claimed'].lt(r.now().sub(60 * 60))),
                            r.or_(
                                site.has_fields('max_claimed_sites').not_(),
                                new_acc[site['job_id'].coerce_to('string')].le(
                                    site['max_claimed_sites']))),
                        [site['id']], []))
                .limit(n)))
            .update(
                # try to avoid a race condition resulting in multiple
                # brozzler-workers claiming the same site
                # see https://github.com/rethinkdb/rethinkdb/issues/3235#issuecomment-60283038
                r.branch(
                    r.or_(
                        r.row['claimed'].not_(),
                        r.row['last_claimed'].lt(r.now().sub(60 * 60))),
                    {'claimed': True, 'last_claimed': r.now()},
                    {}),
                return_changes=True)).run()

        self._vet_result(result,
                         replaced=list(range(n + 1)),
                         unchanged=list(range(n + 1)))
        sites = []
        for i in range(result["replaced"]):
            if result["changes"][i]["old_val"]["claimed"]:
                self.logger.warn(
                    "re-claimed site that was still marked 'claimed' "
                    "because it was last claimed a long time ago "
                    "at %s, and presumably some error stopped it from "
                    "being disclaimed",
                    result["changes"][i]["old_val"]["last_claimed"])
            site = brozzler.Site(self.rr, result["changes"][i]["new_val"])
            sites.append(site)
        if sites:
            return sites
        else:
            raise brozzler.NothingToClaim
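
The tricky part of this query is the fold()/emit pair: the accumulator object keeps a per-job_id count of the sites emitted so far, which is what lets emit enforce max_claimed_sites. Below is a minimal sketch of that per-key counting idiom in isolation, assuming the current rethinkdb Python driver and a sites table with an optional job_id field; the connection details are illustrative.

from rethinkdb import RethinkDB

r = RethinkDB()

# Count documents per job_id with fold(): the accumulator is an object whose
# keys are job ids (coerced to strings) and whose values are running counts.
# Table and field names mirror the example above; 'conn' is an open connection.
per_job_counts = r.table('sites').fold(
    {},
    lambda acc, site: acc.merge(
        r.branch(
            site.has_fields('job_id'),
            r.object(
                site['job_id'].coerce_to('string'),
                acc[site['job_id'].coerce_to('string')].default(0).add(1)),
            {})))

# per_job_counts.run(conn) would return something like {'12': 3, '15': 1}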
Example #2
    def claim_sites(self, n=1):
        self.logger.trace('claiming up to %s sites to brozzle', n)
        result = (
            self.rr.table('sites').get_all(r.args(
                r.db(self.rr.dbname).table('sites', read_mode='majority')
                .between(
                    ['ACTIVE', r.minval], ['ACTIVE', r.maxval],
                    index='sites_last_disclaimed')
                .order_by(r.desc('claimed'), 'last_disclaimed')
                .fold(
                    {}, lambda acc, site: acc.merge(
                        r.branch(
                            site.has_fields('job_id'),
                            r.object(
                                site['job_id'].coerce_to('string'),
                                acc[site['job_id'].coerce_to('string')].default(0).add(1)),
                            {})),
                    emit=lambda acc, site, new_acc: r.branch(
                        r.and_(
                            r.or_(
                                site['claimed'].not_(),
                                site['last_claimed'].lt(r.now().sub(60*60))),
                            r.or_(
                                site.has_fields('max_claimed_sites').not_(),
                                new_acc[site['job_id'].coerce_to('string')].le(site['max_claimed_sites']))),
                            [site['id']], []))
                .limit(n)))
            .update(
                # try to avoid a race condition resulting in multiple
                # brozzler-workers claiming the same site
                # see https://github.com/rethinkdb/rethinkdb/issues/3235#issuecomment-60283038
                r.branch(
                    r.or_(
                      r.row['claimed'].not_(),
                      r.row['last_claimed'].lt(r.now().sub(60*60))),
                    {'claimed': True, 'last_claimed': r.now()},
                    {}),
                return_changes=True)).run()

        self._vet_result(
                result, replaced=list(range(n+1)),
                unchanged=list(range(n+1)))
        sites = []
        for i in range(result["replaced"]):
            if result["changes"][i]["old_val"]["claimed"]:
                self.logger.warn(
                        "re-claimed site that was still marked 'claimed' "
                        "because it was last claimed a long time ago "
                        "at %s, and presumably some error stopped it from "
                        "being disclaimed",
                        result["changes"][i]["old_val"]["last_claimed"])
            site = brozzler.Site(self.rr, result["changes"][i]["new_val"])
            sites.append(site)
        self.logger.debug('claimed %s sites', len(sites))
        if sites:
            return sites
        else:
            raise brozzler.NothingToClaim
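
Both variants guard the update with the same r.or_ test: a site may be (re)claimed if it is not marked claimed, or if its last claim is more than an hour old. A minimal sketch of just that update, assuming the current rethinkdb Python driver and a sites table with claimed/last_claimed fields (connection details are illustrative):

from rethinkdb import RethinkDB

r = RethinkDB()

def reclaim_stale_sites(conn, max_age_seconds=60 * 60):
    """Claim sites that are unclaimed, or whose claim is older than max_age_seconds.

    Illustrative sketch: table and field names mirror the brozzler examples above.
    """
    return r.table('sites').update(
        r.branch(
            # claimable when not claimed, or the previous claim has gone stale
            r.or_(
                r.row['claimed'].not_(),
                r.row['last_claimed'].lt(r.now().sub(max_age_seconds))),
            {'claimed': True, 'last_claimed': r.now()},
            {}),
        return_changes=True).run(conn)

# usage (assumes a reachable server whose default db has a 'sites' table):
# conn = r.connect()
# result = reclaim_stale_sites(conn)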
Example #3
    def add_new_account(new_data, conn=None):
        cursor = r.table('accounts')\
            .filter(r.or_(r.row['user'].eq(new_data['user']), r.row['email'].eq(new_data['email'])))\
            .limit(1)\
            .run(conn)

        if len(cursor.items):
            doc = cursor.next()
            if doc['user'] == new_data['user']:
                return 'username-taken'
            else:
                return 'email-taken'
        else:
            new_data['salt'], new_data['password'] = (
                AccountManager._salt_and_hash_password(new_data['password']))
            new_data['createDate'] = datetime.now(tz=get_localzone())

            result = r.table('accounts').insert(new_data).run(conn)

            if result and result['inserted'] == 1:
                new_data['id'] = result['generated_keys'][0]
                return new_data
            else:
                logger.error(
                    '[add new account][insert_failed]: {0}'.format(result))
                return None
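
The duplicate check above relies on r.or_ over two .eq() comparisons against r.row. A minimal standalone sketch of that query, assuming an accounts table and an open connection conn (names mirror the example):

from rethinkdb import RethinkDB

r = RethinkDB()

def find_conflicting_account(conn, user, email):
    """Return the first account matching either the username or the email, else None.

    Illustrative sketch; table and field names follow add_new_account above.
    """
    cursor = (
        r.table('accounts')
        .filter(r.or_(r.row['user'].eq(user), r.row['email'].eq(email)))
        .limit(1)
        .run(conn))
    docs = list(cursor)  # at most one document because of limit(1)
    return docs[0] if docs else None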
Example #4
    def get_job_submissions_with_states(self, states=[]):
        if len(states) > 0:
            return [dict(doc) for doc in self.job_submission_table
                    .filter(r.or_(*[r.row['state'] == s for s in states]))
                    .run(self.conn)]
        else:
            return [dict(doc) for doc in self.job_submission_table.run(self.conn)]
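
Because r.or_ accepts a variable number of ReQL terms, the example builds one predicate per state and splats the list into a single call. A minimal sketch of the same pattern, assuming a job_submission table (the table name is an assumption) and an open connection conn:

from rethinkdb import RethinkDB

r = RethinkDB()

def submissions_in_states(conn, states):
    """Fetch documents whose 'state' matches any of the given values.

    Illustrative sketch; an empty list of states returns every document,
    matching the behaviour of the example above.
    """
    query = r.table('job_submission')
    if states:
        # one equality predicate per state, OR-ed together in a single r.or_
        query = query.filter(r.or_(*[r.row['state'] == s for s in states]))
    return list(query.run(conn))

# usage:
# submissions_in_states(conn, ['queued', 'running'])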
Example #5
    async def get_job_items(self, job_id, include_success, include_error,
                            include_exception, limit, offset):
        ''' Get items from a job. '''
        items = list()
        filters = []

        if include_success:
            filters.append(r.row['is_success'] == True)

        if include_error:
            filters.append((r.row['is_success'] == False) &
                           (~r.row.has_fields('exception')))

        if include_exception:
            filters.append((r.row['is_success'] == False) &
                           (r.row.has_fields('exception')))

        if len(filters) == 0:
            raise Exception('You must set at least one include_* flag to true.')

        def get_body(item):
            return {
                'join': r.branch(
                    item.has_fields('body_id'),
                    r.table('response_body').get(item['body_id']),
                    None
                )
            }

        base_query = (
            r.table('response')
             .between((job_id, r.minval),
                      (job_id, r.maxval),
                      index='sync_index')
             .filter(r.or_(*filters))
        )

        query = (
             base_query
             .skip(offset)
             .limit(limit)
             .merge(get_body)
             .without('body_id')
        )

        async with self._db_pool.connection() as conn:
            total_count = await base_query.count().run(conn)
            cursor = await query.run(conn)
            async for item in cursor:
                items.append(item)
            await cursor.close()

        return total_count, items
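
Each include_* flag contributes one predicate, built from the overloaded ==, & and ~ operators on r.row, and the final filter() OR-s whichever predicates were enabled. A minimal sketch of that predicate construction as a standalone helper, assuming the same is_success and exception fields; a synchronous driver is used here for brevity:

from rethinkdb import RethinkDB

r = RethinkDB()

def build_outcome_filter(include_success, include_error, include_exception):
    """Build one ReQL predicate OR-ing the selected outcome classes.

    Illustrative sketch; field names mirror get_job_items above.
    """
    filters = []
    if include_success:
        filters.append(r.row['is_success'] == True)
    if include_error:
        # failed, and no exception was recorded
        filters.append((r.row['is_success'] == False) &
                       (~r.row.has_fields('exception')))
    if include_exception:
        # failed, and an exception was recorded
        filters.append((r.row['is_success'] == False) &
                       (r.row.has_fields('exception')))
    if not filters:
        raise ValueError('set at least one include_* flag')
    return r.or_(*filters)

# usage:
# r.table('response').filter(build_outcome_filter(True, False, True)).run(conn)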
Example #6
        def type_check(v):
            def type_to_reql(t):
                check = v.type_of() == schema_to_reql_type[t]
                if t == 'integer':
                    # Add additional check for integers
                    check = check & (v.floor() == v)
                return check

            if isinstance(arg, list):
                check = r.or_(*map(type_to_reql, arg))
            else:
                check = type_to_reql(arg)
            return check
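
type_check maps JSON-Schema style type names onto ReQL type_of() results and OR-s them when the schema allows several types. A minimal self-contained sketch under that assumption; the schema_to_reql_type mapping, table and field names below are illustrative, not taken from the original project:

from rethinkdb import RethinkDB

r = RethinkDB()

# assumed mapping from JSON-Schema type names to ReQL type_of() results
schema_to_reql_type = {
    'string': 'STRING',
    'number': 'NUMBER',
    'integer': 'NUMBER',   # ReQL has no separate integer type
    'boolean': 'BOOL',
    'object': 'OBJECT',
    'array': 'ARRAY',
    'null': 'NULL',
}

def type_check(value, expected):
    """Build a ReQL boolean term checking `value` against one or more type names."""
    def type_to_reql(t):
        check = value.type_of() == schema_to_reql_type[t]
        if t == 'integer':
            # integers are NUMBERs whose floor equals themselves
            check = check & (value.floor() == value)
        return check

    if isinstance(expected, list):
        return r.or_(*map(type_to_reql, expected))
    return type_to_reql(expected)

# usage: keep only rows whose 'age' field is an integer or a string
# r.table('users').filter(lambda doc: type_check(doc['age'], ['integer', 'string'])).run(conn)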
Example #7
async def create_serials_message(search_query, page_number, limit=10):
    res = await Serial.manager.execute(
        Serial.manager.table
        .order_by(r.desc("year"))
        .filter(
            r.and_(
                r.or_(
                    r.row["year"].default(2019) >= 2017,
                    r.row["finished"] == False),
                r.row["search_field"].match(search_query.lower())))
        .slice(page_number * limit - limit, page_number * limit + 1)
    )

    log.debug(f"Create serial message page {res}")

    msg = text(
        *[
            text(
                hbold(serial["title"]),
                f'({quote_html(serial["origin_title"])})' if serial["origin_title"] else "",
                str(serial["year"]) if serial["year"] else "",
                hbold("\nЗавершён" if serial["finished"] else ""),
                f'\n/serial_{serial["id"]}\n')
            for serial in res
        ],
        sep="\n")

    inline_pagination = types.InlineKeyboardMarkup(row_width=2)

    btn_row = [
        types.InlineKeyboardButton(">", callback_data="next_search_page")
    ]
    if page_number > 1:
        btn_row.insert(
            0, types.InlineKeyboardButton("<",
                                          callback_data="prev_search_page"))
        inline_pagination.add(
            types.InlineKeyboardButton("В начало",
                                       callback_data="start_search_page"))
    inline_pagination.add(*btn_row)

    return {
        "text": msg,
        "reply_markup": inline_pagination,
        "parse_mode": ParseMode.HTML
    }
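
The filter above nests r.or_ inside r.and_: keep serials that are recent (year >= 2017, with missing years defaulting to 2019) or not yet finished, and whose search_field matches the query. A minimal sketch of the same predicate as a standalone helper, assuming the same field names; the table name in the usage line is an assumption:

from rethinkdb import RethinkDB

r = RethinkDB()

def serial_search_predicate(search_query):
    """(recent OR unfinished) AND search_field matches the query text.

    Illustrative sketch; field names mirror create_serials_message above.
    """
    return r.and_(
        r.or_(
            r.row['year'].default(2019) >= 2017,   # a missing year counts as recent
            r.row['finished'] == False),
        r.row['search_field'].match(search_query.lower()))

# usage:
# r.table('serial').order_by(r.desc('year')).filter(serial_search_predicate('dark')).run(conn)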
Example #8
    def add_new_account(new_data, conn=None):
        cursor = r.table('accounts')\
            .filter(r.or_(r.row['user'].eq(new_data['user']), r.row['email'].eq(new_data['email'])))\
            .limit(1)\
            .run(conn)

        if len(cursor.items):
            doc = cursor.next()
            if doc['user'] == new_data['user']:
                return 'username-taken'
            else:
                return 'email-taken'
        else:
            new_data['salt'], new_data['password'] = AccountManager._salt_and_hash_password(new_data['password'])
            new_data['createDate'] = datetime.now(tz=get_localzone())

            result = r.table('accounts').insert(new_data).run(conn)

            if result and result['inserted'] == 1:
                new_data['id'] = result['generated_keys'][0]
                return new_data
            else:
                logger.error('[add new account][insert_failed]: {0}'.format(result))
                return None
Example #9
    def get(self, id=None, o=None, a=None, b=None):
        """
            - GET /testbeds
                (public) Testbed list

            - GET /testbeds/<id>
                (public) Testbed with <id>

            - GET /testbeds/<id>/(resources)
                (auth) Resource list of the testbed <id>

            - GET /testbeds/<id>/resources?timestamp_start=<XXX>&timestamp_end=<XXX>
                (auth) Resource list of the testbed <id> that are available within a time range

            - GET /testbeds/<id>/leases
                Leases list of the testbed with the <id>

            - GET /testbeds/<id>/leases?timestamp_start=<XXX>&timestamp_end=<XXX>
                (auth) Leases list of the testbed <id> within a time range

            :return:
            """

        response = []
        current_user = self.get_current_user()

        # [?timestamp_start=<XXX>&timestamp_end=<XXX>]
        ts = self.get_argument('timestamp_start',None)
        te = self.get_argument('timestamp_end',None)

        # GET /testbeds
        if not id and not o:
            cursor = yield r.table('testbeds') \
                .run(self.dbconnection)
            while (yield cursor.fetch_next()):
                testbed = yield cursor.next()
                response.append(testbed)


        # GET /testbeds/<id>
        elif not o and id and self.isUrn(id):

            cursor = yield r.table('testbeds') \
                .filter({'id': id}) \
                .run(self.dbconnection)
            while (yield cursor.fetch_next()):
                testbed = yield cursor.next()
                response.append(testbed)

        # GET /testbeds/<id>/resources
        elif id and self.isUrn(id) and o == 'resources' and not ts and not te:
            cursor = yield r.table(o) \
                .filter(lambda resource: resource["testbed"] == id) \
                .run(self.dbconnection)

            while (yield cursor.fetch_next()):
                item = yield cursor.next()
                response.append(item)

        # GET /testbeds/<id>/resources?timestamp_start=<XXX>&timestamp_end=<XXX>
        elif id and self.isUrn(id) and o == 'resources':
            try:
                nb_leases = yield r.table("leases").count().run(self.dbconnection)
                if nb_leases > 0:
                    # Resources NOT in Leases
                    cursor = yield r.table(o) \
                        .filter(lambda resource: resource["testbed"] == id) \
                        .filter({'available':'true'}) \
                        .filter( lambda resource:
                            r.table("leases").map(lambda l:
                                l['resources'].coerce_to('array')
                            ).reduce(lambda left, right:
                                left.set_union(right)
                            ).contains(resource['id']).not_() \
                        ).run(self.dbconnection)

                    while (yield cursor.fetch_next()):
                        item = yield cursor.next()
                        response.append(item)

                    if ts and te:
                        # List of Resources ids in Leases but not in the given time range
                        in_leases = yield r.table("leases").filter(lambda l:
                            r.or_(l['start_time'].gt(int(te)),l['end_time'].lt(int(ts)))
                        ).map(lambda l:
                            l['resources'].coerce_to('array')
                        ).reduce(lambda left, right:
                            left.set_union(right)
                        ).map(lambda x:
                            r.table('resources').get(x)
                        ).filter({'testbed':id}).run(self.dbconnection)
                        response = response + in_leases

                    if ts and not te:
                        # List of Resources ids in Leases but not in the given time range
                        in_leases = yield r.table("leases").filter(lambda l:
                            l['end_time'].lt(int(ts))
                        ).map(lambda l:
                            l['resources'].coerce_to('array')
                        ).reduce(lambda left, right:
                            left.set_union(right)
                        ).map(lambda x:
                            r.table('resources').get(x)
                        ).filter({'testbed':id}).run(self.dbconnection)
                        response = response + in_leases

                    if not ts and te:
                        # List of Resources ids in Leases but not in the given time range
                        in_leases = yield r.table("leases").filter(lambda l:
                            l['start_time'].gt(int(te))
                        ).map(lambda l:
                            l['resources'].coerce_to('array')
                        ).reduce(lambda left, right:
                            left.set_union(right)
                        ).map(lambda x:
                            r.table('resources').get(x)
                        ).filter({'testbed':id}).run(self.dbconnection)
                        response = response + in_leases
                else:
                    # All available Resources (No Leases in DB)
                    cursor = yield r.table(o) \
                        .filter(lambda resource: resource["testbed"] == id) \
                        .filter({'available':'true'}) \
                        .run(self.dbconnection)
                    while (yield cursor.fetch_next()):
                        item = yield cursor.next()
                        response.append(item)
            except Exception as e:
                logger.exception(e)

        # GET /testbeds/<id>/leases
        elif id and self.isUrn(id) and o == 'leases' and not ts and not te:
            cursor = yield r.table(o) \
                .filter(lambda ls: ls["testbed"] == id) \
                .run(self.dbconnection)

            while (yield cursor.fetch_next()):
                item = yield cursor.next()
                response.append(item)

        # GET /testbeds/<id>/leases?timestamp_start=<XXX>&timestamp_end=<XXX>
        elif id and self.isUrn(id) and o == 'leases' and ts and te:
            cursor = yield r.table(o) \
                .filter(lambda ls: ls["testbed"] == id) \
                .filter(lambda l:
                    r.and_(l['start_time'].ge(int(ts)),l['end_time'].le(int(te)))
                ).run(self.dbconnection)

            while (yield cursor.fetch_next()):
                item = yield cursor.next()
                response.append(item)

        # GET /testbeds/<id>/leases?timestamp_start=<XXX>
        elif id and self.isUrn(id) and o == 'leases' and ts and not te:
            cursor = yield r.table(o) \
                .filter(lambda ls: ls["testbed"] == id) \
                .filter(lambda l:
                    l['start_time'].ge(int(ts))
                ).run(self.dbconnection)

            while (yield cursor.fetch_next()):
                item = yield cursor.next()
                response.append(item)

        # GET /testbeds/<id>/leases?timestamp_end=<XXX>
        elif id and self.isUrn(id) and o == 'leases' and not ts and te:
            cursor = yield r.table(o) \
                .filter(lambda ls: ls["testbed"] == id) \
                .filter(lambda l:
                    l['end_time'].le(int(te))
                ).run(self.dbconnection)

            while (yield cursor.fetch_next()):
                item = yield cursor.next()
                response.append(item)
        else:
            self.userError("invalid request")
            return
        self.finish(json.dumps({"result": response}, cls=myJSONEncoder))
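
The time-range branches all hinge on one r.or_ test: a lease falls outside the window [ts, te] when it starts after te or ends before ts. A minimal sketch of that check as a reusable query, assuming a leases table with integer start_time/end_time fields and an open synchronous connection conn:

from rethinkdb import RethinkDB

r = RethinkDB()

def resources_leased_outside_window(conn, ts, te):
    """Collect resource ids from leases that do not overlap the window [ts, te].

    Illustrative sketch; table and field names mirror the handler above.
    Note that reduce() errors on an empty sequence, which is why the handler
    first guards on a count of the leases table.
    """
    return (
        r.table('leases')
        # outside the window: the lease starts after it ends, or ends before it starts
        .filter(lambda l: r.or_(l['start_time'].gt(te), l['end_time'].lt(ts)))
        .map(lambda l: l['resources'].coerce_to('array'))
        .reduce(lambda left, right: left.set_union(right))
        .run(conn))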
Example #10
    def or_(self, a):
        '''curried version of r.or_ (accepts only 2 arguments)'''
        return QC(lambda b: r.or_(b, a))
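
QC itself is not shown, but the method reads as a curried r.or_: the operand a is captured now and the other operand b is supplied later, when the wrapped callable is applied. A minimal sketch with a toy stand-in for QC; the wrapper class, the identity-wrapped starting value, and the table and field names in the usage lines are all assumptions:

from rethinkdb import RethinkDB

r = RethinkDB()

class QC:
    """Toy stand-in for the wrapper class used above (the real QC is not shown)."""
    def __init__(self, fn):
        self._fn = fn

    def __call__(self, b):
        return self._fn(b)

    def or_(self, a):
        '''curried version of r.or_ (accepts only 2 arguments)'''
        return QC(lambda b: r.or_(b, a))

# pre-bind the second operand now, supply the first one later:
# or_admin = QC(lambda x: x).or_(r.row['is_admin'] == True)
# r.table('users').filter(or_admin(r.row['active'] == True)).run(conn)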