# Example #1
# 0
    def reset(self, client, rj, futures=None):
        """
        Stop the algorithm and clean up its Redis/Dask state.

        The algorithm will be deleted shortly after this function is
        called.

        Parameters
        ----------
        client :
            Dask client used to cancel futures and restart workers.
        rj :
            Redis JSON client holding this sampler's shared state.
        futures : list, optional
            Outstanding Dask futures to cancel before restarting.

        Returns
        -------
        bool
            ``False`` when no reset was requested (the ``"reset"`` key is
            falsy); ``True`` once cleanup completed.
        """
        reset = rj.jsonget("reset", root)
        logger.warning("reset=%s for %s", reset, self.ident)
        if not reset:
            return False

        # Serialized state/model blobs are raw bytes, so use a separate
        # client with decode_responses=False to delete them.
        logger.warning(f"Deleting various keys for {self.ident}")
        rj2 = RedisClient(host="redis", port=6379, decode_responses=False)
        rj2.delete(f"state-{self.ident}")
        rj2.delete(f"model-{self.ident}")
        rj.jsondel(f"alg-perf-{self.ident}", root)
        rj.delete(f"alg-perf-{self.ident}")

        # Clear answers
        logger.warning(f"Clearing answers for {self.ident}")
        self.get_answers(rj, clear=True)

        # Clear queries in progressively wider score ranges (2**4..2**17);
        # stop as soon as the sorted set reports empty.
        logger.warning(f"Clearing queries for {self.ident}")
        key = f"alg-{self.ident}-queries"
        for k in range(4, 18):
            limit = 2**k
            rj.zremrangebyscore(key, -limit, limit)
            sleep(0.1)
            n_queries = rj.zcard(key)
            logger.warning(f"n_queries={n_queries}")
            if not n_queries:
                break
        # Second pass: delete the key outright in case scores fell outside
        # the ranges above.
        logger.warning(f"Clearing queries again for {self.ident}")
        self.clear_queries(rj)

        if futures:
            for future in futures:
                if future:
                    client.cancel(future, force=True)

        logger.warning(f"Restarting Dask client for {self.ident}")
        f = client.restart(timeout="5s")
        try:
            client.sync(f)
        except Exception:
            # Best-effort: a restart timeout must not block the rest of the
            # cleanup, but it should not be silently invisible either.
            # (Was a bare `except: pass`, which also swallowed
            # KeyboardInterrupt/SystemExit.)
            logger.exception("Dask client restart failed; continuing reset")

        client.run(garbage_collect)

        logger.warning(f"Setting stopped-{self.ident}")
        rj.jsonset(f"stopped-{self.ident}", Path("."), True)
        logger.warning(f"All done stopping {self.ident}")
        return True
def create_nsteps_map(redserv=None,
                      nsteps_map_key=REDIS_FTWC_NSTEPS_MAP,
                      nsteps_index_key=REDIS_FTWC_NSTEPS_INDEX,
                      gameset_keys=FTWC_GAME_SETS):
    """Index number of steps <-> game names after all playthroughs have been saved to redis.

    Rebuilds the hash ``nsteps_map_key`` (game name -> nsteps) and the
    per-count sets ``{nsteps_index_key}{n}`` from scratch, then prints a
    summary sorted by step count.

    Parameters
    ----------
    redserv : redis client, optional
        Existing connection to reuse. If None, a local connection is
        opened and closed before returning.
    nsteps_map_key : str
        Redis hash key mapping game name -> number of steps.
    nsteps_index_key : str
        Prefix for the per-step-count set keys.
    gameset_keys : iterable of str
        Redis set keys whose members are the game names to index.
    """
    if redserv is None:
        _rj = Client(host='localhost', port=6379, decode_responses=True)
    else:
        _rj = redserv

    # try/finally so a locally-created client is closed even on error
    # (previously it leaked if an exception occurred before the end).
    try:
        # Pick the playthrough namespace matching the requested map.
        if nsteps_map_key == REDIS_GATA_NSTEPS_MAP:
            redisbasekey = REDIS_GATA_PLAYTHROUGHS
        elif nsteps_map_key == REDIS_FTWC_NSTEPS_MAP:
            redisbasekey = REDIS_FTWC_PLAYTHROUGHS
        else:
            assert False, "Unknown Redis nsteps_map_key " + nsteps_map_key

        # Drop any stale index entries before rebuilding.
        for key in _rj.keys(nsteps_index_key + "*"):
            print("Will delete:", key)
            _rj.delete(key)
        print(_rj.hlen(nsteps_map_key))
        _rj.delete(nsteps_map_key)

        for setkey in gameset_keys:
            game_names_ = _rj.smembers(setkey)
            for _gn in game_names_:
                nsteps = retrieve_playthrough_nsteps(_gn,
                                                     redis=_rj,
                                                     redisbasekey=redisbasekey)
                if nsteps > 0:
                    print(nsteps, _gn)
                    _rj.hset(nsteps_map_key, _gn, nsteps)
                    _rj.sadd(f"{nsteps_index_key}{nsteps}", _gn)

        print(len(_rj.keys(nsteps_index_key + "*")), _rj.hlen(nsteps_map_key))
        total = 0
        sort_list = []
        for key in _rj.keys(nsteps_index_key + "*"):
            # Index keys end with ':<nsteps>' -- presumably the prefix
            # contains a ':'; TODO confirm against key construction above.
            nsteps = int(key.split(':')[-1])
            num_games = _rj.scard(key)
            total += num_games
            sort_list.append((nsteps, num_games, key))
        sort_list.sort()
        for nsteps, num_games, setkey in sort_list:
            print(f"[{nsteps}]\t {num_games}\t {setkey}")
    finally:
        if redserv is None:
            _rj.close()
# Example #3
# 0
 def clear_queries(self, rj: RedisClient) -> bool:
     """Remove every query this sampler has posted to the database.

     Deletes the ``alg-{ident}-queries`` key outright and reports success.
     """
     queries_key = f"alg-{self.ident}-queries"
     rj.delete(queries_key)
     return True
class JobsAPI(Resource):
    """Flask-RESTful resource implementing a Redis-backed job queue.

    Each job is a JSON document ``{'items': [...], 'done': [...], 'error': [...]}``
    stored under a UUID key. ``hold_<item>`` keys with a TTL mark items
    that have been handed out to a worker and are awaiting completion.
    """

    def __init__(self):
        # One Redis JSON client per resource instance; decode_responses=True
        # so keys and values come back as str.
        self.redis = Client(host='127.0.0.1', port=6379, decode_responses=True)

    def get(self, **kwargs):
        """Handle GET requests, dispatched on ``request.url_rule.rule``.

        Routes:
          * ``/jobs/<job_id>/next``  -- reserve the next unheld item by
            setting ``hold_<item>`` with an ``expired_duration`` TTL.
          * ``/jobs/<job_id>/items`` -- list active (or all) items of a job.
          * anything else            -- list all job ids (keys that do not
            start with ``hold_``).
        """
        if kwargs.get('job_id'):
            job_id = kwargs.get('job_id')
            if self.redis.exists(job_id):
                parser = reqparse.RequestParser()

                if request.url_rule.rule == '/jobs/<string:job_id>/next':
                    # TTL (seconds) for the hold on the reserved item.
                    parser.add_argument('expired_duration',
                                        type=int,
                                        default=300)
                    args = parser.parse_args(strict=True)
                    if self.redis.jsonget(job_id, Path('.items')):
                        ttl = args.get('expired_duration')
                        items = self.redis.jsonget(job_id, Path('.items'))
                        for item in items:
                            # First item without an active hold wins.
                            # NOTE(review): items already in '.done' are not
                            # skipped here, and items.index(item) gives the
                            # first occurrence -- wrong index if the job
                            # contains duplicate items. Confirm intended.
                            if not self.redis.exists(f'hold_{item}'):
                                self.redis.execute_command(
                                    'SET', f'hold_{item}', job_id)
                                self.redis.execute_command(
                                    'EXPIRE', f'hold_{item}', ttl)
                                return output_json(
                                    {
                                        'status': 'ok',
                                        'job_id': job_id,
                                        'ttl': ttl,
                                        'index': items.index(item),
                                        'item': item
                                    }, 200)
                    # Reached when '.items' is empty or every item is held.
                    return output_json(
                        {
                            'status': 'error',
                            'job_id': job_id,
                            'description': 'Items list is empty.'
                        }, 400)

                if request.url_rule.rule == '/jobs/<string:job_id>/items':
                    # active=true (default): only items that are neither
                    # held nor already done; active=false: items + done.
                    parser.add_argument('active',
                                        default='true',
                                        choices=('true', 'false'))
                    args = parser.parse_args(strict=True)
                    items = self.redis.jsonget(job_id, Path('.items'))
                    done_items = self.redis.jsonget(job_id, Path('.done'))
                    if args.get('active') == 'true':
                        active_items = []
                        for item in items:
                            # NOTE(review): items.index(item) misbehaves for
                            # duplicate items (always the first index).
                            if not self.redis.exists(f'hold_{item}') and \
                                    items.index(item) not in done_items:
                                active_items.append(item)
                        return output_json(
                            {
                                'status': 'ok',
                                'job_id': job_id,
                                'items': active_items
                            }, 200)
                    return output_json(
                        {
                            'status': 'ok',
                            'job_id': job_id,
                            'items': items + done_items
                        }, 200)
            else:
                return output_json(
                    {
                        'status': 'error',
                        'job_id': job_id,
                        'description': 'The job is not in the queue.'
                    }, 400)

        # No job_id: list every job key, filtering out the hold_* markers.
        return output_json(
            {
                'status': 'ok',
                'jobs': [i for i in self.redis.keys() if i[:5] != 'hold_']
            }, 200)

    def post(self, **kwargs):
        """Handle POST requests, dispatched on ``request.url_rule.rule``.

        Routes:
          * ``.../items/<item_index>/done``  -- mark an item done, release
            its hold.
          * ``.../items/<item_index>/error`` -- mark an item errored,
            release its hold.
          * anything else -- create a new job from a JSON list body.
        """
        if request.url_rule.rule == '/jobs/<string:job_id>/items/<int:item_index>/done':
            job_id = kwargs.get('job_id')
            item_index = kwargs.get('item_index')
            done_item = self.redis.jsonget(job_id, Path('.items'))[item_index]
            if item_index in self.redis.jsonget(job_id, Path('.done')):
                return output_json(
                    {
                        'status': 'error',
                        'description': 'The item already was marked as done.',
                        'job_id': job_id,
                        'index': item_index,
                        'item': done_item
                    }, 400)
            # Release the hold and record the index in the '.done' array.
            self.redis.delete(f'hold_{done_item}')
            self.redis.jsonarrappend(job_id, Path('.done'), item_index)
            return output_json(
                {
                    'status': 'ok',
                    'description': 'The item is marked as done.',
                    'job_id': job_id,
                    'index': item_index,
                    'item': done_item
                }, 200)

        if request.url_rule.rule == '/jobs/<string:job_id>/items/<int:item_index>/error':
            job_id = kwargs.get('job_id')
            item_index = kwargs.get('item_index')
            error_item = self.redis.jsonget(job_id, Path('.items'))[item_index]
            if item_index in self.redis.jsonget(job_id, Path('.error')):
                return output_json(
                    {
                        'status': 'error',
                        'description': 'The item already was marked as error.',
                        'job_id': job_id,
                        'index': item_index,
                        'item': error_item
                    }, 400)
            # Release the hold and record the index in the '.error' array.
            self.redis.delete(f'hold_{error_item}')
            self.redis.jsonarrappend(job_id, Path('.error'), item_index)
            return output_json(
                {
                    'status': 'ok',
                    'description': 'The item is marked as error.',
                    'job_id': job_id,
                    'index': item_index,
                    'item': error_item
                }, 200)

        # Job creation: body must be a non-empty JSON list of items.
        if isinstance(request.json, list) and request.json:
            job_id = str(uuid.uuid4())

            data = {'items': request.json, 'done': [], 'error': []}

            # NOTE(review): if jsonset returns falsy, this falls through and
            # the method returns None (no HTTP response) -- confirm intended.
            if self.redis.jsonset(job_id, Path.rootPath(), data):
                return output_json(
                    {
                        'status': 'ok',
                        'description': 'Job is added to queue.',
                        'job_id': job_id
                    }, 201)
        else:
            return output_json(
                {
                    'status': 'error',
                    'description': 'Wrong request!'
                }, 400)

    def delete(self, job_id):
        """Delete a job document; 400 if the job id is unknown."""
        if self.redis.exists(job_id):
            self.redis.delete(job_id)
            return output_json(
                {
                    'status': 'ok',
                    'description': 'Job is deleted.'
                }, 200)
        else:
            return output_json(
                {
                    'status': 'error',
                    'description': 'The job is not in the queue.'
                }, 400)
# Example #5
# 0
def _reset(timeout: float = 5):
    """Reset the experiment: stop samplers, flush Redis, archive the RDB dump.

    Parameters
    ----------
    timeout : float, default 5
        Maximum number of seconds to wait for background samplers to
        acknowledge the stop request. Falsy values disable the deadline.

    Returns
    -------
    dict
        ``{"success": True}`` once the databases are flushed and the base
        keys re-initialized.
    """
    _save(rj)

    # Stop background jobs (ie adaptive algs)
    rj.jsonset("reset", root, True)
    rj2 = Client(host="redis", port=6379, decode_responses=False)
    if "samplers" in rj.keys():
        samplers = rj.jsonget("samplers")
        stopped = {name: False for name in samplers}
        deadline = time() + timeout
        for k in itertools.count():
            # Re-assert the reset flag each pass in case a sampler cleared it.
            rj.jsonset("reset", root, True)
            for name in stopped:
                if f"stopped-{name}" in rj.keys():
                    stopped[name] = rj.jsonget(f"stopped-{name}", root)
            if all(stopped.values()):
                logger.warning(f"stopped={stopped}")
                break
            sleep(1)
            logger.warning(
                f"Waited {k + 1} seconds algorithms... stopped? {stopped}"
                f" (rj.keys() == {rj.keys()}"
            )
            if timeout and time() >= deadline:
                logger.warning(f"Hit timeout={timeout} w/ stopped={stopped}. Breaking!")
                break

        logger.warning("    starting with clearing queries...")
        for ident in samplers:
            rj2.delete(f"alg-{ident}-queries")
    # Was an f-string with no placeholders; plain literal is equivalent.
    httpx.post("http://localhost:8400/reset/")

    logger.warning("Trying to completely flush database...")

    # Belt-and-braces flush: purge, async flush, explicit per-key delete,
    # then a final synchronous flush -- on both clients.
    for _rj in [rj, rj2]:
        _rj.memory_purge()
        _rj.flushall(asynchronous=True)
        _rj.flushdb(asynchronous=True)
        _rj.memory_purge()
        for k in _rj.keys():
            _rj.delete(k)
        _rj.flushdb(asynchronous=False)
        _rj.flushall(asynchronous=False)

    # First 16 chars of the ISO timestamp: "YYYY-MM-DDTHH:MM".
    now = datetime.now().isoformat()[: 10 + 6]

    # Archive the RDB snapshot so a Redis restart doesn't restore old state.
    save_dir = ROOT_DIR / "out"
    files = [f.name for f in save_dir.glob("*")]
    logger.warning(f"dump_rdb in files? {'dump.rdb' in files}")
    if "dump.rdb" in files:
        logger.warning(f"Moving dump.rdb to dump-{now}.rdb")
        shutil.move(str(save_dir / "dump.rdb"), str(save_dir / f"dump-{now}.rdb"))
        files = [f.name for f in save_dir.glob("*")]
        logger.warning(f"after moving, dump_rdb in files? {'dump.rdb' in files}")
    files = [f.name for f in save_dir.glob("*")]
    assert "dump.rdb" not in files

    logger.warning("After reset, rj.keys=%s", rj.keys())
    # Re-seed the base keys the frontend expects after a wipe.
    rj.jsonset("responses", root, {})
    rj.jsonset("start_time", root, -1)
    rj.jsonset("start_datetime", root, "-1")
    rj.jsonset("exp_config", root, {})
    return {"success": True}