def onboarding_is_valid(onboarding_data):
    """Validate a worker's onboarding submission.

    Expects ``onboarding_data["outputs"]["answer"]`` to be a newline-separated
    list of commands typed by the worker.  Returns True only when the
    submission passes every quality check below.
    """
    outputs = onboarding_data["outputs"]
    answer_str = outputs["answer"]
    # NOTE: depending on which OS Turker uses, there could be carriage returns \r
    # or just newlines \n; splitlines() handles all cases.
    commands = answer_str.splitlines()
    # filter empty commands (blank lines between entries)
    filtered_commands = [x for x in commands if x != ""]
    # Number check: require at least 3 *non-empty* commands.  Counting raw
    # lines here would let blank lines pad out the total — and when every
    # line is blank, the average below would divide by zero.
    if len(filtered_commands) < 3:
        return False
    # Length check: the average number of words per command must be >= 2
    commands_split = [x.split(" ") for x in filtered_commands]
    avg_words_in_commands = sum(map(len, commands_split)) / len(commands_split)
    if avg_words_in_commands < 2:
        return False
    # Diversity check: reject if every command starts with the same word
    first_words = [x[0] for x in commands_split]
    if len(set(first_words)) == 1:
        return False
    # TODO: Grammar check: Check that there is punctuation, capitals
    return True


def main(cfg: DictConfig) -> None:
    """Launch the static Mephisto task, gating workers on onboarding."""
    shared_state = SharedStaticTaskState(
        onboarding_data={},
        validate_onboarding=onboarding_is_valid,
    )

    db, cfg = load_db_and_process_config(cfg)
    operator = Operator(db)

    operator.validate_and_run_config(cfg.mephisto, shared_state)
    operator.wait_for_runs_then_shutdown(skip_input=True, log_rate=30)
Example #2
0
File: run.py  Project: sagar-spkt/ParlAI
def main(cfg: DictConfig) -> None:
    """Configure and launch the role-qualified ParlAI chat task."""
    db, cfg = load_db_and_process_config(cfg)

    # Build the per-world option dicts from the hydra config.
    chat_world_opt = get_world_opt(cfg)
    onboard_world_opt = get_onboarding_world_opt(cfg)
    shared_state = SharedParlAITaskState(
        world_opt=chat_world_opt,
        onboarding_world_opt=onboard_world_opt,
    )

    role_qual_name = chat_world_opt[constants.ROLE_QUALIFICATION_NAME_KEY]
    check_role_training_qualification(
        db=db,
        qname=role_qual_name,
        requester_name=cfg.mephisto.provider.requester_name,
    )

    # Surface task parameters to the frontend config.
    shared_state.task_config['minTurns'] = chat_world_opt['min_turns']
    shared_state.task_config['onboardingPersona'] = constants.ONBOARDING_PERSONA
    shared_state.worker_can_do_unit = get_worker_eval_function(
        role_qual_name,
        onboard_world_opt['onboarding_qualification'],
    )

    add_banned_words_frontend_conf(
        shared_state, cfg.mephisto.blueprint.banned_words_file
    )

    operator = Operator(db)
    operator.validate_and_run_config(cfg.mephisto, shared_state)
    operator.wait_for_runs_then_shutdown(skip_input=True, log_rate=300)
    update_persona_use_counts_file(
        cfg.mephisto.blueprint.persona_counts_file,
        chat_world_opt['prev_persona_count'],
    )
Example #3
0
File: run.py  Project: tsaoud/ParlAI
def main(cfg: DictConfig) -> None:
    """Validate the Mephisto config and run the task until completion."""
    db, cfg = load_db_and_process_config(cfg)
    op = Operator(db)
    op.validate_and_run_config(run_config=cfg.mephisto, shared_state=None)
    op.wait_for_runs_then_shutdown(
        skip_input=True, log_rate=cfg.monitoring_log_rate
    )
Example #4
0
File: run.py  Project: sunminyu/ParlAI
def main(cfg: DictConfig) -> None:
    """Launch a ParlAI chat task backed by a teacher agent."""
    db, cfg = load_db_and_process_config(cfg)

    # Translate cfg.teacher entries into ParlAI command-line flags.
    teacher_args = []
    for key, value in cfg.teacher.items():
        teacher_args.append('--' + key)
        teacher_args.append(value)
    parser = ParlaiParser(True, False)
    opt = parser.parse_args(teacher_args)
    agent = RepeatLabelAgent(opt)
    teacher = create_task(opt, agent).get_task_agent()

    world_opt = {"turn_timeout": cfg.turn_timeout, "teacher": teacher}

    custom_bundle_path = cfg.mephisto.blueprint.get("custom_source_bundle", None)
    if custom_bundle_path is not None:
        # A custom frontend bundle must already be compiled before use.
        assert os.path.exists(custom_bundle_path), (
            "Must build the custom bundle with `npm install; npm run dev` from within "
            f"the {TASK_DIRECTORY}/webapp directory in order to demo a custom bundle "
        )
        world_opt["send_task_data"] = True

    shared_state = SharedParlAITaskState(
        world_opt=world_opt, onboarding_world_opt=world_opt
    )

    operator = Operator(db)
    operator.validate_and_run_config(cfg.mephisto, shared_state)
    operator.wait_for_runs_then_shutdown(skip_input=True, log_rate=30)
Example #5
0
def run_static_task(cfg: DictConfig, task_directory: str):
    """
    Run static task, given configuration.
    """
    db, cfg = load_db_and_process_config(cfg)
    print(f'\nHydra config:\n{OmegaConf.to_yaml(cfg)}')

    random.seed(42)

    # Default to a task-specific qualification name to avoid soft-block
    # collisions with other tasks.
    task_name = cfg.mephisto.task.get('task_name', 'turn_annotations_static')
    block_qual = cfg.mephisto.blueprint.get(
        'block_qualification', f'{task_name}_block'
    )
    soft_block_mturk_workers(cfg=cfg, db=db, soft_block_qual_name=block_qual)

    build_task(task_directory)

    op = Operator(db)
    op.validate_and_run_config(run_config=cfg.mephisto, shared_state=None)
    op.wait_for_runs_then_shutdown(
        skip_input=True, log_rate=cfg.monitoring_log_rate
    )
Example #6
0
 def run_acute_eval(self):
     """
     Run ACUTE Eval: set up the eval, then run Mephisto to completion.
     """
     self.set_up_acute_eval()
     db, cfg = load_db_and_process_config(self.args)
     op = Operator(db)
     op.validate_and_run_config(run_config=cfg.mephisto, shared_state=None)
     op.wait_for_runs_then_shutdown(skip_input=True,
                                    log_rate=cfg.monitoring_log_rate)
Example #7
0
def main(cfg: DictConfig) -> None:
    """Run the task, restricted to workers holding the allowlist qualification."""
    allowlist_quals = [
        make_qualification_dict(ALLOWLIST_QUALIFICATION, QUAL_EXISTS, None)
    ]
    shared_state = SharedStaticTaskState(qualifications=allowlist_quals)

    db, cfg = load_db_and_process_config(cfg)
    operator = Operator(db)
    operator.validate_and_run_config(cfg.mephisto, shared_state)
    operator.wait_for_runs_then_shutdown(skip_input=True, log_rate=30)
Example #8
0
 def run_acute_eval(self):
     """
     Run ACUTE Eval, logging the run ID and resolved Hydra config first.
     """
     self.set_up_acute_eval()
     db, cfg = load_db_and_process_config(self.args)
     print(f'*** RUN ID: {cfg.mephisto.task.task_name} ***')
     print(f'\nHydra config:\n{OmegaConf.to_yaml(cfg)}')
     op = Operator(db)
     op.validate_and_run_config(run_config=cfg.mephisto, shared_state=None)
     op.wait_for_runs_then_shutdown(
         skip_input=True, log_rate=cfg.monitoring_log_rate
     )
def make_onboarding_validator(correct_answer):
    """Return a validator accepting onboarding submissions whose
    ``outputs["answer"]`` equals *correct_answer*.

    Hoisted to module level so the check can be unit-tested in isolation.
    """
    def onboarding_is_valid(onboarding_data):
        outputs = onboarding_data["outputs"]
        # .get() tolerates submissions with no "answer" key (treated as wrong).
        return outputs.get("answer") == correct_answer
    return onboarding_is_valid


def main(cfg: DictConfig) -> None:
    """Launch the static task; onboarding passes only when the worker's
    answer matches ``cfg.correct_answer``."""
    correct_config_answer = cfg.correct_answer

    # The original read onboarding_data["inputs"] into an unused local;
    # only "outputs" is needed for validation.
    shared_state = SharedStaticTaskState(
        onboarding_data={"correct_answer": correct_config_answer},
        validate_onboarding=make_onboarding_validator(correct_config_answer),
    )

    db, cfg = load_db_and_process_config(cfg)
    operator = Operator(db)

    operator.validate_and_run_config(cfg.mephisto, shared_state)
    operator.wait_for_runs_then_shutdown(skip_input=True, log_rate=30)
Example #10
0
def main(cfg: DictConfig) -> None:
    """Run the static pilot task, blocking repeat pilot workers."""
    db, cfg = load_db_and_process_config(cfg)
    operator = Operator(db)

    shared_state = SharedStaticTaskState(on_unit_submitted=validate_unit)
    # Do not allow workers to take the pilot task a second time.
    shared_state.qualifications = [
        make_qualification_dict(PILOT_BLOCK_QUAL_NAME, QUAL_NOT_EXIST, None),
    ]

    operator.validate_and_run_config(cfg.mephisto, shared_state)
    operator.wait_for_runs_then_shutdown(skip_input=True, log_rate=30)
Example #11
0
def main(cfg: DictConfig) -> None:
    """Launch the teacher-driven chat task, building the webapp if needed."""
    db, cfg = load_db_and_process_config(cfg)

    teacher = get_teacher(cfg)
    world_opt = {"turn_timeout": cfg.turn_timeout, "teacher": teacher}

    # Build the frontend only when a custom bundle is configured but has
    # not yet been compiled.
    bundle = cfg.mephisto.blueprint.get("custom_source_bundle", None)
    if bundle is not None and not os.path.exists(bundle):
        build_task(TASK_DIRECTORY)

    shared_state = SharedParlAITaskState(
        world_opt=world_opt, onboarding_world_opt=world_opt
    )

    operator = Operator(db)
    operator.validate_and_run_config(
        run_config=cfg.mephisto, shared_state=shared_state
    )
    operator.wait_for_runs_then_shutdown(
        skip_input=True, log_rate=cfg.monitoring_log_rate
    )
Example #12
0
def main(cfg: DictConfig) -> None:
    """Run the simple chat demo task."""
    db, cfg = load_db_and_process_config(cfg)

    world_opt = {"num_turns": cfg.num_turns, "turn_timeout": cfg.turn_timeout}

    bundle_path = cfg.mephisto.blueprint.get("custom_source_bundle", None)
    if bundle_path is not None:
        # A custom bundle must already be compiled before the demo can use it.
        assert os.path.exists(bundle_path), (
            "Must build the custom bundle with `npm install; npm run dev` from within "
            f"the {TASK_DIRECTORY}/webapp directory in order to demo a custom bundle "
        )
        world_opt["send_task_data"] = True

    shared_state = SharedParlAITaskState(
        world_opt=world_opt, onboarding_world_opt=world_opt
    )

    operator = Operator(db)
    operator.validate_and_run_config(cfg.mephisto, shared_state)
    operator.wait_for_runs_then_shutdown(skip_input=True, log_rate=30)
Example #13
0
def main(cfg: DictConfig) -> None:
    """Run a tiny static demo task over two hard-coded text items."""
    task_dir = cfg.task_dir

    shared_state = SharedStaticTaskState(
        static_task_data=[
            {"text": "This text is good text!"},
            {"text": "This text is bad text!"},
        ],
        # Every onboarding submission is accepted for this demo.
        validate_onboarding=lambda onboarding_data: True,
    )

    build_task(task_dir)

    db, cfg = load_db_and_process_config(cfg)
    operator = Operator(db)
    operator.validate_and_run_config(cfg.mephisto, shared_state)
    operator.wait_for_runs_then_shutdown(skip_input=True, log_rate=30)
Example #14
0
File: impl.py  Project: simplecoka/cortx
def run_task(cfg: DictConfig, task_directory: str):
    """
    Run task, given configuration.
    """
    frontend_source_dir = os.path.join(task_directory, "webapp")
    frontend_build_dir = os.path.join(frontend_source_dir, "build")
    _ = frontend_build_dir  # Unused at the moment

    db, cfg = load_db_and_process_config(cfg)
    print(f'\nHydra config:\n{OmegaConf.to_yaml(cfg)}')

    random.seed(42)

    # Update task name when on sandbox or local to ensure data is split.
    task_name = cfg.mephisto.task.get('task_name', 'model_chat')
    architect_type = cfg.mephisto.architect._architect_type
    suffix_by_architect = {'local': '_local', 'mturk_sandbox': '_sandbox'}
    task_name = task_name + suffix_by_architect.get(architect_type, '')
    cfg.mephisto.task.task_name = task_name

    # Default to a task-specific name to avoid soft-block collisions.
    block_qual = cfg.mephisto.blueprint.get('block_qualification',
                                            f'{task_name}_block')
    soft_block_mturk_workers(cfg=cfg, db=db, soft_block_qual_name=block_qual)

    # Init
    shared_state = SharedModelChatTaskState(world_module=world_module)

    operator = Operator(db)
    operator.validate_and_run_config(run_config=cfg.mephisto,
                                     shared_state=shared_state)
    operator.wait_for_runs_then_shutdown(skip_input=True,
                                         log_rate=cfg.monitoring_log_rate)
Example #15
0
def main(cfg: DictConfig) -> None:
    """Validate the config and run until all task runs complete."""
    db, cfg = load_db_and_process_config(cfg)
    op = Operator(db)
    op.validate_and_run_config(cfg.mephisto)
    op.wait_for_runs_then_shutdown(skip_input=True, log_rate=30)