def i_create_source_from_file(step, data=None, output_dir=None):
    """Step: create only a source from a local data file.

    Runs ``bigmler --train`` with ``--no-dataset --no-model`` so nothing
    beyond the source is built, storing output under *output_dir*.
    """
    # ok_ matches the validation style of the other steps in this file;
    # `assert False` is stripped when Python runs with -O.
    ok_(data is not None and output_dir is not None)
    # NOTE: the original command carried a redundant trailing "--store";
    # the flag is already present once.
    command = ("bigmler --train " + res_filename(data) +
               " --store --output-dir " + output_dir +
               " --no-dataset --no-model")
    shell_execute(command, os.path.join(output_dir, "p.csv"), test=None)
def i_delete_source_older_newer(step, output_dir=None):
    """Step: delete resources created between the recorded bounds.

    Uses ``world.source_lower`` / ``world.source_upper`` as the
    ``--newer-than`` / ``--older-than`` limits for ``bigmler delete``.
    """
    # ok_ matches sibling steps; `assert False` vanishes under -O.
    ok_(output_dir is not None)
    command = ("bigmler delete --older-than " + world.source_upper +
               " --newer-than " + world.source_lower +
               " --output-dir " + output_dir)
    shell_execute(command, os.path.join(output_dir, "p.csv"), test=None)
def i_create_dataset_from_source_with_tag(step, tag=None, output_dir=None):
    """Step: build a dataset (no model) from the current source, tagged.

    Uses ``world.source['resource']`` as the source id and applies *tag*.
    """
    # ok_ matches sibling steps; `assert False` vanishes under -O.
    ok_(tag is not None and output_dir is not None)
    # NOTE: the original command carried a redundant trailing "--store".
    command = ("bigmler --source " + world.source['resource'] +
               " --tag " + tag +
               " --store --output-dir " + output_dir +
               " --no-model")
    shell_execute(command, os.path.join(output_dir, "p.csv"), test=None)
def i_delete_source_older_newer_with_resource_types(step, resource_types=None,
                                                    output_dir=None):
    """Step: delete resources of the given types between the time bounds.

    ``world.source_lower`` / ``world.source_upper`` supply the
    ``--newer-than`` / ``--older-than`` limits.
    """
    ok_(output_dir is not None and resource_types is not None)
    pieces = ["bigmler delete --older-than ", world.source_upper,
              " --newer-than ", world.source_lower,
              " --resource-types ", resource_types,
              " --output-dir ", output_dir]
    shell_execute("".join(pieces), os.path.join(output_dir, "p.csv"),
                  test=None)
def i_create_source_from_stdin(step, data=None, output_dir=None):
    """Step: create a source by piping the data file into bigmler's stdin.

    ``CAT`` is the platform cat/type command prefix; only a source is
    created (``--no-dataset --no-model``).
    """
    # ok_ matches sibling steps; `assert False` vanishes under -O.
    ok_(data is not None and output_dir is not None)
    command = (CAT + res_filename(data) + u"|bigmler --train " +
               u"--store --no-dataset --no-model --output-dir " + output_dir +
               u" --max-batch-models 1")
    # os.path.join for portability, consistent with every other step here
    # (original concatenated output_dir + "/test").
    shell_execute(command, os.path.join(output_dir, "test"), test=None)
def i_create_dataset_from_source_with_tag(step, tag=None, output_dir=None):
    """Step: build a dataset (no model) from the current source, tagged.

    Uses ``world.source['resource']`` as the source id and applies *tag*.
    """
    ok_(tag is not None and output_dir is not None)
    # NOTE: the original command carried a redundant trailing "--store";
    # the flag is already present once.
    command = ("bigmler --source " + world.source['resource'] +
               " --tag " + tag +
               " --store --output-dir " + output_dir +
               " --no-model")
    shell_execute(command, os.path.join(output_dir, "p.csv"), test=None)
def i_create_source_from_file(step, data=None, output_dir=None):
    """Step: create only a source from a local data file (no project).

    Runs ``bigmler --train`` with ``--no-dataset --no-model``; passes
    ``project=False`` through to ``shell_execute``.
    """
    ok_(data is not None and output_dir is not None)
    # NOTE: the original command carried a redundant trailing "--store";
    # the flag is already present once.
    command = ("bigmler --train " + res_filename(data) +
               " --store --output-dir " + output_dir +
               " --no-dataset --no-model")
    shell_execute(command, os.path.join(output_dir, "p.csv"), test=None,
                  project=False)
def i_delete_source_by_ids_filtered(step, resource_types=None,
                                    output_dir=None):
    """Step: dry-run delete of the current source id, filtered by types."""
    # ok_ matches sibling steps; `assert False` vanishes under -O.
    ok_(output_dir is not None and resource_types is not None)
    command = ("bigmler delete --ids " + world.source['resource'] +
               " --dry-run --output-dir " + output_dir +
               " --resource-types " + resource_types)
    shell_execute(command, os.path.join(output_dir, "p.csv"), test=None)
def i_delete_resources_newer_and_tag(step, tag=None, output_dir=None):
    """Step: delete resources newer than the lower bound carrying *tag*."""
    # ok_ matches sibling steps; `assert False` vanishes under -O.
    ok_(output_dir is not None and tag is not None)
    command = ("bigmler delete --newer-than " + world.source_lower +
               " --all-tag " + tag +
               " --output-dir " + output_dir)
    shell_execute(command, os.path.join(output_dir, "p.csv"), test=None)
def i_create_source_from_file_with_tag(step, data=None, tag=None,
                                       output_dir=None):
    """Step: create only a source from a local data file, tagged."""
    # ok_ matches sibling steps; `assert False` vanishes under -O.
    ok_(data is not None and output_dir is not None and tag is not None)
    # NOTE: the original command carried a redundant trailing "--store";
    # the flag is already present once.
    command = ("bigmler --train " + res_filename(data) +
               " --store --output-dir " + output_dir +
               " --tag " + tag +
               " --no-dataset --no-model")
    shell_execute(command, os.path.join(output_dir, "p.csv"), test=None)
def i_delete_source_by_file_filtered(step, resource_types=None,
                                     output_dir=None):
    """Step: delete from the stored source file, filtered by resource types.

    Reads the ids from ``<output_dir>/source`` via ``--from-file``.
    """
    # ok_ matches sibling steps; `assert False` vanishes under -O.
    ok_(output_dir is not None and resource_types is not None)
    command = ("bigmler delete --from-file %s%ssource " % (output_dir, os.sep) +
               " --output-dir " + output_dir +
               " --resource-types " + resource_types)
    shell_execute(command, os.path.join(output_dir, "p.csv"), test=None)
def i_delete_source_by_file_filtered(step, resource_types=None,
                                     output_dir=None):
    """Step: delete from the stored source file, filtered by resource types.

    Reads the ids from ``<output_dir>/source`` via ``--from-file``.
    """
    ok_(output_dir is not None and resource_types is not None)
    from_file = "bigmler delete --from-file %s%ssource " % (output_dir, os.sep)
    tail = "".join([" --output-dir ", output_dir,
                    " --resource-types ", resource_types])
    shell_execute(from_file + tail, os.path.join(output_dir, "p.csv"),
                  test=None)
def i_create_all_resources_to_test_from_stdin(step, data=None, test=None,
                                              name=None, output=None):
    """Step: build source/dataset/model and predict, piping the test set
    through stdin (``--test`` with no file argument)."""
    # ok_ matches sibling steps; `assert False` vanishes under -O.
    ok_(data is not None and test is not None and output is not None and
        name is not None)
    test = res_filename(test)
    command = ("cat " + test + "|bigmler --train " + res_filename(data) +
               " --test --store --output " + output +
               " --name \"" + name + "\" --max-batch-models 1")
    shell_execute(command, output, test=test)
def i_create_faulty_source_from_file_with_tag(step, data=None, tag=None,
                                              output_dir=None):
    """Step: try to create a source from a deliberately faulty data file.

    The command is expected to fail; the failure is swallowed on purpose
    so later steps can assert on the faulty resource.
    """
    ok_(data is not None and output_dir is not None and tag is not None)
    # NOTE: the original command carried a redundant trailing "--store";
    # the flag is already present once.
    command = ("bigmler --train " + res_filename(data) +
               " --store --output-dir " + output_dir +
               " --tag " + tag +
               " --no-dataset --no-model")
    try:
        shell_execute(command, os.path.join(output_dir, "p.csv"), test=None)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; the command failing here is the expected path.
        pass
def i_create_all_resources_to_test_from_stdin(step, data=None, test=None,
                                              name=None, output=None):
    """Step: build source/dataset/model and predict, piping the test set
    through stdin via the platform ``CAT`` command."""
    # ok_ matches sibling steps; `assert False` vanishes under -O.
    ok_(data is not None and test is not None and output is not None and
        name is not None)
    test = res_filename(test)
    if not PYTHON3:
        # On Python 2 the name arrives as bytes; decode for the u"" command.
        name = name.decode("utf-8")
    command = (CAT + test + u"|bigmler --train " + res_filename(data) +
               u" --test --store --output " + output +
               u" --name \"" + name + u"\" --max-batch-models 1")
    shell_execute(command, output, test=test)
def i_create_all_resources_in_output_dir(step, data=None, output_dir=None):
    """Step: run a full ``bigmler --train`` pipeline into *output_dir*."""
    # ok_ matches sibling steps; `assert False` vanishes under -O.
    ok_(output_dir is not None and data is not None)
    command = ("bigmler --train " + res_filename(data) +
               " --output-dir " + output_dir)
    shell_execute(command, os.path.join(output_dir, "p.csv"), test=None)
def i_delete_source_by_ids_dry(step, output_dir=None):
    """Step: dry-run delete of the current source by its resource id."""
    # ok_ matches sibling steps; `assert False` vanishes under -O.
    ok_(output_dir is not None)
    command = ("bigmler delete --ids " + world.source['resource'] +
               " --dry-run --output-dir " + output_dir)
    shell_execute(command, os.path.join(output_dir, "p.csv"), test=None)
def i_delete_resources_from_dir(step):
    """Step: delete every resource recorded under ``world.directory``."""
    directory = world.directory
    pieces = ["bigmler delete --from-dir ", directory,
              " --output-dir ", directory]
    shell_execute("".join(pieces), os.path.join(directory, "p.csv"),
                  test=None)
def i_create_all_resources_in_output_dir(step, data=None, output_dir=None):
    """Step: run a full ``bigmler --train`` pipeline into *output_dir*."""
    ok_(output_dir is not None and data is not None)
    train_file = res_filename(data)
    command = "".join(["bigmler --train ", train_file,
                       " --output-dir ", output_dir])
    shell_execute(command, os.path.join(output_dir, "p.csv"), test=None)
def i_delete_source_faulty_by_file(step, output_dir=None):
    """Step: delete faulty-status resources listed in the stored source file.

    Reads ids from ``<output_dir>/source`` and filters with
    ``--status faulty``.
    """
    ok_(output_dir is not None)
    from_file = ("bigmler delete --from-file %s%ssource "
                 % (output_dir, os.sep))
    command = from_file + " --status faulty --output-dir " + output_dir
    shell_execute(command, os.path.join(output_dir, "p.csv"), test=None)
def i_delete_source_newer_faulty_and_tag(step, tag=None, output_dir=None):
    """Step: delete faulty sources newer than the lower bound carrying *tag*."""
    ok_(output_dir is not None and tag is not None)
    pieces = ["bigmler delete --newer-than ", world.source_lower,
              " --source-tag ", tag,
              " --status faulty --output-dir ", output_dir]
    shell_execute("".join(pieces), os.path.join(output_dir, "p.csv"),
                  test=None)
def i_delete_source_by_file(step, output_dir=None):
    """Step: delete the resources listed in the stored source file.

    Reads the ids from ``<output_dir>/source`` via ``--from-file``.
    """
    # ok_ matches sibling steps; `assert False` vanishes under -O.
    ok_(output_dir is not None)
    command = ("bigmler delete --from-file %s%ssource " % (output_dir, os.sep) +
               " --output-dir " + output_dir)
    shell_execute(command, os.path.join(output_dir, "p.csv"), test=None)