def check_summary_like_expected(step, summary_file, expected_file):
    """Assert that the generated summary file matches the expected file row by row."""
    with UnicodeReader(res_filename(summary_file)) as summary_handler:
        summary_contents = [row for row in summary_handler]
    with UnicodeReader(res_filename(expected_file)) as expected_handler:
        expected_contents = [row for row in expected_handler]
    eq_(summary_contents, expected_contents)
def i_create_a_multivote(step, predictions_file):
    """Build a MultiVote object from the JSON predictions stored in a file.

    Fix: the original rebound the `predictions_file` parameter twice (first to
    the resolved path, then to the open file handle), shadowing the argument.
    Distinct local names keep the error message pointing at the real path.
    """
    predictions_path = res_filename(predictions_file)
    try:
        with open(predictions_path) as predictions_handler:
            world.multivote = MultiVote(json.load(predictions_handler))
    except IOError:
        assert False, "Failed to read %s" % predictions_path
def i_upload_a_file(step, file):
    """Upload a local file as a source inside the current project."""
    source_args = {"project": world.project_id}
    resource = world.api.create_source(res_filename(file), source_args)
    # record status information from the response
    world.status = resource["code"]
    world.location = resource["location"]
    world.source = resource["object"]
    # remember the resource id for later cleanup
    world.sources.append(resource["resource"])
def i_upload_a_file_with_project_conn(step, file):
    """Upload a local file as a source using a project-scoped connection."""
    resource = world.api.create_source(res_filename(file))
    # record status information from the response
    world.status = resource['code']
    world.location = resource['location']
    world.source = resource['object']
    # remember the resource id for later cleanup
    world.sources.append(resource['resource'])
def i_upload_a_file_with_args(step, file, args):
    """Upload a local file as a source, passing extra creation arguments."""
    create_args = json.loads(args)
    resource = world.api.create_source(res_filename(file), create_args)
    # record status information from the response
    world.status = resource['code']
    world.location = resource['location']
    world.source = resource['object']
    # remember the resource id for later cleanup
    world.sources.append(resource['resource'])
def i_create_a_script_from_file_or_url(step, source_code):
    """Create a whizzml script from either a local file or a URL."""
    # local paths must be resolved against the resources directory
    code_ref = source_code if is_url(source_code) else res_filename(source_code)
    resource = world.api.create_script(code_ref)
    world.status = resource['code']
    eq_(world.status, HTTP_CREATED)
    world.location = resource['location']
    world.script = resource['object']
    world.scripts.append(resource['resource'])
def the_local_association_set_is_like_file(step, filename):
    """Compare the local association set with the JSON stored in a file."""
    filename = res_filename(filename)
    # Uncomment to regenerate the expected file when text settings change:
    # with open(filename, "w") as filehandler:
    #     json.dump(result, filehandler)
    with open(filename) as filehandler:
        file_result = json.load(filehandler)
    eq_(world.local_association_set, file_result)
def i_upload_a_file(step, file):
    """Upload a local file as a source inside the current project."""
    resource = world.api.create_source(
        res_filename(file), {'project': world.project_id})
    # record status information from the response
    world.status = resource['code']
    world.location = resource['location']
    world.source = resource['object']
    # remember the resource id for later cleanup
    world.sources.append(resource['resource'])
def the_association_set_is_like_file(step, filename):
    """Compare the remote association set result with the JSON in a file."""
    filename = res_filename(filename)
    association_info = world.association_set.get("association_set", {})
    result = association_info.get("result", [])
    # Uncomment to regenerate the expected file when text settings change:
    # with open(filename, "w") as filehandler:
    #     json.dump(result, filehandler)
    with open(filename) as filehandler:
        file_result = json.load(filehandler)
    eq_(result, file_result)
def the_association_set_is_like_file(step, filename):
    """Compare the remote association set result with the JSON in a file."""
    filename = res_filename(filename)
    result = world.association_set.get(
        "association_set", {}).get("result", [])
    # Uncomment to regenerate the expected file when text settings change:
    # with open(filename, "w") as filehandler:
    #     json.dump(result, filehandler)
    with open(filename) as filehandler:
        file_result = json.load(filehandler)
    eq_(result, file_result)
def i_upload_a_file_with_args(step, file, args):
    """Upload a file as a source in the current project with extra args."""
    create_args = json.loads(args)
    create_args["project"] = world.project_id
    resource = world.api.create_source(res_filename(file), create_args)
    # record status information from the response
    world.status = resource["code"]
    world.location = resource["location"]
    world.source = resource["object"]
    # remember the resource id for later cleanup
    world.sources.append(resource["resource"])
def i_upload_a_file_with_args(step, file, args):
    """Upload a file as a source in the current project with extra args."""
    create_args = json.loads(args)
    create_args['project'] = world.project_id
    resource = world.api.create_source(res_filename(file), create_args)
    # record status information from the response
    world.status = resource['code']
    world.location = resource['location']
    world.source = resource['object']
    # remember the resource id for later cleanup
    world.sources.append(resource['resource'])
def i_upload_a_file_from_stdin(step, file):
    """Upload a source by streaming an open file handle to the API."""
    file_name = res_filename(file)
    with open(file_name, 'rb') as file_handler:
        resource = world.api.create_source(
            file_handler, {'project': world.project_id})
        # record status information from the response
        world.status = resource['code']
        world.location = resource['location']
        world.source = resource['object']
        # remember the resource id for later cleanup
        world.sources.append(resource['resource'])
def the_local_association_set_is_like_file(step, filename):
    """Compare the local association set against the expected JSON file.

    Scores are compared with 5-decimal tolerance; rules must match exactly.
    Fix: the original indexed both lists with `range(0, len(file_result))`,
    which would raise an opaque IndexError if the local set were shorter;
    an explicit length check plus `zip` reports the mismatch clearly.
    """
    filename = res_filename(filename)
    # Uncomment to regenerate the expected file when text settings change:
    # with open(filename, "w") as filehandler:
    #     json.dump(result, filehandler)
    with open(filename) as filehandler:
        file_result = json.load(filehandler)
    eq_(len(file_result), len(world.local_association_set))
    for expected, actual in zip(file_result, world.local_association_set):
        assert_almost_equal(expected['score'], actual['score'], places=5)
        eq_(expected['rules'], actual['rules'])
def i_create_using_dict_data(step, data):
    """Create a source from a CSV file loaded as a list of dicts.

    Fix: `sys.version > "3"` compared version strings lexicographically,
    which is fragile (e.g. a hypothetical "10.0" sorts before "3");
    `sys.version_info` compares numerically and is the idiomatic check.
    """
    # text mode is required for csv on Python 3, binary on Python 2
    mode = "rt" if sys.version_info[0] >= 3 else "rb"
    with open(res_filename(data), mode) as fid:
        reader = csv.DictReader(fid)
        dict_data = [row for row in reader]
    # create source
    resource = world.api.create_source(
        dict_data, {"project": world.project_id})
    # update status
    world.status = resource["code"]
    world.location = resource["location"]
    world.source = resource["object"]
    # save reference
    world.sources.append(resource["resource"])
def i_create_using_dict_data(step, data):
    """Create a source from a CSV file loaded as a list of dicts.

    Fix: `sys.version > '3'` compared version strings lexicographically,
    which is fragile; `sys.version_info` compares numerically.
    """
    # text mode is required for csv on Python 3, binary on Python 2
    mode = 'rt' if sys.version_info[0] >= 3 else 'rb'
    with open(res_filename(data), mode) as fid:
        reader = csv.DictReader(fid)
        dict_data = [row for row in reader]
    # create source
    resource = world.api.create_source(dict_data)
    # update status
    world.status = resource['code']
    world.location = resource['location']
    world.source = resource['object']
    # save reference
    world.sources.append(resource['resource'])
def create_local_ensemble_predictor(step, directory):
    """Build a local EnsemblePredictor from an exported ensemble directory."""
    module_dir = directory
    resource_dir = res_filename(directory)
    ensemble_path = os.path.join(resource_dir, "ensemble.json")
    with open(ensemble_path) as file_handler:
        ensemble = json.load(file_handler)
    world.local_ensemble = EnsemblePredictor(ensemble, module_dir)
def import_summary_file(step, summary_file):
    """Build a new fields structure from a summary CSV attributes file."""
    world.fields_struct = world.fields.new_fields_structure(
        csv_attributes_file=res_filename(summary_file))
def i_upload_a_file_async(step, file):
    """Upload a file as a source asynchronously inside the current project.

    Fix: `async=True` is a SyntaxError on Python 3.7+, where `async` became
    a reserved keyword; the bindings renamed the parameter to `async_load`
    (used consistently by the other async upload step in this file).
    """
    resource = world.api.create_source(res_filename(file),
                                       {'project': world.project_id},
                                       async_load=True)
    world.resource = resource
def i_create_local_logistic_regression_from_file(step, export_file):
    """Build a local LogisticRegression model from an exported JSON file."""
    export_path = res_filename(export_file)
    world.local_logistic = LogisticRegression(export_path)
def i_create_local_ensemble_from_file(step, export_file):
    """Build a local Ensemble model from an exported JSON file."""
    export_path = res_filename(export_file)
    world.local_ensemble = Ensemble(export_path)
def i_create_local_model_from_file(step, export_file):
    """Build a local Model from an exported JSON file."""
    export_path = res_filename(export_file)
    world.local_model = Model(export_path)
def i_export_model(step, pmml, filename):
    """Export the current model to a local file, optionally as PMML."""
    export_path = res_filename(filename)
    world.api.export(world.model["resource"], export_path, pmml)
def generate_summary(step, summary_file):
    """Write the fields summary as a CSV file."""
    summary_path = res_filename(summary_file)
    world.fields.summary_csv(summary_path)
def i_upload_a_file_async(step, file):
    """Upload a file as a source asynchronously inside the current project."""
    source_args = {'project': world.project_id}
    world.resource = world.api.create_source(
        res_filename(file), source_args, async_load=True)
def i_export_a_dataset(step, local_file):
    """Download the current dataset to a local file."""
    export_path = res_filename(local_file)
    world.api.download_dataset(world.dataset['resource'],
                               filename=export_path)
def i_download_anomaly_score_file(step, filename):
    """Download the batch anomaly score output to a local file."""
    download_path = res_filename(filename)
    file_object = world.api.download_batch_anomaly_score(
        world.batch_anomaly_score, filename=download_path)
    # a None return means the download failed
    assert file_object is not None
    world.output = file_object
def i_download_predictions_file(step, filename):
    """Download the batch prediction output to a local file."""
    download_path = res_filename(filename)
    file_object = world.api.download_batch_prediction(
        world.batch_prediction, filename=download_path)
    # a None return means the download failed
    assert file_object is not None
    world.output = file_object
def i_create_local_time_series_from_file(step, export_file):
    """Build a local TimeSeries model from an exported JSON file."""
    export_path = res_filename(export_file)
    world.local_time_series = TimeSeries(export_path)
def i_create_a_local_model_from_file(step, model_file):
    """Build a local Model from an exported JSON file."""
    model_path = res_filename(model_file)
    world.local_model = Model(model_path)
def i_export_time_series(step, filename):
    """Export the current time series resource to a local file."""
    export_path = res_filename(filename)
    world.api.export(world.time_series.get('resource'),
                     filename=export_path)
def i_check_model_stored(step, filename, pmml):
    """Check that the exported model file contains the model's id.

    Fix: the original asserted `content.index(model_id) > -1`, but
    `str.index` never returns -1 — it raises ValueError when the id is
    absent, producing an opaque error instead of an assertion failure.
    A plain `in` membership test fails cleanly.
    """
    with open(res_filename(filename)) as file_handler:
        content = file_handler.read()
    # resource ids look like "model/<hash>"; keep only the part after "/"
    model_id = world.model["resource"][
        (world.model["resource"].index("/") + 1):]
    assert model_id in content, \
        "Model id %s not found in %s" % (model_id, filename)
def i_export_cluster(step, filename):
    """Export the current cluster resource to a local file."""
    export_path = res_filename(filename)
    world.api.export(world.cluster.get('resource'), filename=export_path)
def i_export_ensemble(step, filename):
    """Export the current ensemble resource to a local file."""
    export_path = res_filename(filename)
    world.api.export(world.ensemble.get('resource'), filename=export_path)
def i_create_local_cluster_from_file(step, export_file):
    """Build a local Cluster model from an exported JSON file."""
    export_path = res_filename(export_file)
    world.local_cluster = Cluster(export_path)
def i_export_logistic_regression(step, filename):
    """Export the current logistic regression resource to a local file."""
    export_path = res_filename(filename)
    world.api.export(world.logistic_regression.get('resource'),
                     filename=export_path)
def files_equal(step, local_file, data):
    """Assert two resource files have identical contents.

    Fix: the original left both file handles open (resource leak);
    `with` blocks guarantee they are closed.
    """
    with open(res_filename(local_file)) as local_handler:
        contents_local_file = local_handler.read()
    with open(res_filename(data)) as data_handler:
        contents_data = data_handler.read()
    assert contents_local_file == contents_data
def i_export_deepnet(step, filename):
    """Export the current deepnet resource to a local file."""
    export_path = res_filename(filename)
    world.api.export(world.deepnet.get('resource'), filename=export_path)
def i_upload_a_file_async(step, file):
    """Upload a file as a source asynchronously.

    Fix: `async=True` is a SyntaxError on Python 3.7+, where `async` became
    a reserved keyword; the bindings renamed the parameter to `async_load`
    (used consistently by the other async upload step in this file).
    """
    resource = world.api.create_source(res_filename(file), async_load=True)
    world.resource = resource
def i_download_centroid_file(step, filename):
    """Download the batch centroid output to a local file."""
    download_path = res_filename(filename)
    file_object = world.api.download_batch_centroid(
        world.batch_centroid, filename=download_path)
    # a None return means the download failed
    assert file_object is not None
    world.output = file_object
def i_export_anomaly(step, filename):
    """Export the current anomaly detector resource to a local file."""
    export_path = res_filename(filename)
    world.api.export(world.anomaly.get('resource'), filename=export_path)
def i_check_predictions(step, check_file):
    """Compare the downloaded predictions with the expected check file."""
    expected_path = res_filename(check_file)
    with UnicodeReader(world.output) as prediction_rows, \
            UnicodeReader(expected_path) as test_rows:
        check_rows(prediction_rows, test_rows)
def i_create_local_anomaly_from_file(step, export_file):
    """Build a local Anomaly detector from an exported JSON file."""
    export_path = res_filename(export_file)
    world.local_anomaly = Anomaly(export_path)