def delete_upload_layer(config_db_entity, config_entity):
    """
    Delete the previous test DbEntity/Layers for the given key.

    Ensures the DbEntityInterest exists (without triggering post-save
    publishing), then wipes the imported data, any Layers bound to the
    DbEntity, and finally the DbEntity itself.

    :param config_db_entity: DbEntity configuration whose ``key`` identifies
        the layer data to remove — presumably a DbEntity instance; confirm
        against callers.
    :param config_entity: the ConfigEntity that owns the DbEntity.
    :return: None
    """
    # Suppress post-save publishing while creating/updating the interest.
    # Use try/finally so the class-level flag is restored even if the
    # update raises — otherwise publishing stays disabled for all callers.
    DbEntityInterest._no_post_save_publishing = True
    try:
        update_or_create_db_entity_and_interest(config_entity, config_db_entity)
    finally:
        DbEntityInterest._no_post_save_publishing = False

    # Wipe out the previous import data since we're just testing
    on_config_entity_pre_delete_data_import(
        None, instance=config_entity, db_entity_keys=[config_db_entity.key])
    # Remove any Layers pointing at this DbEntity, then the DbEntity itself.
    Layer.objects.filter(
        db_entity_interest__db_entity__key=config_db_entity.key).delete()
    config_entity.db_entities.filter(key=config_db_entity.key).delete()
def create_scenario_clone(test_layer=False): scenario = FutureScenario.objects.filter(origin_instance__isnull=True)[0] if test_layer: cloned_layers = test_upload_layers(scenario) config_entities_fixture = resolve_fixture("config_entity", "config_entities", ConfigEntitiesFixture) import_scenario_configurations = config_entities_fixture.import_scenarios(scenario) for new_scenario_configuration in import_scenario_configurations: # Wipe out data and instance if it already exists matches = scenario.__class__.objects.filter(key=new_scenario_configuration['key']) if matches: on_config_entity_pre_delete_data_import( None, instance=matches[0]) matches.delete() # Save the scenario to simulate cloning # Cloning happens because future_scenario is the clone's origin_instance scenarios = scenarios_per_project(scenario.project, import_scenario_configurations) for s in scenarios: print s, s.__dict__ return scenarios