def obj_get(self, request=None, **kwargs):
    try:
        dataset = Dataset.objects.get(pk=kwargs.get('dataset_pk'))
    except Dataset.DoesNotExist:
        dataset = Dataset.get_default()
    metadata_values = get_key_values(url=dataset.pmh_url,
                                     transform_sheet=dataset.transformation,
                                     identifier=kwargs['pk'])
    return dict(metadata=metadata_values, identifier=kwargs['pk'])
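# Hypothetical sketch (not the project's actual implementation) of a
# get_key_values helper compatible with the obj_get call above: fetch one
# record from an OAI-PMH endpoint and flatten it with an XSLT sheet.  Only the
# parameter names (url, transform_sheet, identifier) come from the call above;
# the OAI-PMH request, the XSLT handling and the <field> output convention are
# assumptions for illustration.
import requests
from lxml import etree


def get_key_values(url, transform_sheet, identifier, metadata_prefix='oai_dc'):
    """Fetch a single OAI-PMH record and return it as {key: value} pairs."""
    response = requests.get(url, params={
        'verb': 'GetRecord',
        'identifier': identifier,
        'metadataPrefix': metadata_prefix,
    })
    response.raise_for_status()
    record = etree.fromstring(response.content)
    # transform_sheet is assumed to be an XSLT document stored as text.
    transform = etree.XSLT(etree.fromstring(transform_sheet.encode('utf-8')))
    flattened = transform(record)
    # Assume the XSLT emits simple <field name="...">value</field> elements.
    return {el.get('name'): el.text for el in flattened.iter('field')}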
def problems_list(request):
    assessment_problems = []
    for entry in ProblemAssessor.objects.filter(assessor=request.user):
        assessment_problems.append({
            "problem": entry.problem,
            "tasks": AssessmentTask.objects.filter(
                assessor=request.user, problem=entry.problem, status=1),
            "supervise": entry.problem.dataset.owner == request.user,
        })
    context = {"assessment_problems": assessment_problems}
    context["datasets"] = Dataset.objects_safe(request)
    assessment_folder = os.listdir(
        os.path.join(settings.BASE_DIR, "templates", "assessment"))
    # Assessment types are the sub-folders (entries without a file extension).
    context["types"] = [x for x in assessment_folder if '.' not in x]
    return render(request, 'assessment/problems_list.html', context)
@classmethod
def setUpClass(cls):
    super().setUpClass()
    # Create 2 users, 2 datasets and 20 documents:
    # user0 -> dataset0 (public)  -> doc00 .. doc09 (id=1..10)
    # user1 -> dataset1 (private) -> doc10 .. doc19 (id=11..20)
    for i in range(2):
        user = User()
        user.username = "******" + str(i)
        user.save()
        dataset = Dataset()
        dataset.owner = user
        dataset.text_id = "dataset" + str(i)
        dataset.is_public = (i == 0)
        dataset.save()
        for j in range(10):
            document = Document()
            document.dataset = dataset
            document.index_id = j
            document.title = "doc" + str(i) + str(j)
            document.text = "text" + str(i) + str(j)
            document.snippet = "snippet" + str(i) + str(j)
            document.url = "url" + str(i) + str(j)
            document.save()
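# Illustrative companion test (not from the project) showing how the fixture
# above could be exercised; the assertions only rely on the objects created in
# setUpClass and are otherwise assumptions.
def test_fixture_counts(self):
    self.assertEqual(User.objects.count(), 2)
    self.assertEqual(Dataset.objects.filter(is_public=True).count(), 1)
    self.assertEqual(Document.objects.count(), 20)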
def obj_get_list(self, request=None, **kwargs):
    try:
        dataset = Dataset.objects.get(pk=kwargs.get('dataset_pk'))
    except Dataset.DoesNotExist:
        dataset = Dataset.get_default()
    return self.get_object_list(request, dataset)
def populate_db(self):
    Dataset.get_default()
    return HttpResponse("Quite OK")
def create_model(request):
    if request.method == 'GET':
        dataset = Dataset.get_dataset(request)
        if not dataset:
            return HttpResponseForbidden()
        modalities = Modality.objects.filter(dataset=dataset)
        scripts = os.listdir(os.path.join(settings.DATA_DIR, "scripts"))
        # Models that exist on disk but are not registered in the database yet.
        unreg = []
        try:
            folders = os.listdir(os.path.join(
                settings.DATA_DIR, "datasets", dataset.text_id, "models"))
            existing_models = [
                model.text_id
                for model in ArtmModel.objects.filter(dataset=dataset)
            ]
            unreg = [
                i for i in folders
                if i not in existing_models and not i.startswith('.')
            ]
        except Exception:
            pass
        context = {
            'dataset': dataset,
            'modalities': modalities,
            'scripts': scripts,
            'unreg': unreg,
            'regularizers': REGULARIZERS,
        }
        if settings.DEBUG:
            context['DEBUG'] = True
        return render(request, 'models/create_model.html', context)

    # POST: create the model and build it, optionally in a background thread.
    dataset = Dataset.get_dataset(request)
    if not dataset:
        return HttpResponseForbidden()
    model = ArtmModel()
    model.dataset = dataset
    model.name = request.POST['model_name']
    model.threshold_hier = int(request.POST['threshold_hier'])
    model.threshold_docs = int(request.POST['threshold_docs'])
    model.author = request.user
    model.creation_time = datetime.now()
    model.status = 1
    model.save()
    # model.prepare_log()
    if settings.THREADING:
        t = Thread(target=ArtmModel.create_generic,
                   args=(model, request.POST), daemon=True)
        t.start()
    else:
        model.create_generic(request.POST)
    return redirect("/model?model=" + str(model.id))
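# The THREADING branch above defers the heavy model build to a daemon thread so
# the HTTP response returns immediately.  A small helper capturing that pattern
# (an illustrative sketch, not part of the project) could look like this:
from threading import Thread

from django.conf import settings


def run_maybe_async(func, *args, **kwargs):
    """Run func in a daemon thread when settings.THREADING is set, else inline."""
    if getattr(settings, 'THREADING', False):
        Thread(target=func, args=args, kwargs=kwargs, daemon=True).start()
    else:
        func(*args, **kwargs)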
def handle(self, *args, **options):
    dir_name = sys.argv[3]
    shapefile_name = 'hydro ln'
    # datasource = ogr.Open(os.path.join(dir_name, shapefile_name))
    datasource = ogr.Open(dir_name)
    layer = datasource.GetLayer(0)

    # Create the Dataset object describing the shapefile layer.
    src_spatial_ref = layer.GetSpatialRef()
    geom_type = layer.GetLayerDefn().GetGeomType()
    geom_name = ogr.GeometryTypeToName(geom_type)
    shapefile = Dataset(name=shapefile_name,
                        srs_wkt=src_spatial_ref.ExportToWkt(),
                        geom_type=geom_name)
    shapefile.save()

    # Create an Attribute object for each field in the layer definition.
    attributes = []
    layer_def = layer.GetLayerDefn()
    for i in range(layer_def.GetFieldCount()):
        field_def = layer_def.GetFieldDefn(i)
        attr = Attribute(dataset=shapefile,
                         name=field_def.GetName(),
                         type=field_def.GetType(),
                         width=field_def.GetWidth(),
                         precision=field_def.GetPrecision())
        attributes.append(attr)
    Attribute.objects.bulk_create(attributes)

    # Create Feature objects, reprojecting every geometry to WGS 84 (EPSG:4326).
    dst_spatial_ref = osr.SpatialReference()
    dst_spatial_ref.ImportFromEPSG(4326)
    coord_transform = osr.CoordinateTransformation(src_spatial_ref,
                                                   dst_spatial_ref)
    feature_objs = []
    for i in range(layer.GetFeatureCount()):
        src_feature = layer.GetFeature(i)
        src_geometry = src_feature.GetGeometryRef()
        src_geometry.Transform(coord_transform)
        geometry = GEOSGeometry(src_geometry.ExportToWkt())
        geometry = wrap_geos_geometry(geometry)
        geom_field = calc_geometry_field(geom_name)
        fields = {'dataset': shapefile, geom_field: geometry}
        attribute_dict = {}
        for attr in attributes:
            success, result = get_ogr_feature_attribute(attr, src_feature)
            if not success:
                # os.remove(fname)
                # shutil.rmtree(dir_name)
                shapefile.delete()
                return result
            attribute_dict[attr.name] = result
        fields['attribute_values'] = attribute_dict
        feature_objs.append(Feature(**fields))
    Feature.objects.bulk_create(feature_objs)

    self.stdout.write('Successfully loaded datasets.')
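# The handle() above reads the input directory from sys.argv[3], which only
# works for one specific command-line shape and breaks when the command is run
# via call_command().  A sketch of the idiomatic Django alternative follows;
# the command help text and the argument name 'dir_name' are assumptions for
# illustration, not taken from the original code.
from django.core.management.base import BaseCommand


class Command(BaseCommand):
    help = 'Load a shapefile into Dataset/Attribute/Feature objects.'

    def add_arguments(self, parser):
        # Declared positional argument instead of reading sys.argv directly.
        parser.add_argument('dir_name', help='Path passed to ogr.Open()')

    def handle(self, *args, **options):
        dir_name = options['dir_name']
        # ... then proceed exactly as in the handle() implementation above.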