def getMeasurements(**kwargs):
    """Return measurements matching the query filter as a JSON response.

    kwargs are passed to prepareFilter(), which yields the ORM filter
    params, a {meaning_id: meaning} lookup, the measurement type
    ('NDICT', 'DICT' or 'PICT') and an optional extra queryset filter.
    An unknown type yields an empty list.
    """
    # Renamed from `filter`, which shadowed the builtin.
    params, meanings, mtype, extra_q = prepareFilter(**kwargs)
    ret = []
    if mtype == 'NDICT':
        data = RealMeasurement.objects.filter(**params).order_by(
            'meaning__unit', 'meaning', 'depth_from')
        if extra_q:
            data = data.filter(extra_q)
        # Append the meaning's display name and unit to each serialized row.
        ret = [
            v.to_dict() + [meanings[v.meaning_id].name,
                           meanings[v.meaning_id].unit]
            for v in data
        ]
    elif mtype == 'DICT':
        data = DictionaryMeasurement.objects.filter(**params).order_by(
            'meaning', 'depth_from')
        if extra_q:
            data = data.filter(extra_q)
        ret = [v.to_dict() + [meanings[v.meaning_id].name] for v in data]
    elif mtype == 'PICT':
        data = Image.objects.filter(**params).order_by('depth_from')
        data = data.values_list('id', 'depth_from', 'depth_to',
                                'geophysical_depth', 'meaning__name')
        if extra_q:
            data = data.filter(extra_q)
        ret = [{
            "id": img[0],
            "depth_from": img[1],
            "depth_to": img[2],
            "geophysical_depth": img[3],
            "meaning": img[4]
        } for img in data]
    return _JsonResponse(ret)
def reset(request):
    """Manage PostgreSQL dump files.

    GET    -- list available dump files.
    PUT    -- restore the database from the named dump (body: JSON string).
    POST   -- create a new dump; raises TooManyDumpFiles at the limit.
    DELETE -- remove the named dump file (body: JSON string).

    BUG FIX: the PGPASSWORD prefix contained a literal '******'
    placeholder, so the %-format had more arguments than conversions and
    raised TypeError before the command could run; restored to '%s'.

    NOTE(review): os.system interpolates db credentials and the
    user-supplied file name into a shell command; `fname` is only
    validated by the isfile() check.  Consider subprocess.run with a
    list argument and env={'PGPASSWORD': ...} instead.
    """
    if request.method == 'GET':
        return _JsonResponse(getDumpFiles())
    elif request.method == 'PUT':
        fname = json.loads(request.read().decode())
        if fname and os.path.isfile('%s/%s.sql' % (DUMPS_DIRNAME, fname)):
            logger.info("User %s started restoring database %s.sql ..."
                        % (request.user.username, fname))
            os.system(
                "PGPASSWORD='%s' pg_restore -U %s -d %s --clean < %s/%s.sql"
                % (version.models.getDBPassword(),
                   version.models.getDBUser(),
                   version.models.getDBName(), DUMPS_DIRNAME, fname))
            logger.info("Database %s.sql was successfully restored..." % fname)
        return HttpResponse()
    elif request.method == 'POST':
        if len(getDumpFiles()) < settings.MAX_DUMP_FILES:
            os.system(
                "PGPASSWORD='%s' pg_dump -U %s -Fc %s > %s"
                % (version.models.getDBPassword(),
                   version.models.getDBUser(),
                   version.models.getDBName(), prepareDumpName()))
            return HttpResponse()
        else:
            raise TooManyDumpFiles
    elif request.method == 'DELETE':
        fname = json.loads(request.read().decode())
        if fname and os.path.isfile('%s/%s.sql' % (DUMPS_DIRNAME, fname)):
            os.remove('%s/%s.sql' % (DUMPS_DIRNAME, fname))
        return HttpResponse()
def data(request):
    """Pivot real and dictionary measurements into one 2-D table (GET only).

    Returns JSON with:
      'boreholes' -- borehole names, in order of first appearance,
      'meanings'  -- [{'name', 'unit'}, ...] from prepareFilter(),
      'data'      -- rows of [depth_m, v, v, ...] where each borehole
                     contributes one column slot per meaning.
    Stratigraphy dictionary measurements are excluded.
    """
    if request.method == 'GET':
        with transaction.atomic():
            # NOTE: `filter` shadows the builtin; it is an optional extra
            # queryset filter returned by prepareFilter().
            params, meanings, _, filter = prepareFilter(**request.GET)
            vals = RealMeasurement.objects.filter(**params)
            dicts = DictionaryMeasurement.objects.filter(
                **params).exclude(meaning_id__in=MeaningDict.objects.filter(section='Stratygrafia'))
            # id -> name map used to translate the collected borehole ids
            # into display names at the end.
            boreholes = {bh.id: bh.name for bh in Borehole.objects.all()}
            if filter:
                vals = vals.filter(filter)
                dicts = dicts.filter(filter)
            curdepth = None
            # 'boreholes'/'meanings' start as id lists so .index() can be
            # used for column placement; both are rewritten before return.
            ret = {
                'boreholes': [],
                'meanings': [
                    m.id for m in meanings.values()]}
            data = []
            row = None
            # Merge both measurement kinds sorted by (depth, borehole,
            # meaning); a new depth value starts a new output row.
            for d in sorted(
                    list(vals) + list(dicts),
                    key=lambda x: (
                        x.depth_from,
                        x.borehole_id,
                        x.meaning_id)):
                if d.depth_from != curdepth:
                    if row:
                        data.append(row)
                    curdepth = d.depth_from
                    # depths are stored in cm; expose metres.
                    row = [float(curdepth) / 100.0]
                if d.borehole_id not in ret['boreholes']:
                    ret['boreholes'].append(d.borehole_id)
                # Pad up to this borehole's first column
                # (one slot per meaning per borehole, +1 for depth).
                row += ['' for i in range(ret['boreholes'].index(d.borehole_id)
                                          * len(ret['meanings'])
                                          - len(row) + 1)]
                if len(ret['meanings']) > 1:
                    # Pad up to this meaning's column within the borehole span.
                    row += ['' for i in range(ret['meanings'].index(
                        d.meaning_id) - (len(row) - 1) % len(ret['meanings']))]
                # Dictionary measurements carry their value on .dictionary.
                row.append(
                    d.dictionary.value if hasattr(
                        d, 'dictionary') else d.value)
            if row:
                data.append(row)
            # Right-pad every row to the full table width.
            for row in data:
                while len(row) <= len(ret['boreholes']) * len(ret['meanings']):
                    row.append('')
            ret['boreholes'] = [boreholes[bh] for bh in ret['boreholes']]
            ret['meanings'] = [{'name': m.name, 'unit': m.unit}
                               for m in meanings.values()]
            ret['data'] = data
            return _JsonResponse(ret)
def doExport(request, borehole_id, depth_from, depth_to, lang):
    """Create a CSV report in the tmp folder and return its file name.

    The request body is a JSON list of chosen meaning ids.  Raises
    NoDictionaryFile when the translation dictionary for `lang` is
    missing.

    FIXES: the language dictionary is now checked/loaded before the CSV
    file is created (previously a failed check left an empty CSV file
    behind), and the dictionary file handle is closed via `with` even
    when writing raises (previously it leaked on any exception).
    """
    chosenMeaningsId = json.loads(request.read().decode())
    borehole = Borehole.objects.get(id=borehole_id)
    filename = generateReportFilename(borehole, depth_from, depth_to)
    dict_path = 'values/dicts/%s_normal.json' % lang
    if not os.path.isfile(dict_path):
        raise NoDictionaryFile
    with open(dict_path) as json_file:
        dicts = json.load(json_file)
    if not os.path.exists(tmpDir):
        os.makedirs(tmpDir)
    with open(tmpDir + filename, 'w') as csvfile:
        writer = csv.writer(csvfile, delimiter=';', lineterminator='\n')
        writeTopHeader(writer, request, borehole)
        writeHeader(writer, chosenMeaningsId, dicts)
        meaningColDict = getMeaningDict(chosenMeaningsId)
        rows = getRows(borehole, depth_from, depth_to, chosenMeaningsId,
                       meaningColDict, dicts)
        for r in rows:
            r.writeRow(writer)
    return _JsonResponse({'filename': filename})
def tables(request, borehole_id):
    """Build a per-borehole measurement table (GET only).

    Returns JSON with 'header' (DEPTH_FROM plus one column per distinct
    meaning, sorted by meaning id) and 'data' (one row per distinct
    depth, values aligned to their meaning's column).  Stratigraphy
    dictionary measurements are excluded.
    """
    if request.method == 'GET':
        with transaction.atomic():
            # Merge the URL borehole id into the GET query parameters.
            params, meanings, _, filter = prepareFilter(
                **dict(request.GET, **{'borehole_id': borehole_id}))
            rmeasurements = RealMeasurement.objects.filter(**params).order_by(
                'depth_from', 'meaning')
            dmeasurements = DictionaryMeasurement.objects.filter(
                **params).exclude(meaning_id__in=MeaningDict.objects.filter(
                    section='Stratygrafia')).order_by('depth_from', 'meaning')
            if filter:
                rmeasurements = rmeasurements.filter(filter)
                dmeasurements = dmeasurements.filter(filter)
            measurements = sorted(list(rmeasurements) + list(dmeasurements),
                                  key=lambda x: (x.depth_from, x.meaning_id))
            curdepth = None
            ret = {'header': [{'name': 'DEPTH_FROM'}]}
            data = []
            row = None
            # temp  : one representative measurement per distinct meaning
            # temp2 : the meaning ids already seen (dedup helper)
            temp = list()
            temp2 = list()
            for d in measurements:
                if d.meaning_id not in temp2:
                    temp.append(d)
                    temp2.append(d.meaning_id)
            temp = sorted(temp, key=lambda x: x.meaning_id)
            ret['header'] += ([{
                'name': meanings[d.meaning_id].name,
                'unit': meanings[d.meaning_id].unit
            } for d in temp])
            # From here on `temp` is the ordered list of meaning ids;
            # temp.index(id) gives the column position of a meaning.
            temp = [d.meaning_id for d in temp]
            for d in measurements:
                if d.depth_from != curdepth:
                    # New depth value -> flush the current row, start a
                    # fresh one (depths are stored in cm; expose metres).
                    if row:
                        data.append(row)
                    curdepth = d.depth_from
                    row = [float(curdepth) / 100]
                # Left-pad with blanks up to this meaning's column.
                while len(row) <= temp.index(d.meaning_id):
                    row.append('')
                # Dictionary measurements carry their value on .dictionary.
                row.append(d.dictionary.value if hasattr(d, 'dictionary'
                                                         ) else d.value)
            if row:
                data.append(row)
            # Right-pad every row to the full column count.
            for row in data:
                while len(row) <= len(temp):
                    row.append('')
            ret['data'] = data
            return _JsonResponse(ret)
def getArchiveProgress(request):
    """Report the archiver's current status and completion percentage."""
    if archiver.max:
        percent = int(float(archiver.progress) / float(archiver.max) * 100)
    else:
        percent = 0
    return _JsonResponse({'status': archiver.status, 'progress': percent})
def getStratigraphy(**kwargs):
    """Return stratigraphy layers as JSON.

    Each distinct (depth_from, depth_to) pair becomes one layer dict
    with 'thill'/'ceil' keys; every measurement in that interval adds a
    meaning_id -> dictionary-value entry to the layer.
    """
    params, _, _, _ = prepareFilter(**kwargs)
    layers = []
    prev_top = None
    prev_bottom = None
    ordered = DictionaryMeasurement.objects.filter(**params).order_by(
        'depth_from', 'depth_to', 'meaning')
    for measurement in ordered:
        new_interval = (measurement.depth_from != prev_top
                        or measurement.depth_to != prev_bottom)
        if new_interval:
            prev_top = measurement.depth_from
            prev_bottom = measurement.depth_to
            layers.append({'thill': measurement.depth_from,
                           'ceil': measurement.depth_to})
        layers[-1][measurement.meaning_id] = measurement.dictionary.value
    return _JsonResponse(layers)
def getUsers(uid=None):
    ''' The function gets all existing users or particular user by id. '''
    def serialize(user):
        entry = {}
        for field in ("id", "username", "first_name", "last_name"):
            entry[field] = str(getattr(user, field))
        # A user is read/write when they belong to the 'editors' group.
        entry['readwrite'] = user.groups.filter(name='editors').exists()
        return entry

    if uid:
        payload = serialize(User.objects.get(id=uid))
    else:
        payload = [serialize(u) for u in User.objects.all()]
    return _JsonResponse(payload)
def getMeanings(**kwargs):
    """Return meaning definitions as JSON.

    With 'mid' in kwargs: detail for that single meaning (dict values /
    unit and a 'type' marker).  With 'filter': section-grouped listings
    of DICT / NDICT / PICT meanings, or a flat STRAT list.  Otherwise:
    every meaning grouped by section.
    """
    def createElement(obj):
        element = {
            attr: six.text_type(getattr(obj, attr))
            for attr in ("id", "name")
        }
        if 'mid' in kwargs:
            if obj.unit == 'DICT':
                element['dictvals'] = [
                    val.to_dict()
                    for val in MeaningDictValue.objects.filter(dict_id=obj)
                ]
                element['type'] = 'dict'
            elif obj.unit != 'PICT':
                element['unit'] = obj.unit
                element['type'] = 'normal'
            else:
                element['type'] = 'pict'
        else:
            element['unit'] = obj.unit
        element['section'] = obj.section.name
        return element

    def grouped(sections, members):
        # One entry per section: its name plus its serialized meanings.
        return [{
            'name': s.name,
            'meanings': [createElement(m) for m in members(s)],
        } for s in sections]

    result = None
    if 'mid' in kwargs:
        result = createElement(MeaningValue.objects.get(id=kwargs['mid']))
    elif 'filter' in kwargs:
        kind = kwargs['filter'][0]
        all_sections = MeaningSection.objects.order_by('name')
        if kind == 'DICT':
            result = grouped(
                all_sections.exclude(name='Stratygrafia'),
                lambda s: MeaningDict.objects.filter(
                    section=s).order_by('name'))
        elif kind == 'NDICT':
            result = grouped(
                all_sections,
                lambda s: MeaningValue.objects.filter(section=s).exclude(
                    unit__in=['DICT', 'PICT']).order_by('name'))
        elif kind == 'PICT':
            result = grouped(
                all_sections,
                lambda s: MeaningImage.objects.filter(
                    section=s).order_by('name'))
        elif kind == 'STRAT':
            result = [
                createElement(m)
                for m in MeaningDict.objects.filter(
                    section='Stratygrafia').order_by('id')
            ]
    else:
        result = grouped(
            MeaningSection.objects.order_by('name'),
            lambda s: MeaningValue.objects.filter(section=s).order_by('name'))
    return _JsonResponse(result)
def getSections(sid=None):
    """Return every meaning-section name, alphabetically sorted."""
    sections = MeaningSection.objects.order_by('name')
    return _JsonResponse([section.name for section in sections])
def image(request, measurement_id=None):
    ''' The service for getting a list of all existing images or getting particular image '''
    # The id may arrive via the URL or as a GET parameter.
    if measurement_id is None and "id" in request.GET:
        measurement_id = request.GET["id"]
    if measurement_id is not None:
        img = models.Image.objects.get(id=measurement_id)
        if request.method == 'GET':
            # get image with id
            response = HttpResponse()
            response['content-type'] = 'image/jpeg'
            response.write(img.imagedata)
            return response
        elif request.method == 'DELETE':
            # delete image with id
            img.delete()
            logger.info("User %s deleted image" % request.user.username)
            return _JsonResponse({'status': 'ok'})
    if request.method == 'POST':
        # upload file here
        # depth_from falls back through depth_from -> geophysical_depth
        # -> depth; values arrive in metres and are stored in cm.
        depth_from = None
        if "depth_from" in request.POST:
            depth_from = request.POST["depth_from"]
        elif "geophysical_depth" in request.POST:
            depth_from = request.POST["geophysical_depth"]
        else:
            depth_from = request.POST["depth"]
        depth_from = float(depth_from) * 100
        # Use the posted depth_to only when it lies below depth_from;
        # otherwise derive it from the configured image height.
        depth_to = float(request.POST['depth_to']) * 100 if (
            'depth_to' in request.POST and
            int(request.POST['depth_to']) * 100 > depth_from
        ) else depth_from + float(settings.MEASUREMENT_IMAGE_HEIGHT_CM)
        # geophysical depth uses the reverse fallback order.
        geodepth = None
        if "geophysical_depth" in request.POST:
            geodepth = request.POST["geophysical_depth"]
        elif "depth_from" in request.POST:
            geodepth = request.POST["depth_from"]
        else:
            geodepth = request.POST["depth"]
        geodepth = float(geodepth) * 100
        meaning = MeaningImage.objects.get(
            id=request.POST['meaning']) if 'meaning' in request.POST else None
        ret = None
        imgdata = request.FILES['image_path'].read()
        # 415 when the upload does not match the configured dimensions.
        if Image.open(BytesIO(imgdata)).size == (
                settings.MEASUREMENT_IMAGE_WIDTH_PX,
                settings.MEASUREMENT_IMAGE_HEIGHT_PX):
            ret = HttpResponse()
        else:
            ret = HttpResponse(status=415)
        # NOTE(review): the image is stored even when a 415 is returned
        # above -- confirm whether wrong-size uploads should be rejected
        # before add_image() is called.
        img = add_image(
            imgdata,
            borehole=Borehole.objects.get(id=request.POST["borehole_id"]),
            depth_from=depth_from,
            depth_to=depth_to,
            geophysical_depth=geodepth,
            meaning=meaning)
        logger.info("User %s added new photo" % request.user.username)
        return ret
    if request.method == 'PUT':
        # PUT triggers a background photo regeneration for one borehole;
        # 503 while the shared archiver is already busy.
        global archiver
        borehole = Borehole.objects.get(
            id=json.loads(request.read().decode('utf-8'))["borehole_id"])
        if (archiver.isBusy()):
            return HttpResponse(status=503)
        archiver.startRegeneration(request, borehole)
        logger.info("User %s started photo regeneration"
                    % request.user.username)
        return HttpResponse()
    # get list of images
    imgs = models.Image.objects.filter(
        meaning=None,
        borehole=Borehole.objects.get(
            id=request.GET["borehole_id"])).order_by('depth_from')
    imgs = imgs.values_list('id', 'depth_from', 'depth_to',
                            'geophysical_depth')
    # Optional metre-based depth window (stored values are in cm).
    if "start_depth" in request.GET:
        imgs = imgs.filter(depth_to__gte=int(request.GET["start_depth"]) * 100)
    if "stop_depth" in request.GET:
        imgs = imgs.filter(depth_from__lte=int(request.GET["stop_depth"]) * 100)
    return _JsonResponse([{
        "id": img[0],
        "depth_from": img[1],
        "depth_to": img[2],
        "geophysical_depth": img[3]
    } for img in imgs])
def archive(request, borehole_id, data_col, meaning_id, lang):
    """Import measurements for one borehole from an uploaded CSV archive.

    `data_col` is the 1-based CSV column holding the values; column 1 is
    depth_from and column 2 the geophysical depth (both in metres,
    stored in cm).  The first two rows and rows with a blank data field
    are skipped.  The meaning id selects either a dictionary meaning
    (values translated through the reverse language dictionary) or a
    real (numeric) meaning.  The whole import is atomic; a duplicate
    (borehole, depth_from, meaning) raises MeasurementsDuplicated and
    rolls everything back.

    FIX: the reverse-dictionary file is now opened via `with`, so the
    handle no longer leaks when MeasurementsDuplicated (or any ORM
    error) propagates out of the transaction.
    """
    if not os.path.isfile('values/dicts/%s_reverse.json' % lang):
        raise NoDictionaryFile
    data_col = int(data_col) - 1  # Users don't count from zero
    geodepth_col = 2
    depth_from_col = 1
    rows_added = 0
    with open('values/dicts/%s_reverse.json' % lang) as jsonfile:
        dicts = json.load(jsonfile)
    csvfile = request.FILES['archive']
    if sys.version_info[0] >= 3:
        # csv.reader needs text (not bytes) on Python 3.
        csvfile = io.StringIO(csvfile.read().decode(), newline='\n')
    reader = csv.reader(csvfile, delimiter=';')
    dict_measurement = True
    counter = 0
    with transaction.atomic():
        try:
            meaning = MeaningDict.objects.get(id=meaning_id)
        except MeaningDict.DoesNotExist:
            meaning = MeaningValue.objects.get(id=meaning_id)
            dict_measurement = False
        borehole = Borehole.objects.get(id=borehole_id)
        for row in reader:
            # ignore rows with blank data field
            counter += 1
            if (counter <= 2 or len(row[data_col]) == 0):
                continue
            # Depths use comma decimal separators; fall back to the
            # geophysical column when depth_from does not parse.
            try:
                depth_from = float(row[depth_from_col].replace(',', '.'))
            except ValueError:
                depth_from = float(row[geodepth_col].replace(',', '.'))
            depth_from = int(depth_from * 100)
            depth_to = depth_from + 1
            try:
                geodepth = int(
                    float((row[geodepth_col]).replace(',', '.')) * 100)
            except ValueError:
                geodepth = depth_from
            value = row[data_col]
            if not dict_measurement:
                if not RealMeasurement.objects.filter(
                        borehole=borehole,
                        depth_from=depth_from,
                        meaning=meaning).exists():
                    RealMeasurement.objects.create(
                        borehole=borehole,
                        geophysical_depth=geodepth,
                        depth_from=depth_from,
                        depth_to=depth_to,
                        value=float(value.replace(',', '.')),
                        meaning=meaning)
                else:
                    raise MeasurementsDuplicated
            else:
                if not DictionaryMeasurement.objects.filter(
                        borehole=borehole,
                        depth_from=depth_from,
                        meaning=meaning).exists():
                    if sys.version_info[0] < 3:
                        value = value.decode('utf-8')
                    # Translate through the reverse dictionary; unknown
                    # values are looked up verbatim.
                    DictionaryMeasurement.objects.create(
                        borehole=borehole,
                        geophysical_depth=geodepth,
                        depth_from=depth_from,
                        depth_to=depth_to,
                        dictionary=MeaningDictValue.objects.get(
                            value=dicts.get(value, value),
                            dict_id=meaning),
                        meaning=meaning)
                else:
                    raise MeasurementsDuplicated
            rows_added += 1
    logger.info('User %s added %d rows from csv file: '
                % (request.user.username, rows_added))
    return _JsonResponse({'rows': rows_added})