def add_file(self, ds, pathname):
    """Attach the file at *pathname* to datasource *ds* as a SourceFile.

    Reads the file, computes its CRC32 checksum, and stores the contents
    under a SourceFile named after the file's basename (reusing an
    existing SourceFile of that name when one exists).
    """
    # Read as bytes: binascii.crc32 requires a bytes-like object on
    # Python 3, and ContentFile accepts bytes just as well as text.
    with open(pathname, 'rb') as f:
        contents = f.read()
    filename = os.path.basename(pathname)
    # abs() normalizes the (platform/version dependent) signed CRC;
    # same convention as the other import helpers in this file.
    crc = abs(binascii.crc32(contents))
    try:
        sourcefile = ds.sourcefiles.get(name=filename)
    except SourceFile.DoesNotExist:
        # First upload of this filename for the datasource.
        sourcefile = SourceFile(name=filename, datasource=ds, user=ds.user)
    sourcefile.crc = crc
    sourcefile.file.save(name=filename, content=ContentFile(contents))
    sourcefile.save()
def add_file(self, ds, pathname):
    """Store the contents of *pathname* on datasource *ds* as a SourceFile."""
    with open(pathname) as f:
        contents = f.read()
    filename = os.path.basename(pathname)
    crc = abs(binascii.crc32(contents))
    # Reuse an existing sourcefile with this name, or start a new one.
    try:
        sourcefile = ds.sourcefiles.get(name=filename)
    except SourceFile.DoesNotExist:
        sourcefile = SourceFile(name=filename, datasource=ds, user=ds.user)
    sourcefile.crc = crc
    sourcefile.file.save(name=filename, content=ContentFile(contents))
    sourcefile.save()
def save(request, f):
    """Parse the uploaded file *f*, link it to its logger datasource and save.

    ``create(f)`` builds the monitor object; the datasource is resolved
    from the logger serial number found in the file.
    """
    mon, channels = create(f)
    mon.user = request.user
    # Find datasource by logger serial number.
    mon.source = LoggerDatasource.objects.get(
        logger__serial__iexact=mon.serial_number)
    try:
        sf = mon.source.sourcefiles.get(name=f.name)
    except SourceFile.DoesNotExist:
        # Narrowed from a bare except: only "no such sourcefile" should
        # trigger creation of a new one; other errors must propagate.
        sf = SourceFile(name=f.name, datasource=mon.source, user=request.user)
    # Rewind: create(f) has already consumed the upload stream.
    f.seek(0)
    contents = f.read()
    sf.crc = abs(binascii.crc32(contents))
    sf.file.save(name=f.name, content=ContentFile(contents))
    sf.save()
    mon.save()
def save(request, f):
    """Parse the uploaded file *f*, link it to its logger datasource and save.

    ``create(f)`` builds the monitor object; the datasource is resolved
    from the logger serial number found in the file.
    """
    mon, channels = create(f)
    mon.user = request.user
    # Find datasource by logger serial number.
    mon.datasource = LoggerDatasource.objects.get(
        logger__serial__iexact=mon.serial_number)
    try:
        sf = mon.datasource.sourcefiles.get(name=f.name)
    except SourceFile.DoesNotExist:
        # Narrowed from a bare except: only "no such sourcefile" should
        # trigger creation of a new one; other errors must propagate.
        sf = SourceFile(name=f.name, datasource=mon.datasource,
                        user=request.user)
    # Rewind: create(f) has already consumed the upload stream.
    f.seek(0)
    contents = f.read()
    sf.crc = abs(binascii.crc32(contents))
    sf.file.save(name=f.name, content=ContentFile(contents))
    sf.save()
    mon.save()
def handle(self, *args, **options):
    """Import logger export files, from a directory tree or a single file.

    Walks *dirname* for files named ``export-<serial>-YYYY-...``, attaches
    each to the LoggerDatasource with that serial, and with ``replace``
    deletes previously imported sourcefiles whose time span overlaps the
    newly added file.
    """
    dirname = options.get('dirname', None)
    filename = options.get('filename', None)
    replace = options.get('replace', False)
    # Bug fix: the original `if not dirname or filename:` parsed as
    # `(not dirname) or filename`, so the command errored out whenever a
    # filename WAS supplied. Require at least one of the two options.
    if not (dirname or filename):
        logger.error('supply either dirname or filename')
        return
    admin = User.objects.get(username='******')
    if dirname:
        for root, _path, files in os.walk(dirname):
            for filename in files:
                # Raw string: avoids invalid \d escapes in a plain literal.
                match = re.match(r'^export-(?P<serial>\d+)-\d{4}-', filename)
                if not match:
                    continue
                serial = match.group('serial')
                try:
                    ds = LoggerDatasource.objects.get(name=serial)
                except LoggerDatasource.DoesNotExist:
                    logger.warning('Logger {} is not defined'.format(serial))
                    continue
                with open(os.path.join(root, filename), 'r') as f:
                    contents = f.read()
                # NOTE(review): unlike the other importers this crc is NOT
                # normalized with abs(); existing rows were stored this
                # way, so it is kept as-is to keep duplicate detection
                # consistent with the database.
                crc = binascii.crc32(contents)
                exist = ds.sourcefiles.filter(crc=crc).first()
                if exist:
                    logger.warning(
                        'Sourcefile {} already exists for logger {}'.format(
                            filename, serial))
                    sf = exist
                else:
                    sf = SourceFile(name=filename, datasource=ds,
                                    user=admin, crc=crc)
                    sf.file.save(filename, ContentFile(contents), save=True)
                    logger.info('Added {}'.format(filename))
                if replace:
                    # Delete sourcefiles whose [start, stop] interval
                    # overlaps the newly added file (excluding itself).
                    start = sf.start
                    stop = sf.stop
                    candidates = ds.sourcefiles.exclude(start__gt=stop)
                    candidates = candidates.exclude(stop__lt=start)
                    candidates = candidates.exclude(pk=sf.pk)
                    logger.info('deleting {} sourcefiles'.format(
                        candidates.count()))
                    candidates.delete()
def handle(self, *args, **options):
    """Import tab-separated logger exports and refresh derived series/charts.

    Each column header is expected as ``<serial> : <peilbuis> - <name>``.
    A per-logger sourcefile is created from the column, the logger's
    installation (screen) is resolved, and the affected screens' time
    series and well charts are updated afterwards.
    """
    files = options['files']
    admin = User.objects.get(username='******')
    wells = set()
    tz = pytz.timezone('Europe/Amsterdam')
    for fname in files:
        logger.info('Importing data from {}'.format(fname))
        df = pd.read_csv(fname, sep='\t', index_col=0, parse_dates=True,
                         na_values=['-'])
        df.drop('Datum', axis=1, inplace=True)
        span = [tz.localize(df.index.min()), tz.localize(df.index.max())]
        start, stop = span
        screens = set()
        for col in df.columns:
            serial, _peilbuis, name = map(lambda x: x.strip(),
                                          re.split('[:-]', col))
            series = df[col]
            logger.info(series.name)
            try:
                datalogger = Datalogger.objects.get(serial=serial)
                datasource = LoggerDatasource.objects.get(logger=datalogger)
                io = StringIO()
                io.write('Datum\t{}\n'.format(name))
                series.to_csv(io, sep='\t', header=False)
                contents = io.getvalue()
                crc = abs(binascii.crc32(contents))
                filename = 'Export_{}_{}_{:%Y%m%d}_{:%Y%m%d}'.format(
                    serial, name, start, stop)
                sourcefile = SourceFile(name=filename, datasource=datasource,
                                        user=admin, crc=crc)
                # Bug fix: FieldFile.save expects a django File object;
                # the raw StringIO was passed before. Wrap in ContentFile
                # as the other importers in this module do.
                sourcefile.file.save(name=filename,
                                     content=ContentFile(contents),
                                     save=True)
            except Exception as ex:
                logger.error(
                    'Cannot create sourcefile for logger {}: {}'.format(
                        serial, ex))
                # Bug fix: skip this column — `datalogger` may be unbound
                # here and the position lookup below would raise NameError.
                continue
            # Find out where the logger is installed. We could use the name
            # from the header, but this is not equal to the id of the
            # screen in the database.
            query = LoggerPos.objects.filter(logger=datalogger)
            pos = None
            if query.count() == 1:
                pos = query.first()
            else:
                # TODO: this is not right -- see the if/else below
                query1 = query.filter(start_date__range=span)
                # Bug fix: `query1.count == 1` compared the bound method to
                # 1 and was always False; count() must be called.
                if query1.count() == 1:
                    pos = query1.first()
                else:
                    query2 = query.filter(end_date__range=span)
                    if query2.count() == 1:
                        pos = query2.first()
            if pos is None:
                logger.error(
                    'Cannot find installation for logger {}'.format(serial))
                continue
            screens.add(pos.screen)
        logger.info('File import completed')
        if len(screens) > 0:
            logger.info('Updating time series')
            for screen in screens:
                series = screen.find_series()
                if series:
                    series.update(start=start, stop=stop)
                    wells.add(screen.well)
    if len(wells) > 0:
        logger.info('Updating well charts')
        make_wellcharts(None, None, wells)
    logger.info('Done.')
def handle(self, *args, **options):
    """Import Excel logger exports (one column per logger) and refresh charts.

    File arguments are glob patterns. Column headers are parsed as
    ``<serial> : ... - <name>`` (first ':' / last '-'); each column becomes
    a sourcefile on the logger's datasource unless an identical file (same
    CRC) was imported before. Affected screens and wells are refreshed.
    """
    files = options['files']
    admin = User.objects.filter(is_superuser=True).first()
    wells = set()
    tz = pytz.timezone('Etc/GMT-1')  # NL winter time (fixed UTC+1 offset)
    for pattern in files:
        for fname in glob.glob(pattern):
            logger.info('Importing data from {}'.format(fname))
            df = pd.read_excel(fname, index_col=0, na_values=['-'])
            nrows, ncols = df.shape
            span = [
                tz.localize(df.index.min()),
                tz.localize(df.index.max())
            ]
            start, stop = span
            logger.info('{} loggers found'.format(ncols))
            logger.info('count = {}, start = {}, stop = {}.'.format(
                nrows, start, stop))
            screens = set()
            for col in df.columns:
                # Split on the first ':' and the LAST '-' so logger names
                # containing '-' are parsed correctly.
                right = col.rfind('-')
                left = col.find(':')
                serial = col[:left].strip()
                name = col[right + 1:].strip()
                series = df[col]
                logger.info(col)
                try:
                    datalogger = Datalogger.objects.get(serial=serial)
                    datasource = LoggerDatasource.objects.get(
                        logger=datalogger)
                    io = StringIO()
                    io.write('Datum\t{}\n'.format(name))
                    series.to_csv(io, sep='\t', header=False)
                    contents = io.getvalue()
                    crc = abs(binascii.crc32(contents))
                    existing = SourceFile.objects.filter(crc=crc).first()
                    if existing:
                        logger.info('Already exists')
                        continue
                    filename = 'Export_{}_{}_{:%Y%m%d}_{:%Y%m%d}.csv'.format(
                        serial, name, start, stop)
                    sourcefile = SourceFile(name=filename,
                                            datasource=datasource,
                                            user=admin, crc=crc)
                    # Bug fix: FieldFile.save needs a django File object;
                    # previously the raw StringIO was passed.
                    sourcefile.file.save(name=filename,
                                         content=ContentFile(contents),
                                         save=True)
                except Exception as ex:
                    logger.error(
                        'Cannot create sourcefile for logger {}: {}'.format(
                            serial, ex))
                    # Bug fix: skip this column — `datalogger` may be
                    # unbound here; the lookup below would raise NameError.
                    continue
                # Find out where the logger is installed. We could use the
                # name from the header, but this is not equal to the id of
                # the screen in the database.
                query = LoggerPos.objects.filter(logger=datalogger)
                pos = None
                if query.count() == 1:
                    pos = query.first()
                else:
                    # TODO: this is not right -- see the if/else below
                    query1 = query.filter(start_date__range=span)
                    # Bug fix: `query1.count == 1` compared the bound method
                    # to 1 and was always False; count() must be called.
                    if query1.count() == 1:
                        pos = query1.first()
                    else:
                        query2 = query.filter(end_date__range=span)
                        if query2.count() == 1:
                            pos = query2.first()
                if pos is None:
                    logger.error(
                        'Cannot find installation for logger {}'.format(
                            serial))
                    continue
                screens.add(pos.screen)
            logger.info('File import completed')
            if len(screens) > 0:
                logger.info('Updating time series')
                for screen in screens:
                    series = screen.find_series()
                    if series:
                        series.update(start=start, stop=stop)
                        wells.add(screen.well)
    if len(wells) > 0:
        logger.info('Updating well charts')
        make_wellcharts(None, None, wells)
    logger.info('Done.')