def run(self):
        try:
            pf = open(self.pidfile, 'r')
            pid = int(pf.read().strip())
            pf.close()
        except IOError:
            pid = 0

        tp = BackgroundTask(start=time(),
                            label='data acquisition daemon',
                            message='init',
                            timestamp=time(),
                            pid=pid)
        tp.save()
        tp_id = tp.id

        try:
            daq = client.DataAcquisition()
        except:
            var = traceback.format_exc()
            log.error("exeption in dataaquisition daemon, %s" % var)
            tp.message = 'failed'
            tp.failed = True
            tp.timestamp = time()
            tp.save()
            raise
        tp.message = 'running...'
        tp.save()
        log.notice("started dataaquisition daemon")
        while not tp.stop_daemon:
            try:
                dt = daq.run()
            except:
                var = traceback.format_exc()
                log.debug("exeption in dataaquisition daemon, %s" % var, -1)
                daq = client.DataAcquisition()
                dt = 5
            tp = BackgroundTask.objects.get(id=tp_id)
            tp.timestamp = time()
            tp.load = 1. - max(min(dt / daq._dt, 1), 0)
            tp.save()
            if dt > 0:
                sleep(dt)
        try:
            tp = BackgroundTask.objects.get(id=tp_id)
            tp.done = True
            tp.message = 'stopped'
            tp.timestamp = time()
            tp.save()
        except:
            var = traceback.format_exc()
            log.debug("exeption in dataaquisition daemon, %s" % var, -1)
        log.notice("stopped dataaquisition daemon execution")
        self.stop()
Example #2
def run():
    label = 'pyscada.modbus.daemon'
    pid = str(os.getpid())
    # read the global settings
    if 'polling_interval' in settings.PYSCADA_MODBUS:
        dt_set = float(settings.PYSCADA_MODBUS['polling_interval'])
    else:
        dt_set = 5  # default value is 5 seconds

    # register the task in the BackgroundTask list
    bt = BackgroundTask(start=time(),
                        label=label,
                        message='daemonized',
                        timestamp=time(),
                        pid=pid)
    bt.save()
    bt_id = bt.pk
    # start the data acquisition
    try:
        daq = client.DataAcquisition()
    except:
        var = traceback.format_exc()
        log.error("exeption in dataaquisition daemon, %s" % var)
        # on error mark the task as failed
        bt = BackgroundTask.objects.get(pk=bt_id)
        bt.message = 'failed'
        bt.failed = True
        bt.timestamp = time()
        bt.save()
        raise

    # mark the task as running
    bt = BackgroundTask.objects.get(pk=bt_id)
    bt.timestamp = time()
    bt.message = 'running...'
    bt.save()

    log.notice("started modbus dataaquisition daemon")
    err_count = 1
    # main loop
    while not bt.stop_daemon:
        t_start = time()
        try:
            daq.run()
            err_count = 1
        except:
            var = traceback.format_exc()
            # write log only
            if err_count <= 3 or err_count == 10 or err_count % 100 == 0:
                log.debug(
                    "occ: %d, exception in data acquisition daemon\n\n %s" %
                    (err_count, var), -1)
            err_count += 1
            daq = client.DataAcquisition()
        bt = BackgroundTask.objects.get(pk=bt_id)
        bt.timestamp = time()
        if dt_set > 0:
            bt.load = 1. - max(min((time() - t_start) / dt_set, 1), 0)
        else:
            bt.load = 1
        bt.save()
        dt = dt_set - (time() - t_start)
        if dt > 0:
            sleep(dt)

    ## will be called after the stop signal
    log.notice("stopped data acquisition daemon execution")
    bt = BackgroundTask.objects.get(pk=bt_id)
    bt.timestamp = time()
    bt.done = True
    bt.message = 'stopped'
    bt.pid = 0
    bt.save()
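
The daemon loops in these examples all share the same timing pattern: run one acquisition pass, record how much of the polling interval was used as the task's load, and sleep for whatever is left of the interval. Below is a minimal, self-contained sketch of that pattern; polling_loop, acquire_once, and POLLING_INTERVAL are illustrative names, not part of PyScada.

import time

POLLING_INTERVAL = 5.0  # placeholder for dt_set / polling_interval, in seconds


def acquire_once():
    # placeholder for daq.run() / mh.run(); pretend one pass takes 0.5 s
    time.sleep(0.5)


def polling_loop(should_stop):
    while not should_stop():
        t_start = time.time()
        acquire_once()
        elapsed = time.time() - t_start
        # fraction of the polling interval spent working, clamped to [0, 1]
        load = max(min(elapsed / POLLING_INTERVAL, 1), 0)
        print('load: %.2f' % load)
        # sleep only for the unused remainder of the interval
        remaining = POLLING_INTERVAL - elapsed
        if remaining > 0:
            time.sleep(remaining)


# usage: run three iterations and stop
if __name__ == '__main__':
    counter = {'n': 0}

    def should_stop():
        counter['n'] += 1
        return counter['n'] > 3

    polling_loop(should_stop)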
Example #3
def daq_daemon_run(label):
	'''
	acquire data from the different devices/protocols
	'''
	
	
	pid     = str(os.getpid())
	devices = {}
	dt_set  = 5
	# init daemons
	for item in Device.objects.filter(protocol__daq_daemon=1, active=1):
		try:
			tmp_device = item.get_device_instance()
			if tmp_device is not None:
				devices[item.pk] = tmp_device
				dt_set = min(dt_set,tmp_device.device.polling_interval)
		except:
			var = traceback.format_exc()
			log.error("exeption while initialisation of %s:%s %s" % (label,os.linesep, var))
	# register the task in the BackgroundTask list
	bt = BackgroundTask(start=time.time(),label=label,message='daemonized',timestamp=time.time(),pid = pid)
	bt.save()
	bt_id = bt.pk

	# mark the task as running
	bt = BackgroundTask.objects.get(pk=bt_id)
	bt.timestamp = time.time()
	bt.message = 'running...'
	bt.save()

	log.notice("started %s"%label)
	err_count = 0
	reinit_count = 0
	# main loop
	
	
	while not bt.stop_daemon:
		try:
			t_start = time.time()
			# handle reinit
			if bt.restart_daemon:
				reinit_count += 1
			# wait approx. 5 min (300 s) of runs before reinit to avoid frequent reinits
			if bt.restart_daemon and reinit_count > 300.0/dt_set: 
				for item in Device.objects.filter(protocol__daq_daemon=1,active=1):
					try:
						tmp_device = item.get_device_instance()
						if tmp_device is not None:
							devices[item.pk] = tmp_device
							dt_set = min(dt_set,tmp_device.device.polling_interval)
					except:
						var = traceback.format_exc()
						log.error("exeption while initialisation of %s:%s %s" % (label,os.linesep, var))
				
				
				
				bt = BackgroundTask.objects.get(pk=bt_id)
				bt.timestamp = time.time()
				bt.message = 'running...'
				bt.restart_daemon = False
				bt.save()
				log.notice("reinit of %s daemon done"%label)
				reinit_count = 0
			# process write tasks
			for task in DeviceWriteTask.objects.filter(done=False,start__lte=time.time(),failed=False):
				if not task.variable.scaling is None:
					task.value = task.variable.scaling.scale_output_value(task.value)
				if task.variable.device_id in devices:
					if devices[task.variable.device_id].write_data(task.variable.id,task.value): # do write task
						task.done=True
						task.fineshed=time.time()
						task.save()
						log.notice('changed variable %s (new value %1.6g %s)'%(task.variable.name,task.value,task.variable.unit.description),task.user)
					else:
						task.failed = True
						task.fineshed=time.time()
						task.save()
						log.error('change of variable %s failed'%(task.variable.name),task.user)
				else:
					task.failed = True
					task.fineshed=time.time()
					task.save()
					log.error('device id not valid %d '%(task.variable.device_id),task.user)
	
			# start the read tasks
			data = [[]]
			
			for item in devices.values():
				# todo check for polling interval
				# do actions
				tmp_data = item.request_data() # query data
				if  isinstance(tmp_data,list):
					if len(tmp_data) > 0:
						if len(data[-1])+len(tmp_data) < 998 :
							# add to the last write job
							data[-1] += tmp_data
						else:
							# add to next write job
							data.append(tmp_data)

			# write data to the database
			for item in data:
				RecordedData.objects.bulk_create(item)
			# update the BackgroundTask
			bt = BackgroundTask.objects.get(pk=bt_id)
			bt.timestamp = time.time()
			if dt_set>0:
				bt.load= max(min((time.time()-t_start)/dt_set,1),0)
			else:
				bt.load= 1
			bt.save()
			dt = dt_set -(time.time()-t_start)
			if dt>0:
				time.sleep(dt)
			err_count = 0
		except:
			var = traceback.format_exc()
			err_count +=1
			# write log only
			if err_count <= 3 or err_count%10 == 0:
				log.debug("occ: %d, exeption in %s daemon%s%s %s" % (err_count,label,os.linesep,os.linesep,var),-1)
			if err_count > 100:
				break
					
	## will be called after the stop signal
	try:
		bt = BackgroundTask.objects.get(pk=bt_id)
		bt.timestamp = time.time()
		bt.done = True
		bt.message = 'stopped'
		bt.pid = 0
		bt.save()
	except:
		var = traceback.format_exc()
		log.error("exeption while shootdown of %s:%s %s" % (label,os.linesep, var))
	log.notice("stopped %s execution"%label)
Example #4
def daemon_run(label,handlerClass):
	pid     = str(os.getpid())

	# init daemon
	
	try:
		mh = handlerClass()
		dt_set = mh.dt_set
	except:
		var = traceback.format_exc()
		log.error("exeption while initialisation of %s:%s %s" % (label,os.linesep, var))
		raise
	# register the task in the BackgroundTask list
	bt = BackgroundTask(start=time.time(),label=label,message='daemonized',timestamp=time.time(),pid = pid)
	bt.save()
	bt_id = bt.pk

	# mark the task as running
	bt = BackgroundTask.objects.get(pk=bt_id)
	bt.timestamp = time.time()
	bt.message = 'running...'
	bt.save()

	log.notice("started %s"%label)
	err_count = 0
	# main loop
	while not bt.stop_daemon:
		t_start = time.time()
		if bt.message == 'reinit':
			mh = handlerClass()
			bt = BackgroundTask.objects.get(pk=bt_id)
			bt.timestamp = time.time()
			bt.message = 'running...'
			bt.save()
			log.notice("reinit of %s daemon done"%label)
		try:
			# do actions
			data = mh.run() # query data and write to database
			if data:
				RecordedData.objects.bulk_create(data)
			err_count = 0
		except:
			var = traceback.format_exc()
			err_count +=1
			# write log only
			if err_count <= 3 or err_count == 10 or err_count%100 == 0:
				log.debug("occ: %d, exeption in %s daemon%s%s %s" % (err_count,label,os.linesep,os.linesep,var),-1)
			
			# reinitialize the handler after an error
			mh = handlerClass()
		
		
		# update the BackgroundTask
		bt = BackgroundTask.objects.get(pk=bt_id)
		bt.timestamp = time.time()
		if dt_set>0:
			bt.load= max(min((time.time()-t_start)/dt_set,1),0)
		else:
			bt.load= 1
		bt.save()
		dt = dt_set -(time.time()-t_start)
		if dt>0:
			time.sleep(dt)

	## will be called after the stop signal
	try:
		bt = BackgroundTask.objects.get(pk=bt_id)
		bt.timestamp = time.time()
		bt.done = True
		bt.message = 'stopped'
		bt.pid = 0
		bt.save()
	except:
		var = traceback.format_exc()
		log.error("exeption while shootdown of %s:%s %s" % (label,os.linesep, var))
	log.notice("stopped %s execution"%label)
Example #5
    def run(self):
        """
        this function will be called every self.dt_set seconds
            
        request data
            
        tm_wday 0=Monday 
        tm_yday   
        """
        today = date.today()
        # only start new jobs after the day changed
        if self._currend_day != gmtime().tm_yday:
            self._currend_day = gmtime().tm_yday
            for job in ScheduledExportTask.objects.filter(
                    active=1):  # get all active jobs

                add_task = False
                if job.export_period == 1:  # daily
                    start_time = '%s %02d:00:00' % (
                        (today - timedelta(1)).strftime('%d-%b-%Y'),
                        job.day_time)  # "%d-%b-%Y %H:%M:%S"
                    start_time = mktime(
                        datetime.strptime(start_time,
                                          "%d-%b-%Y %H:%M:%S").timetuple())
                    filename_suffix = 'daily_export_%d_%s' % (job.pk,
                                                              job.label)
                    add_task = True
                elif job.export_period == 2 and gmtime().tm_yday % 2 == 0:  # on even days (2,4,...)
                    start_time = '%s %02d:00:00' % (
                        (today - timedelta(2)).strftime('%d-%b-%Y'),
                        job.day_time)  # "%d-%b-%Y %H:%M:%S"
                    start_time = mktime(
                        datetime.strptime(start_time,
                                          "%d-%b-%Y %H:%M:%S").timetuple())
                    filename_suffix = 'two_day_export_%d_%s' % (job.pk,
                                                                job.label)
                    add_task = True
                elif job.export_period == 7 and gmtime().tm_wday == 0:  # on every Monday
                    start_time = '%s %02d:00:00' % (
                        (today - timedelta(7)).strftime('%d-%b-%Y'),
                        job.day_time)  # "%d-%b-%Y %H:%M:%S"
                    start_time = mktime(
                        datetime.strptime(start_time,
                                          "%d-%b-%Y %H:%M:%S").timetuple())
                    filename_suffix = 'weekly_export_%d_%s' % (job.pk,
                                                               job.label)
                    add_task = True
                elif job.export_period == 14 and gmtime().tm_yday % 14 == 0:  # every 14 days
                    start_time = '%s %02d:00:00' % (
                        (today - timedelta(14)).strftime('%d-%b-%Y'),
                        job.day_time)  # "%d-%b-%Y %H:%M:%S"
                    start_time = mktime(
                        datetime.strptime(start_time,
                                          "%d-%b-%Y %H:%M:%S").timetuple())
                    filename_suffix = 'two_week_export_%d_%s' % (job.pk,
                                                                 job.label)
                    add_task = True
                elif job.export_period == 30 and gmtime().tm_yday % 30 == 0:  # every 30 days
                    start_time = '%s %02d:00:00' % (
                        (today - timedelta(30)).strftime('%d-%b-%Y'),
                        job.day_time)  # "%d-%b-%Y %H:%M:%S"
                    start_time = mktime(
                        datetime.strptime(start_time,
                                          "%d-%b-%Y %H:%M:%S").timetuple())
                    filename_suffix = '30_day_export_%d_%s' % (job.pk,
                                                               job.label)
                    add_task = True

                if job.day_time == 0:
                    end_time = '%s %02d:59:59' % (
                        (today - timedelta(1)).strftime('%d-%b-%Y'), 23
                    )  # "%d-%b-%Y %H:%M:%S"
                else:
                    end_time = '%s %02d:59:59' % (
                        today.strftime('%d-%b-%Y'), job.day_time - 1
                    )  # "%d-%b-%Y %H:%M:%S"
                end_time = mktime(
                    datetime.strptime(end_time,
                                      "%d-%b-%Y %H:%M:%S").timetuple())
                # create ExportTask
                if add_task:

                    et = ExportTask(\
                        label = filename_suffix,\
                        datetime_max = datetime.fromtimestamp(end_time,UTC),\
                        datetime_min = datetime.fromtimestamp(start_time,UTC),\
                        filename_suffix = filename_suffix,\
                        mean_value_period = job.mean_value_period,\
                        file_format = job.file_format,\
                        datetime_start = datetime.fromtimestamp(end_time+60,UTC)\
                        )
                    et.save()

                    et.variables.add(*job.variables.all())

        ## check running tasks and start the next ExportTask
        running_jobs = ExportTask.objects.filter(busy=True, failed=False)
        if running_jobs:
            for job in running_jobs:
                if time() - job.start() < 30:
                    # only check the task once it has been running for longer than 30 s
                    continue

                if job.backgroundtask is None:
                    # if the job has no associated backgroundtask, mark it as failed
                    job.failed = True
                    job.save()
                    continue

                if time() - job.backgroundtask.timestamp < 60:
                    # if the backgroundtask has been updated in the past 60s wait
                    continue

                if job.backgroundtask.pid == 0:
                    # if the job has no valid pid mark as failed
                    job.failed = True
                    job.save()
                    continue

                # check if process is alive
                try:
                    os.kill(job.backgroundtask.pid, 0)
                except OSError:
                    job.failed = True
                    job.save()
                    continue

                if time() - job.backgroundtask.timestamp > 60 * 20:
                    # if there was no update in the last 20 minutes, terminate
                    # the process and mark the job as failed
                    os.kill(job.backgroundtask.pid, 15)
                    job.failed = True
                    job.save()
                    continue

        else:
            # start the next Export Task
            wait_time = 1  # wait one second to start the job
            job = ExportTask.objects.filter(\
                done=False,\
                busy=False,\
                failed=False,\
                datetime_start__lte=datetime.now(UTC)).first()  # get the next pending job
            if job:
                log.debug(' started Timer %d' % job.pk)
                Timer(wait_time, _export_handler, [job, today]).start()
                if job.datetime_start is None:
                    job.datetime_start = datetime.now(UTC)
                job.busy = True
                job.save()

        ## delete all done jobs older than 60 days
        for job in ExportTask.objects.filter(
                done=True,
                busy=False,
                datetime_start__lte=datetime.fromtimestamp(
                    time() - 60 * 24 * 60 * 60, UTC)):
            job.delete()
        ## delete all failed jobs older than 60 days
        for job in ExportTask.objects.filter(
                failed=True,
                datetime_start__lte=datetime.fromtimestamp(
                    time() - 60 * 24 * 60 * 60, UTC)):
            job.delete()
        return None  # because we have no data to store
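
The start_time/end_time values above are built by formatting a date string and immediately parsing it back with strptime/mktime. The sketch below is a hedged equivalent that computes the same local-time export window with plain datetime arithmetic; export_window is an illustrative helper (assuming job.day_time is an hour of the day) and is not part of PyScada.

from datetime import date, datetime, timedelta
from time import mktime


def export_window(day_time, days_back=1, today=None):
    # start: day_time o'clock, days_back days ago; end: one second before
    # day_time o'clock of the current period, mirroring the logic above
    today = today or date.today()
    midnight = datetime.min.time()
    start_dt = datetime.combine(today - timedelta(days_back), midnight) + timedelta(hours=day_time)
    if day_time == 0:
        end_dt = datetime.combine(today - timedelta(1), midnight) + timedelta(hours=23, minutes=59, seconds=59)
    else:
        end_dt = datetime.combine(today, midnight) + timedelta(hours=day_time - 1, minutes=59, seconds=59)
    return mktime(start_dt.timetuple()), mktime(end_dt.timetuple())


# usage: start_time, end_time = export_window(job.day_time, days_back=1)  # daily export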
Example #6
    def run(self):
        """
        this function will be called every self.dt_set seconds
            
        request data
            
        tm_wday 0=Monday 
        tm_yday   
        """
        today = date.today()
        # only start new jobs after the day changed
        if self._currend_day != gmtime().tm_yday:
            self._currend_day = gmtime().tm_yday
            for job in ScheduledExportTask.objects.filter(
                    active=1):  # get all active jobs

                add_task = False
                if job.export_period == 1:  # daily
                    start_time = '%s %02d:00:00' % (
                        (today - timedelta(1)).strftime('%d-%b-%Y'),
                        job.day_time)  # "%d-%b-%Y %H:%M:%S"
                    start_time = mktime(
                        datetime.strptime(start_time,
                                          "%d-%b-%Y %H:%M:%S").timetuple())
                    filename_suffix = 'daily_export_%d_%s' % (job.pk,
                                                              job.label)
                    add_task = True
                elif job.export_period == 2 and gmtime().tm_yday % 2 == 0:  # on even days (2,4,...)
                    start_time = '%s %02d:00:00' % (
                        (today - timedelta(2)).strftime('%d-%b-%Y'),
                        job.day_time)  # "%d-%b-%Y %H:%M:%S"
                    start_time = mktime(
                        datetime.strptime(start_time,
                                          "%d-%b-%Y %H:%M:%S").timetuple())
                    filename_suffix = 'two_day_export_%d_%s' % (job.pk,
                                                                job.label)
                    add_task = True
                elif job.export_period == 7 and gmtime().tm_wday == 0:  # on every Monday
                    start_time = '%s %02d:00:00' % (
                        (today - timedelta(7)).strftime('%d-%b-%Y'),
                        job.day_time)  # "%d-%b-%Y %H:%M:%S"
                    start_time = mktime(
                        datetime.strptime(start_time,
                                          "%d-%b-%Y %H:%M:%S").timetuple())
                    filename_suffix = 'weekly_export_%d_%s' % (job.pk,
                                                               job.label)
                    add_task = True
                elif job.export_period == 14 and gmtime().tm_yday % 14 == 0:  # every 14 days
                    start_time = '%s %02d:00:00' % (
                        (today - timedelta(14)).strftime('%d-%b-%Y'),
                        job.day_time)  # "%d-%b-%Y %H:%M:%S"
                    start_time = mktime(
                        datetime.strptime(start_time,
                                          "%d-%b-%Y %H:%M:%S").timetuple())
                    filename_suffix = 'two_week_export_%d_%s' % (job.pk,
                                                                 job.label)
                    add_task = True
                elif job.export_period == 30 and gmtime().tm_yday % 30 == 0:  # every 30 days
                    start_time = '%s %02d:00:00' % (
                        (today - timedelta(30)).strftime('%d-%b-%Y'),
                        job.day_time)  # "%d-%b-%Y %H:%M:%S"
                    start_time = mktime(
                        datetime.strptime(start_time,
                                          "%d-%b-%Y %H:%M:%S").timetuple())
                    filename_suffix = '30_day_export_%d_%s' % (job.pk,
                                                               job.label)
                    add_task = True

                if job.day_time == 0:
                    end_time = '%s %02d:59:59' % (
                        (today - timedelta(1)).strftime('%d-%b-%Y'), 23
                    )  # "%d-%b-%Y %H:%M:%S"
                else:
                    end_time = '%s %02d:59:59' % (
                        today.strftime('%d-%b-%Y'), job.day_time - 1
                    )  # "%d-%b-%Y %H:%M:%S"
                end_time = mktime(
                    datetime.strptime(end_time,
                                      "%d-%b-%Y %H:%M:%S").timetuple())
                # create ExportTask
                if add_task:
                    if job.mean_value_period == 0:
                        mean_value_period = 5
                    else:
                        mean_value_period = job.mean_value_period

                    et = ExportTask(\
                        label = filename_suffix,\
                        time_max = end_time,\
                        time_min=start_time,\
                        filename_suffix = filename_suffix,\
                        mean_value_period = mean_value_period,\
                        file_format = job.file_format,\
                        start = end_time+60\
                        )
                    et.save()

                    et.variables.add(*job.variables.all())

        ## iterate over all pending ExportTasks
        wait_time = 1  # wait one second to start the job
        for job in ExportTask.objects.filter(
                done=False, busy=False, failed=False,
                start__lte=time()):  # get all jobs
            log.debug(' started Timer %d' % job.pk)
            Timer(wait_time, _export_handler, [job, today]).start()
            job.busy = True
            job.save()

        ## delete all done jobs older than 60 days
        for job in ExportTask.objects.filter(done=True,
                                             busy=False,
                                             start__lte=time() -
                                             60 * 24 * 60 * 60):
            job.delete()
        ## delete all failed jobs older than 60 days
        for job in ExportTask.objects.filter(failed=True,
                                             start__lte=time() -
                                             60 * 24 * 60 * 60):
            job.delete()
        return None  # because we have no data to store
Example #7
def _export_data_to_h5(first_time_id, last_time_id, bf, tp, pre_data):
    tp.timestamp = time()
    tp.message = 'reading time values from SQL'
    tp.save()

    first_time = RecordedTime.objects.get(id=first_time_id)
    time_id_min = BackgroundTask.objects.filter(
        label='data acquisition daemon', start__lte=first_time.timestamp).last()
    if time_id_min:
        time_id_min = RecordedTime.objects.filter(
            timestamp__lte=time_id_min.start).last()
        if time_id_min:
            time_id_min = time_id_min.id
            log.debug(("time_id_min %d to first_time_id %d") %
                      (time_id_min, first_time_id))
        else:
            time_id_min = 1
    else:
        time_id_min = 1

    timevalues = [
        timestamp_unix_to_matlab(element)
        for element in RecordedTime.objects.filter(
            id__range=(first_time_id,
                       last_time_id)).values_list('timestamp', flat=True)
    ]
    time_ids = list(
        RecordedTime.objects.filter(id__range=(first_time_id,
                                               last_time_id)).values_list(
                                                   'id', flat=True))

    tp.timestamp = time()
    tp.message = 'writing time values to file'
    tp.save()

    bf.write_data('time', float64(timevalues))
    bf.reopen()

    data = {}
    active_vars = list(
        Variable.objects.filter(active=1, record=1,
                                client__active=1).values_list('pk', flat=True))
    tp.timestamp = time()
    tp.message = 'reading float data values from SQL'
    tp.save()

    raw_data = list(
        RecordedDataFloat.objects.filter(
            time_id__range=(first_time_id, last_time_id),
            variable_id__in=active_vars).values_list('variable_id', 'time_id',
                                                     'value'))

    tp.timestamp = time()
    tp.message = 'prepare raw float data'
    tp.save()

    for item in raw_data:
        if item[0] not in data:
            data[item[0]] = []
        data[item[0]].append([item[1], item[2]])

    tp.timestamp = time()
    tp.message = 'reading int data values from SQL'
    tp.save()

    raw_data = []
    raw_data = list(
        RecordedDataInt.objects.filter(
            time_id__range=(first_time_id, last_time_id),
            variable_id__in=active_vars).values_list('variable_id', 'time_id',
                                                     'value'))

    tp.timestamp = time()
    tp.message = 'prepare raw int data'
    tp.save()

    for item in raw_data:
        if item[0] not in data:
            data[item[0]] = []
        data[item[0]].append([item[1], item[2]])

    tp.timestamp = time()
    tp.message = 'reading bool data values from SQL'
    tp.save()

    raw_data = []
    raw_data = list(
        RecordedDataBoolean.objects.filter(
            time_id__range=(first_time_id, last_time_id),
            variable_id__in=active_vars).values_list('variable_id', 'time_id',
                                                     'value'))

    tp.timestamp = time()
    tp.message = 'prepare raw bool data'
    tp.save()

    for item in raw_data:
        if item[0] not in data:
            data[item[0]] = []
        data[item[0]].append([item[1], item[2]])

    raw_data = []

    tp.timestamp = time()
    tp.message = 'writing data to file'
    tp.save()

    pre_data = {}
    for var in Variable.objects.filter(active=1, record=1,
                                       client__active=1).order_by('pk'):
        tp.timestamp = time()
        tp.message = 'processing variable_id %d' % var.pk
        tp.save()

        var_id = var.pk
        variable_class = var.value_class
        first_record = False
        if var_id in data:
            records = data[var_id]
            if records[0][0] == first_time_id:
                first_record = True

        else:
            records = []

        if not first_record:
            if var_id in pre_data:
                records.insert(0, pre_data[var_id])
                first_record = True
            else:
                first_record = _last_matching_record(variable_class,
                                                     first_time_id, var_id,
                                                     time_id_min)
                if first_record:
                    records.insert(0, first_record)

        if not first_record and not records:
            tmp = [0] * len(time_ids)
            if variable_class.upper() in ['FLOAT', 'FLOAT64', 'DOUBLE']:
                tmp = float64(tmp)
            elif variable_class.upper() in ['FLOAT32', 'SINGLE', 'REAL']:
                tmp = float32(tmp)
            elif variable_class.upper() in ['INT32']:
                tmp = int32(tmp)
            elif variable_class.upper() in ['WORD', 'UINT', 'UINT16']:
                tmp = uint16(tmp)
            elif variable_class.upper() in ['INT16', 'INT']:
                tmp = int16(tmp)
            elif variable_class.upper() in ['BOOL']:
                tmp = uint8(tmp)
            else:
                tmp = float64(tmp)

            bf.write_data(var.name, tmp)
            bf.reopen()
            continue

        # blow up data: forward-fill sparse records onto the full time-id grid #####

        tmp = [0] * len(time_ids)
        t_idx = 0
        v_idx = 0
        nb_v_idx = len(records) - 1
        for id in time_ids:
            if nb_v_idx < v_idx:
                if t_idx > 0:
                    tmp[t_idx] = tmp[t_idx - 1]
            else:
                if records[v_idx][0] == id:
                    tmp[t_idx] = records[v_idx][1]
                    laid = id
                    v_idx += 1
                elif t_idx > 0:
                    tmp[t_idx] = tmp[t_idx - 1]
                elif records[v_idx][0] <= id:
                    tmp[t_idx] = records[v_idx][1]
                    laid = id
                    v_idx += 1

                if nb_v_idx > v_idx:
                    logged = False
                    while records[v_idx][0] <= id and v_idx <= nb_v_idx:
                        if not logged:
                            log.debug(
                                ("double id %d in var %d") % (id, var_id))
                            logged = True
                        v_idx += 1
            t_idx += 1
        pre_data[var_id] = tmp[-1]
        if variable_class.upper() in ['FLOAT', 'FLOAT64', 'DOUBLE']:
            tmp = float64(tmp)
        elif variable_class.upper() in ['FLOAT32', 'SINGLE', 'REAL']:
            tmp = float32(tmp)
        elif variable_class.upper() in ['INT32']:
            tmp = int32(tmp)
        elif variable_class.upper() in ['WORD', 'UINT', 'UINT16']:
            tmp = uint16(tmp)
        elif variable_class.upper() in ['INT16', 'INT']:
            tmp = int16(tmp)
        elif variable_class.upper() in ['BOOL']:
            tmp = uint8(tmp)
        else:
            tmp = float64(tmp)

        bf.write_data(var.name, tmp)
        bf.reopen()
    return pre_data
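
The "blow up data" loop above expands the sparse (time_id, value) records of each variable onto the full time-id grid, carrying the last known value forward. Below is a minimal sketch of that forward-fill step on plain Python lists; forward_fill is an illustrative name, not part of PyScada.

def forward_fill(time_ids, records, initial=0):
    # records: [[time_id, value], ...] sorted by time_id;
    # returns one value per entry in time_ids, repeating the last seen value
    out = []
    last_value = initial
    idx = 0
    for tid in time_ids:
        # consume every record at or before this time id, keeping the newest value
        while idx < len(records) and records[idx][0] <= tid:
            last_value = records[idx][1]
            idx += 1
        out.append(last_value)
    return out


# usage: column = forward_fill(time_ids, data.get(var_id, []))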
    """