Code example #1 (score: 0)
File: importer.py — Project: ddsc/ddsc-worker
def import_csv(src, usr_id):
    """Import a CSV file of timeseries into Cassandra on behalf of a user.

    The file is converted to a timeseries object, grouped by SensorID, and
    each group is written if the user is authorized for that sensor.
    Depending on how many groups were rejected, the file is moved to the
    OK or ERROR location.

    :param src: path of the CSV file to import.
    :param usr_id: primary key of the importing ``User``.
    :raises Exception: if all, or some, of the timeseries were rejected
        because of authorization.
    """
    usr = User.objects.get(pk=usr_id)
    tsobj = data_convert(src)
    tsgrouped = tsobj.groupby('SensorID')
    # Keep the count as an int; compare counts directly instead of the
    # former str(auth_tag) == str(nr) round-trip.
    nr = len(tsgrouped)
    # Lazy %-args: the message is only formatted if DEBUG is enabled.
    logger.debug('There are %r timeseries in file : %r', nr, src)
    auth_tag = 0  # number of groups rejected for lack of authorization
    for name, tsobj_grouped in tsgrouped:
        remoteid = tsobj_grouped['SensorID'][0]
        ts = get_auth(usr, remoteid)
        if ts is False:
            logger.debug(
                '[x] Timeseries %r has been rejected because of authorization',
                remoteid
            )
            auth_tag += 1
        else:
            write2_cassandra(tsobj_grouped, ts, src)
    if auth_tag == nr:
        # Every single group was rejected: the whole file failed.
        logger.error('[x] File:--%r-- has been fully rejected', src)
        data_move(src, ERROR_file)
        raise Exception(
            "[x] In : %r  All of the timeseries failed"
            " to be imported because of auth" % (src))
    elif auth_tag == 0:
        data_move(src, OK_file)
        logger.info('[x] File:--%r-- has been successfully imported', src)
    else:
        # Partial success still counts as an error for the file as a whole.
        data_move(src, ERROR_file)
        logger.warning('[x] File:--%r-- has been only partly imported', src)
        raise Exception(
            "[x] In : %r  Some of the timeseries failed"
            " to be imported because of auth" % (src))
Code example #2 (score: 0)
File: importer.py — Project: ddsc/ddsc-worker
def import_pi_xml(src, usr_id):
    """Import a PI-XML file of timeseries on behalf of a user.

    Every series in the file is matched to a timeseries via a composite
    remote id (location::parameter::unit::divider::multiplier).  The first
    unauthorized series or write failure aborts the whole import and moves
    the file to the ERROR location.

    :param src: path of the PI-XML file to import.
    :param usr_id: primary key of the importing ``User``.
    :raises Exception: on an authorization failure or a write failure.
    """
    logger.info("[x] Importing %r", src)
    reader = PiXmlReader(src)

    usr = User.objects.get(pk=usr_id)

    for md, df in reader.get_series():
        loc = md['header']['locationId']  # required
        para = md['header']['parameterId']  # required
        unit = md['header']['timeStep']['@unit']  # required
        div = md['header']['timeStep'].get('@divider', '')  # optional
        mul = md['header']['timeStep'].get('@multiplier', '')  # optional
        remote_id = loc + '::' + para + '::' + unit + '::' + div + '::' + mul
        ts = get_auth(usr, remote_id)
        if ts is False:
            data_move(src, ERROR_file)
            logger.error(
                '[x] File:--%r-- has been rejected because of authorization',
                src)
            raise Exception("[x] %r _FAILED to be imported" % (src))
        else:
            # The 'flag' column is dropped outside the try so a missing
            # column surfaces as its own error, not as a write failure.
            del df['flag']
            try:
                ts.set_events(df)
                ts.save()
                logger.debug("[x] %r _written", src)
            except Exception as exc:
                # Narrowed from a bare except: so KeyboardInterrupt and
                # SystemExit are no longer swallowed; logger.exception
                # records the traceback.
                logger.exception(
                    "[x] %r _FAILED to be written to cassandra", src)
                data_move(src, ERROR_file)
                raise Exception(
                    'piXML file: %r ERROR to convert!' % src) from exc

    data_move(src, OK_file)
    logger.info('[x] File:--%r-- has been successfully imported', src)
Code example #3 (score: 0)
File: importer.py — Project: ddsc/ddsc-worker
def import_file(src, filename, dst, usr_id):
    """Register a single data file as a timeseries event for a user.

    The file's timestamp and remote id are derived from its name; if the
    user is authorized, the file is stored under a per-timeseries,
    per-date directory and recorded as an event.  Otherwise the file is
    moved to the ERROR location and an exception is raised.
    """
    usr = User.objects.get(pk=usr_id)
    logger.debug("[x] Importing %r to DB" % filename)
    timestamp = get_timestamp_by_filename(filename)

    # TODO: Shaoqing: remoteid is not guaranteed to be unique.
    # It was chosen by the supplier of the data. IMHO you
    # should use the uuid.
    remoteid = get_remoteid_by_filename(filename)
    ts = get_auth(usr, remoteid)

    source_path = src + filename

    # Guard clause: bail out early when authorization fails.
    if ts is False:
        logger.error('[x] File:--%r-- has been rejected' % source_path)
        data_move(source_path, ERROR_file)
        raise Exception("[x] %r _FAILED to be imported" % source_path)

    # Destination: <dst><uuid>/<year>-<month>-<day>/<filename>
    store_dst = '%s%s/%s-%s-%s/' % (
        dst, ts.uuid,
        timestamp.year[0], timestamp.month[0], timestamp.day[0])
    store_dstf = store_dst + filename

    ts.set_event(timestamp[0], {"value": store_dstf})
    ts.save()
    data_move(source_path, store_dst)
    logger.info(
        '[x] File:--%r-- has been successfully imported',
        source_path)
Code example #4 (score: 0)
File: importer.py — Project: ddsc/ddsc-worker
def import_lmw(DestinationPath, admFileName, datFileName, kwaFileName):
    """Import an LMW file triplet (adm/dat/kwa) into Cassandra.

    Series for unknown stations are skipped with a warning rather than
    raising, because new measuring stations may appear at any time
    without notice.  All three files are moved to OK only when every
    station was known; otherwise they all go to ERROR.
    """
    ## get the user which in this case should be LMW I guess
    usr = User.objects.get(username='******')
    adm_src = admFileName
    dat_src = datFileName
    kwa_src = kwaFileName

    grouped = read_lmw(adm_src, dat_src, kwa_src).groupby('SensorID')
    count = str(len(grouped))
    logger.debug('There are %r timeseries in file : %r' % (count, adm_src))

    without_errors = True
    for _, group in grouped:
        sensor_id = group['SensorID'][0]
        ts = get_auth(usr, sensor_id)  # user object and remote id
        # New measuring stations may appear at any time without notice.
        # It seems appropriate to import all known stations and skip
        # unkown stations instead of raising an exception.
        if ts is False:
            without_errors = False
            logger.warning(
                "Unknown ID %s in file %s.",
                sensor_id, adm_src)
        else:
            write2_cassandra(group, ts, dat_src)

    target = OK_file if without_errors else ERROR_file
    for path in (adm_src, dat_src, kwa_src):
        data_move(path, target)

    if without_errors:
        logger.info(
            '[x] File:--%r-- has been successfully imported',
            adm_src)
    else:
        logger.error(
            '[x] File:--%r-- has not been successfully imported',
            adm_src)