# Exemplo n.º 1
# 0
def tgplot(sensor, starttime, endtime, keys=None):
    """
    DESCRIPTION
       Plotting subroutine: read data of one MQTT sensor between
       starttime and endtime and save a graph to tmppath/tmp.png.

    PARAMETERS
       sensor    : (str) sensor directory name below mqttpath
       starttime : begin of the interval passed to read()
       endtime   : end of the interval passed to read()
       keys      : (list) currently unused; kept for interface compatibility

    RETURNS
       True on success, False if reading or plotting failed
    """
    try:
        data = read(os.path.join(mqttpath, sensor, '*'),
                    starttime=starttime, endtime=endtime)
        # non-interactive backend: this routine runs without a display
        matplotlib.use('Agg')
        mp.plot(data, confinex=True, outfile=os.path.join(tmppath, 'tmp.png'))
        return True
    except Exception:
        # keep best-effort semantics (failure reported via return value),
        # but no longer swallow SystemExit/KeyboardInterrupt (was bare except)
        return False
# Exemplo n.º 2
# 0
def CreateBLVPlot(db,
                  blvname,
                  blvdata,
                  starttime,
                  endtime,
                  plotdir,
                  plttitle,
                  debug=False):
    """
    Read basevalue (BLV) data, apply database flags, fit an adopted
    baseline and save a baseline graph as 'basegraph.png'.

    PARAMETERS
       db        : open database connection (used for flag lookup)
       blvname   : (str) BLV file name; flag identifier is derived from it
       blvdata   : (str) path/pattern of the basevalue data to read
       starttime : begin of the analysis interval
       endtime   : end of the analysis interval (used in the caption)
       plotdir   : (str) output directory for the plot
       plttitle  : (str) plot title
       debug     : (bool) if True, skip writing the plot and print caption

    RETURNS
       caption   : (str) figure caption describing the result
    """
    print(" Loading absolute data: {}".format(blvdata))
    absresult = read(blvdata, starttime=starttime, endtime=endtime)
    absresult = absresult.remove_flagged()
    print(" -> {} data points".format(absresult.length()[0]))
    try:
        # flag identifier: strip component suffix and file extension
        blvflagname = blvname.replace("comp", "").replace(".txt", "")
        flags = db2flaglist(db, blvflagname)
        print("  Obtained {} flags".format(len(flags)))
        if flags:
            absresult = absresult.flag(flags)
            absresult = absresult.remove_flagged()
    except Exception:
        # flag retrieval is best effort - continue with unflagged data
        # (was a bare except)
        print("  flagging failed")
    try:
        absresult = absresult._drop_nans('dx')
        absresult = absresult._drop_nans('dy')
        absresult = absresult._drop_nans('dz')
        print(" -> {} valid data points".format(absresult.length()[0]))
        func = absresult.fit(['dx', 'dy', 'dz'],
                             fitfunc='spline',
                             knotstep=0.3)
        print(" Saving to {}".format(plotdir))
        if not debug:
            mp.plot(absresult, ['dx', 'dy', 'dz'],
                    symbollist=['o', 'o', 'o'],
                    padding=[2.5, 0.005, 2.5],
                    function=func,
                    plottitle=plttitle,
                    outfile=os.path.join(plotdir, 'basegraph.png'))
        caption = "{}: Basevalues and adopted baseline".format(
            datetime.strftime(endtime, "%Y-%m-%d"))
    except Exception:
        # fitting fails for too few valid points - fall back caption
        print("  -> error encountered when fitting data")
        caption = "Not enough data points for creating new baseline graph"
    if debug:
        print("Caption:", caption)

    return caption
# Exemplo n.º 3
# 0
def CreateWebserviceTable(config=None,
                          statusmsg=None,
                          start=None,
                          end=None,
                          debug=False):
    """
    Read SCA gamma and meteo raw data, merge them into one stream,
    attach GAMMASGO meta information and write the result to all
    connected databases.

    PARAMETERS
       config    : (dict) configuration; keys used here: 'gammarawdata',
                   'meteoproducts', 'rcsg0rawdata', 'logname',
                   'conncetedDB', 'testplot'
       statusmsg : (dict) status messages, updated in place and returned
       start     : (datetime) begin of the interval; default: end - 7 days
       end       : (datetime) end of the interval; default: utcnow
       debug     : (bool) verbose output, no database writing

    RETURNS
       statusmsg : (dict) updated status messages
    """
    # BUGFIX: the signature previously used mutable defaults ({}) and
    # evaluated datetime.utcnow() once at import time; use None sentinels
    if config is None:
        config = {}
    if statusmsg is None:
        statusmsg = {}
    if end is None:
        end = datetime.utcnow()
    if start is None:
        start = end - timedelta(days=7)

    # 1. read data
    rawdatapath = config.get('gammarawdata')
    meteopath = config.get('meteoproducts')
    result = DataStream()
    rcsg0path = config.get('rcsg0rawdata')
    name = "{}-servicetables".format(config.get('logname'))
    # NOTE(review): 'conncetedDB' is misspelled, but it must match the key
    # used by the configuration producer - confirm before renaming.
    # BUGFIX: default to {} so len()/iteration below cannot crash on None.
    connectdict = config.get('conncetedDB', {})

    statusmsg[name] = 'gamma webservice table successfully created'

    try:
        if debug:
            print(" -----------------------------------")
            print(" Creating WebService database table")
        print("     -> Reading SCA Gamma data...")
        gammasca = read(os.path.join(rawdatapath, 'COBSEXP_2_*'),
                        starttime=start,
                        endtime=end)
        if gammasca.length()[0] > 0:
            gammasca.header['col-t1'] = 'T (tunnel)'
            gammasca.header['unit-col-t1'] = 'deg C'
        if debug:
            print(gammasca._get_key_headers())
        print("     -> Done")
    except Exception:
        statusmsg[name] = 'gamma table failed - critical'
        gammasca = DataStream()

    try:
        print("     -> Reading meteo data ...")
        meteo = read(os.path.join(meteopath, 'meteo-1min_*'),
                     starttime=start,
                     endtime=end)
        if debug:
            print(meteo._get_key_headers())
        if meteo.length()[0] > 0:
            if debug:
                print(meteo.length())
            meteo._move_column('y', 'var3')
            meteo._drop_column('y')  #rain - keep
            meteo._drop_column('t1')
            meteo._drop_column('var4')
            meteo._move_column('f',
                               't2')  #temp - keep -> add unit and description
            meteo._drop_column('f')
            meteo._drop_column('var2')  # wind direction - remove
            meteo.header['col-t2'] = 'T (outside)'
            meteo.header['unit-col-t2'] = 'deg C'
            meteo.header['col-var3'] = 'rain'
            meteo.header['unit-col-var3'] = 'mm/h'
            # dropping potential string columns
            meteo._drop_column('str2')
            print("     -> Done")
        else:
            statusmsg[name] = 'no meteo data'
            print("     -> Done - no data")
    except Exception:
        statusmsg[name] = 'meteo table failed'
        meteo = DataStream()

    # 2. join with other data from meteo
    if gammasca.length()[0] > 0 and meteo.length()[0] > 0:
        result = mergeStreams(gammasca, meteo)
    elif gammasca.length()[0] > 0:
        result = gammasca.copy()
    else:
        result = DataStream()

    # 3. add new meta information
    result.header['StationID'] = 'SGO'
    result.header['SensorID'] = 'GAMMASGO_adjusted_0001'
    result.header['DataID'] = 'GAMMASGO_adjusted_0001_0001'
    result.header['SensorElements'] = 'Counts,Temp,OutsideTemp,Voltage,rain'
    result.header['SensorKeys'] = 'x,t1,t2,var1,var3'
    result.header['SensorGroup'] = 'services'
    result.header['SensorName'] = 'GAMMASGO'
    result.header['SensorType'] = 'Radiometry'

    if debug:
        print("    Results", result.length())
    if debug and config.get('testplot', False):
        mp.plot(result)

    # 4. export to DB as GAMMASGO_adjusted_0001_0001 in minute resolution
    if not debug:
        if result.length()[0] > 0:
            if len(connectdict) > 0:
                for dbel in connectdict:
                    dbw = connectdict[dbel]
                    name3 = "{}-toDB-{}".format(config.get('logname'), dbel)
                    statusmsg[name3] = 'gamma table successfully written to DB'
                    try:
                        writeDB(dbw, result)
                    except Exception:
                        statusmsg[
                            name3] = 'gamma table could not be written to DB - disk full?'

                    print(
                        "  -> GAMMASGO_adjusted written to DB {}".format(dbel))

    return statusmsg
# Exemplo n.º 4
# 0
def process_data(station, sensortype, sensors, basepath, fileext, date,
                 plotvariables, **kwargs):
    '''
    DEFINITION:
        STANDARD: Reads data, writes prelim data, plots & writes final data.
        OPTIONAL: Baseline & declination correct, multiply stream, upload,
        produce plot of last 7 days.
        NOTE: Files must be saved under proper data format:
        --> /BASEPATH/TO/DATA/IAGA-ST-CODE/INSTRUMENT...
        (e.g: /srv/archive/magnetism/tse/lemi...)
        .../raw
        .../plots
        .../filtered
        Must also have file with header information under
        BASEPATH/CODE/CODE_headers.txt.

    PARAMETERS:
    Variables:
        - station:       (str) IAGA-code for station
        - sensortype:    (str) 'combined' or 'normal'
                         combined = only for magnetic variometer + magnetometer
                         normal = all other data
        - sensors:       (str/list) Sensor name, 'ENV05_1_0001'
                         Note: for 'combined' this is a list:
                         ['LEMI025_22_0001', 'POS1_N432_0001']
        - basepath:      (str) Path to where data is stored.
        - fileext:       (str/list) File extension of data file, e.g. 'bin',
                         'cdf', 'txt'.
                         Note: for 'combined' this is a list: ['bin', 'cdf']
        - date:          (str) Date of data in format %Y-%m-%d / YYYY-MM-DD.
        - plotvariables: (list) List of magpy keys to plot.
    Kwargs:
        - logger:        (logging.Logger object) Logger for logging purposes.
        - decl:          (float) Will rotate data by this value, if given
        - mult_factors:  (dict) Will multiply corresponding stream keys
        - baseline:      (dict) Will baseline correct corresponding keys
        - upload:        (bool) If True, will upload files
        - sevendayplot:  (bool) If True, will produce plot of last 7 days
        - prelim:        (bool) If True, will save prelim file

    RETURNS:
        - True / False
    '''
    decl = kwargs.get('decl')
    mult_factors = kwargs.get('mult_factors')
    baseline = kwargs.get('baseline')
    upload = kwargs.get('upload')
    prelim = kwargs.get('prelim')
    sevendayplot = kwargs.get('sevendayplot')
    logger = kwargs.get('logger')

    if not logger:
        logging.basicConfig(level=logging.INFO)
        logger = logging.getLogger(' %s ' % station.upper())

    # read the station header file ("key: value" per line)
    header_data = {}
    headersfile = os.path.join(basepath, station, '%s_headers.txt' % station)
    # BUGFIX: use a context manager so the file handle is closed
    with open(headersfile, 'r') as headers:
        for line in headers:
            hdata = line.split()
            # guard against blank/malformed lines (previously IndexError);
            # NOTE(review): values containing spaces are truncated to their
            # first token - confirm against the header file format
            if len(hdata) > 1:
                header_data[hdata[0].strip(':')] = hdata[1]

    #--------------------------------------------------------------------
    # 1. READ DATA, REMOVE OUTLIERS, FILTER

    if sensortype == 'combined':
        vario_sensor = sensors[0]
        magn_sensor = sensors[1]
        v_datafile = '%s_%s.%s' % (vario_sensor, date, fileext[0])
        m_datafile = '%s_%s.%s' % (magn_sensor, date, fileext[1])
        v_datapath = os.path.join(basepath, station, vario_sensor, 'raw',
                                  v_datafile)
        m_datapath = os.path.join(basepath, station, magn_sensor, 'raw',
                                  m_datafile)
        logger.info("Reading files %s and %s..." % (v_datafile, m_datafile))
        v_stream = read(v_datapath)
        m_stream = read(m_datapath)
        v_stream = v_stream.filter()
        m_stream = m_stream.filter()
        stream = mergeStreams(v_stream, m_stream)
        stream.header['col-f'] = 'F'
        stream.header['unit-col-f'] = 'nT'
        sensor = vario_sensor
        title = '%s-%s' % (vario_sensor, magn_sensor)

    elif sensortype == 'normal':
        sensor = sensors
        datafile = '%s_%s.%s' % (sensor, date, fileext)
        logger.info("Reading file %s..." % datafile)
        datapath = os.path.join(basepath, station, sensor, 'raw', datafile)
        stream = read(datapath)
        stream = stream.filter()
        title = sensor

    else:
        logger.error(
            "Wrong sensortype (%s). Options are 'combined' and 'normal'." %
            sensortype)
        # BUGFIX: previously execution continued and raised a NameError on
        # the undefined 'stream' below; abort (docstring promises a bool)
        return False

    for data_header in IAGA_headers:
        stream.header[data_header] = header_data[data_header]

    if sensor[:3].lower() == 'lem':
        stream.header['DataType'] = 'Magnetic'
        stream.header['DataComponents'] = 'x, y, z, F [nT]'
        stream.header['DataDigitalSampling'] = '0.1s, 5s'
        # NOTE(review): assumes the compensation header values are numeric
        dx = 1000. * stream.header['DataCompensationX']
        dy = 1000. * stream.header['DataCompensationY']
        dz = 1000. * stream.header['DataCompensationZ']
        stream.header['DataSensorOrientation'] = "%s, %s, %s" % (dx, dy, dz)
    elif sensor[:3].lower() == 'env':
        stream.header['DataType'] = 'Environmental'
        stream.header[
            'DataComponents'] = 'T (ambient) [C], RH [%], T (dewpoint) [C]'
        stream.header['DataDigitalSampling'] = '1s'
        stream._move_column(plotvariables[0], 'x')
        stream._move_column(plotvariables[1], 'y')
        stream._move_column(plotvariables[2], 'z')
        plotvariables = ['x', 'y', 'z']
    elif sensor[:3].lower() == 'cs1':
        stream.header['DataType'] = 'Magnetic'
        stream.header['DataComponents'] = 'F [nT]'
        stream.header['DataDigitalSampling'] = '1s'

    filenamebegins = '%s_0002' % (title)

    if prelim:
        prelim_path = os.path.join(basepath, station, sensor, 'prelim')
        stream.write(prelim_path,
                     filenamebegins=filenamebegins + '_',
                     format_type='IAGA')
        logger.info("Preliminary data written to %s." % prelim_path)

    #--------------------------------------------------------------------
    # 2. (OPTIONAL) ROTATE, MULTIPLY, BASELINE CORRECT
    #    Steps for PRELIMINARY --> FINAL

    if decl:
        stream.rotation(alpha=decl)

    if mult_factors:
        stream.multiply(mult_factors)

    if baseline:
        stream.offset(baseline)

    #--------------------------------------------------------------------
    # 3. PLOT

    sensorpadding = {'env': 0.5, 'pos': 10, 'lem': 5, 'cs1': 10}

    plotname = '%s_%s.png' % (filenamebegins, date)
    outfile = os.path.join(basepath, station, sensor, 'plots', plotname)
    mp.plot(stream,
            plotvariables,
            plottitle='%s %s (%s)' % (station.upper(), title, date),
            bgcolor='white',
            confinex=True,
            fullday=True,
            outfile=outfile,
            padding=sensorpadding[sensor[:3].lower()])

    logger.info("Data plotted to %s." % outfile)

    #--------------------------------------------------------------------
    # 4. SAVE & WRITE STREAM TO MINUTE FILE

    finalpath = os.path.join(basepath, station, sensor, filenamebegins)
    stream.write(finalpath,
                 filenamebegins=filenamebegins + '_',
                 format_type='IAGA')

    logger.info("Final data written to %s." % finalpath)

    #--------------------------------------------------------------------
    # 5. UPLOAD (plot + filtered data)

    cred = 'cobshomepage'
    myproxy = mpcred.lc(cred, 'address')
    login = mpcred.lc(cred, 'user')
    passwd = mpcred.lc(cred, 'passwd')
    port = mpcred.lc(cred, 'port')
    ftppath = 'cmsjoomla/images/stories/currentdata/'

    # NOTE(review): hard-coded override disables the 'upload' kwarg -
    # remove this line once uploading should be re-enabled
    upload = False
    if upload:
        try:
            filtered_file = '%s_%s.txt' % (filenamebegins, date)
            filtered_path = os.path.join(basepath, station, sensor,
                                         filenamebegins, filtered_file)
            logger.info("Uploading %s..." % filtered_path)
            ftpdatatransfer(
                localfile=filtered_path,
                ftppath=ftppath,
                myproxy=myproxy,
                port=port,
                login=login,
                passwd=passwd,
                raiseerror=True,
                logfile=os.path.join(basepath, station,
                                     '%s-transfer.log' % station))
        except Exception:
            logger.error("Uploading failed.")

        try:
            plot_file = '%s_%s.png' % (filenamebegins, date)
            # BUGFIX: referenced undefined 'plotfile' - use plot_file
            plot_path = os.path.join(basepath, station, title, 'plots',
                                     plot_file)
            logger.info("Uploading %s..." % plot_path)
            # BUGFIX: referenced undefined 'plotpath' - use plot_path
            ftpdatatransfer(
                localfile=plot_path,
                ftppath=ftppath,
                myproxy=myproxy,
                port=port,
                login=login,
                passwd=passwd,
                raiseerror=True,
                logfile=os.path.join(basepath, station,
                                     '%s-transfer.log' % station))
        except Exception:
            logger.error("Uploading failed.")

    #--------------------------------------------------------------------
    # 6. CREATE 7-DAY PLOT (x, y, z, F) & UPLOAD

    if sevendayplot:
        today = datetime.utcnow()
        date = datetime.strftime(today, "%Y-%m-%d")
        datapath = os.path.join(basepath, station, sensor, filenamebegins, '*')
        startdate = datetime.strptime(date, '%Y-%m-%d') - timedelta(days=7)
        start = datetime.strftime(startdate, "%Y-%m-%d") + ' 00:00:00'
        end = date + ' 00:00:00'

        last7days = read(path_or_url=datapath, starttime=start, endtime=end)
        plotname = 'TSE_last7days.png'
        plotpath = os.path.join(basepath, station, '7dayplots', plotname)
        # NOTE(review): eval on a header string - safe only if the data
        # file header is trusted; consider ast.literal_eval
        diff = eval(last7days.header['DataSensorOrientation'])
        last7days = last7days.offset(offsets={
            'x': -float(diff[0]),
            'y': -float(diff[1]),
            'z': -float(diff[2])
        })
        last7days = last7days.calc_f()

        fig = mp.plot(last7days, ['x', 'y', 'z', 'f'],
                      plottitle='%s Magnetic Data (%s - %s)' %
                      (station, start[:10], end[:10]),
                      bgcolor='white',
                      noshow=True,
                      padding=5)

        axes = gcf().get_axes()

        # shade weekends green on all axes
        day = datetime.strptime(start, '%Y-%m-%d %H:%M:%S')
        while day <= datetime.strptime(end, '%Y-%m-%d %H:%M:%S'):
            if day.weekday() in [5, 6]:  # Saturday or Sunday
                t_start = day
                t_end = day + timedelta(days=1)
                for ax in axes:
                    ax.axvspan(t_start,
                               t_end,
                               facecolor='green',
                               alpha=0.3,
                               linewidth=0)
            day += timedelta(days=1)

        # 'ax' is the last axis from the loop above - presumably the
        # bottom panel is intended to carry the date formatter
        ax.get_xaxis().set_major_formatter(
            matplotlib.dates.DateFormatter('%d.%b\n%H:%M'))

        # BUGFIX: matplotlib's savefig has no 'savedpi' keyword - use dpi
        plt.savefig(plotpath, dpi=80)
        ftppath = 'zamg/images/graphs/magnetism/'
        oldftppath = 'cmsjoomla/images/stories/currentdata/tse'

        scptransfer(plotpath, '94.136.40.103:' + ftppath, passwd)

    return True
# Exemplo n.º 5
# 0
    deltaF=-0.258,
    diid='A2_WIC.txt',
    pier='A2',
    expD=4.0,
    expI=64.0,
    starttime='2015-06-01',
    endtime='2016-02-01',
    db=db,
    dbadd=True)

# BUGFIX: 'print absresult.length()' was Python 2 syntax (SyntaxError on
# Python 3); use the print function like the rest of the file.
print(absresult.length())

# archive the corrected basevalue results as plain-text PYSTR
absresult.write('/srv/archive/WIC/DI/data',
                coverage='all',
                filenamebegins="BLV_FGE_S0252_0001_GSM90_14245_0002_A2_oc_min",
                format_type="PYSTR",
                mode='replace')

# re-read the just-written corrected file and plot the basevalues
data1 = read(
    '/srv/archive/WIC/DI/data/BLV_FGE_S0252_0001_GSM90_14245_0002_A2_oc_min.txt'
)

mp.plot(data1, ['dx', 'dy', 'dz'], symbollist=['o', 'o', 'o'])

# compare against the uncorrected basevalue file
data2 = read(
    '/srv/archive/WIC/DI/data/BLV_FGE_S0252_0001_GSM90_14245_0002_A2.txt')

sub = subtractStreams(data1, data2)
mp.plot(sub, ['dx', 'dy', 'dz'], symbollist=['o', 'o', 'o'])
#writeDB(db,absresult,'BLV_FGE_S0252_0001_GSM90_14245_0002_A2')
# Exemplo n.º 6
# 0
def main(argv):
    creddb = ''  # c
    dipath = ''  # a
    variolist = ''  # v
    variodataidlist = ''  # j
    scalarlist = ''  # s
    scalardataidlist = ''  # k
    pierlist = ''  # p
    abstypelist = ''  # y
    azimuthlist = ''  # z
    archive = ''  # w   (e.g. /srv/archive)
    identifier = 'BLV'  # f
    stationid = 'wic'  # t
    fallbackvariopath = ''  # o
    fallbackscalarpath = ''  # l
    begin = '1900-01-01'  # b
    end = datetime.strftime(datetime.utcnow(), "%Y-%m-%d")  # e
    expD = 3  # d
    expI = 64  # i
    compensation = False  # m
    rotation = False  # q
    dbadd = False  # n
    addBLVdb = False  # n
    flagging = False  # g
    createarchive = False  # r
    webdir = '/var/www/joomla/images/didaten/'  # TODO add option
    webuser = '******'
    webgroup = 'www-data'
    defaultuser = '******'
    defaultgroup = 'cobs'
    debug = False

    flaglist = []
    keepremote = False
    getremote = False
    remotecred = ''
    remotepath = ''
    variopath = ''  #
    scalarpath = ''  #

    try:
        opts, args = getopt.getopt(
            argv, "hc:a:v:j:s:k:o:mql:b:e:t:z:d:i:p:y:w:f:ngrx:u:D", [
                "cred=", "dipath=", "variolist=", "variodataidlist=",
                "scalarlist=", "scalardataidlist=", "variopath=",
                "compensation=", "rotation=", "scalarpath=", "begin=", "end=",
                "stationid=", "pierlist=", "abstypelist=", "azimuthlist=",
                "expD=", "expI=", "write=", "identifier=", "add2DB=", "flag=",
                "createarchive=", "webdir=", "keepremote", "debug="
            ])
    except getopt.GetoptError:
        print(
            'di.py -c <creddb> -a <dipath> -v <variolist>  -j <variodataidlist> -s <scalarlist> -o <variopath> -m <compensation> -q <rotation> -l <scalarpath> -b <startdate>  -e <enddate> -t <stationid>  -p <pierlist> -z <azimuthlist> -y <abstypelist> -d <expectedD> -i <expectedI> -w <writepath> -f<identifier> -n <add2DB>  -g  <flag> -r <createarchive> -x <webdir> -u <user> --keepremote'
        )
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('-------------------------------------')
            print('Description:')
            print(
                'dianalysis.py reads DI measurements and calculates DI values.'
            )
            print('Provide variometer and scalar data for correction.')
            print('Returns di values, f and collimation angles.')
            print(
                'A number of additional option allow for archiving, validity tests,'
            )
            print(
                'and output redirection. If variometer data is provided, base values'
            )
            print('are calculated. ')
            print('')
            print('-------------------------------------')
            print('Usage:')
            print(
                'di.py -c <creddb> -a <dipath> -v <variolist>  -j <variodataidlist> -s <scalarlist> -o <variopath> -l <scalarpath> -m <compensation> -q <rotation> -b <startdate>  -e <enddate> -t <stationid>  -p <pierlist> -z <azimuthlist> -y <abstypelist> -d <expectedD> -i <expectedI> -w <writepath> -n <add2DB>  -g  <flag> -r <createarchive> -x <webdir> -u <user> --keepremote'
            )
            print('-------------------------------------')
            print('Options:')
            print(
                '-c            : provide the shortcut to the data bank credentials'
            )
            print(
                '-a (required) : path to DI data - can be either a real path to a local directory'
            )
            print(
                '                    or a credential shortcut for a remote connection '
            )
            print(
                '-w (required) : archivepath for writing results and eventually accessing data.'
            )
            print('                e.g. /srv/archive. or /tmp')
            print(
                '                below this folder the following structure will be implemented:'
            )
            print('                -/srv/archive/"StationID"/DI/analyse : ')
            print(
                '                                 folder with raw data to be analyzed'
            )
            print('                -/srv/archive/"StationID"/DI/data : ')
            print(
                '                                 folder with calculated di results'
            )
            print(
                '                                 Name will be set to "BLV_" + variometerID + pier.'
            )
            print(
                '                                 This plain text file can be opend and'
            )
            print(
                '                                 analyzed with the MagPy stream package.'
            )
            print('                -/srv/archive/"StationID"/DI/raw : ')
            print(
                '                             archivefolder with successfully analyzed raw data'
            )
            print(
                '-f            : idetifier for BLV data (in database and filename) - default is BLV'
            )
            print(
                '-v            : variolist - comma separated list of variometer ids'
            )
            print(
                '-j            : variodataidlist - specify the dataids to be used fro each vario'
            )
            print('                Default: 0002 for each variometer sensor')
            print('-o            : path to variometer data')
            print(
                '-m (no input) : apply compensation field values to variometer data'
            )
            print('-q (no input) : apply rotation to variometer data')
            print('-s            : scalarpath - path to scalar data')
            print(
                '-k            : scalardataidlist - specify the dataids to be used fro each scalar'
            )
            print('                Default: 0002 for each scalar sensor')
            print(
                '-b            : startdate - begin of analysis  (not yet active)'
            )
            print(
                '-e            : enddate - default is today (not yet active)')
            print(
                '-t (required) : ID of the station i.e. the Observatory code (required if'
            )
            print('                not in meta data of DI measurements)')
            print(
                '-z            : list of astronomic azimuths of the mark from the measurement pier'
            )
            print(
                '              : Azimuthlist needs either to be empty or to have the same order '
            )
            print('                and length of the pierlist.')
            print(
                '                use "False" if the specific value should be taken from the'
            )
            print('                originalfile/database')
            print('                e.g. -p D3,H6  -z False,156.678')
            print(
                '-y            : comma separated list of absolute data types for each pier'
            )
            print('              : "di" for standard theodolite or "autodif" ')
            print('              : e.g. -p D3,H6 -m di,autodif ')
            print('-d            : expected declination')
            print('-i            : expected inclination')
            print(
                '-p (required) : name/number of the pier, comma separated list'
            )
            print('-n (no input) : add di and basevalues to data base')
            print(
                '-g (no input) : read flaglist from DB if db is opened and add flags'
            )
            print(
                '-r            : move successfully analyzed files to raw archive'
            )
            print('-x            : directory to copy non-analyzed files to.')
            print(
                '              : can be a www directory at which PHP-scripts are used to edit data.'
            )
            print('-u            : define user for which jobs are performed')
            print('              : e.g. cobs:cobsgroup')
            print(
                '--keepremote  : Don t delete remote files after dowloading them'
            )
            print('-------------------------------------')
            print('Examples:')
            print('1. Running on MARCOS servers:')
            print(
                'python di.py -c wic -a cobshome,cobenzlabs -v "FGE_S0252_0001"'
            )
            print('      -s "POS1_N432_0001" -j 0002 -b 2014-01-01')
            print(
                '      -w /media/DAE2-4808/archive -d 3 -i 64 -t wic -p H1,A7,A2,A16'
            )
            print(
                '      -y di,di,di,autodif -z False,179.8978,180.1391,267.3982'
            )
            print('2. Running it with manually provided data links:')
            print(
                'python di.py -c wic -a /media/DAE2-4808/archive/WIC/DI/analyze'
            )
            print(
                '      -v "FGE_S0252_0001" -s "POS1_N432_0001" -j 0002 -b 2014-02-01'
            )
            print(
                '      -e 2014-05-01 -w /media/DAE2-4808/archive -d 3 -i 64 -t wic'
            )
            print('      -p H1,A7,A2,A16 -y di,di,di,autodif -r')
            print('      -z False,179.8978,180.1391,267.3982 -u user:group')
            print('python di.py -c cobs -a cobshomepage,cobenzlabs ')
            print(
                '      -v DIDD_3121331_0002,LEMI025_1_0002 -s DIDD_3121331_0002'
            )
            print('      -j 0001,0001 -b 2014-10-01 -e 2014-10-07')
            print('      -w /srv/archive -d 3 -i 64 -t wik -p D -n -r')
            sys.exit()
        elif opt in ("-c", "--creddb"):
            creddb = arg
        elif opt in ("-a", "--dipath"):
            dipath = arg
        elif opt in ("-w", "--archive"):
            archive = arg
        elif opt in ("-f", "--identifier"):
            identifier = arg
        elif opt in ("-v", "--variolist"):
            variolist = arg.split(',')
        elif opt in ("-j", "--variodataidlist"):
            variodataidlist = arg.split(',')
        elif opt in ("-s", "--scalarlist"):
            scalarlist = arg.split(',')
        elif opt in ("-k", "--scalardataidlist"):
            scalardataidlist = arg.split(',')
        elif opt in ("-o", "--variopath"):
            fallbackvariopath = arg
        elif opt in ("-m", "--compensation"):
            compensation = True
        elif opt in ("-q", "--rotation"):
            rotation = True
        elif opt in ("-l", "--scalarpath"):
            fallbackscalarpath = arg
        elif opt in ("-b", "--begin"):
            begin = arg
        elif opt in ("-e", "--end"):
            end = arg
        elif opt in ("-t", "--stationid"):
            stationid = arg
        elif opt in ("-p", "--pierlist"):
            pierlist = arg.split(',')
        elif opt in ("-z", "--azimuthlist"):
            azimuthlist = arg.split(',')
        elif opt in ("-y", "--abstypelist"):
            abstypelist = arg.split(',')
        elif opt in ("-x", "--webdir"):
            webdir = arg
        elif opt in ("-u", "--user"):
            user = arg.split(':')
            if len(user) > 1:
                defaultuser = user[0]
                defaultgroup = user[1]
        elif opt in ("-d", "--expectedD"):
            try:
                expD = float(arg)
            except:
                print("expected declination needs to be a float")
                sys.exit()
        elif opt in ("-i", "--expectedI"):
            try:
                expI = float(arg)
            except:
                print("expected inclination needs to be a float")
                sys.exit()
        elif opt in ("-n", "--add2db"):
            dbadd = True
            addBLVdb = True
        elif opt in ("-g", "--flag"):
            flagging = True
        elif opt in ("--keepremote"):
            keepremote = True
        elif opt in ("-r", "--createarchive"):
            createarchive = True
        elif opt in ("-D", "--debug"):
            debug = True

    print("-------------------------------------")
    print("Starting di analysis ... MARTAS version {}".format(__version__))
    print("-------------------------------------")

    if dipath == '':
        print(' Specify the path to the DI data: -a /path/to/my/data !')
        print(' -- check dianalysis.py -h for more options and requirements')
        sys.exit()

    if archive == '':
        print(
            ' Specify an Archive path for writing results: -w /path/to/my/archive !'
        )
        print(' -- check dianalysis.py -h for more options and requirements')
        sys.exit()

    if variolist == '':
        variolist = []
    if scalarlist == '':
        scalarlist = []
    if azimuthlist == '':
        azimuthlist = []
    if abstypelist == '' or len(abstypelist) == 0:
        abstypelist = ['di' for elem in pierlist]
    if variodataidlist == '':
        variodataidlist = []
    if scalardataidlist == '':
        scalardataidlist = []
    if len(variodataidlist) == 0:
        variodataidlist = ['0002' for elem in variolist]
    else:
        if not len(variolist) == len(variodataidlist):
            print(
                ' You need to specify a specific DataID for each variometer: e.g. -j 0002,0001'
            )
            print(
                ' -- check dianalysis.py -h for more options and requirements')
            sys.exit()
    # Every scalar instrument needs a DataID revision; default is '0002'.
    # If IDs were given explicitly they must map 1:1 onto scalarlist.
    if len(scalardataidlist) == 0:
        scalardataidlist = ['0002' for elem in scalarlist]
    else:
        if not len(scalarlist) == len(scalardataidlist):
            # BUGFIX: the message previously said "for each variometer"
            # (copy-paste from the variometer check above) although this
            # branch validates the scalar DataID list.
            print(
                ' You need to specify a specific DataID for each scalar: e.g. -j 0002,0001'
            )
            print(
                ' -- check dianalysis.py -h for more options and requirements')
            sys.exit()

    # abstypelist must correspond 1:1 (same order, same length) to pierlist.
    if not len(abstypelist) == 0:
        if not len(abstypelist) == len(pierlist):
            print(
                ' Abstypelist needs to have the same order and length of the pierlist'
            )
            print(
                ' -- check dianalysis.py -h for more options and requirements')
            sys.exit()

    # --- Validate the requested time range (expected format YYYY-MM-DD) ---
    try:
        test = datetime.strptime(begin, "%Y-%m-%d")
        # NOTE(review): debug leftover - echoes the parsed begin date on
        # every run; consider removing.
        print(test)
    except:
        print(' Date format for begin seems to be wrong: -b 2013-11-22')
        print(' -- check dianalysis.py -h for more options and requirements')
        sys.exit()

    try:
        datetime.strptime(end, "%Y-%m-%d")
    except:
        print(' Date format for end seems to be wrong: -e 2013-11-22')
        print(' -- check dianalysis.py -h for more options and requirements')
        sys.exit()

    # At least one measurement pier is required.
    if pierlist == []:
        print(
            ' Specify a list of the measurement piers containing at list one element: -p [Pier2]'
        )
        print(' -- check dianalysis.py -h for more options and requirements')
        sys.exit()

    # If azimuths are given they must correspond 1:1 to pierlist.
    if not len(azimuthlist) == 0:
        if not len(azimuthlist) == len(pierlist):
            print(
                ' Azimuthlist needs to have the same order and length of the pierlist'
            )
            print(
                ' -- check dianalysis.py -h for more options and requirements')
            sys.exit()

    # Station/observatory code is mandatory and normalized to upper case.
    if stationid == '':
        print(' Specify a station name e.g. your observatory code')
        print(' -- check dianalysis.py -h for more options and requirements')
        sys.exit()
    else:
        stationid = stationid.upper()

    # --- Optional database connection via stored MagPy credentials ---
    # mpcred.lc() looks up the individual connection fields for 'creddb'.
    if not creddb == '':
        print("  Accessing data bank ...")
        try:
            db = mysql.connect(host=mpcred.lc(creddb, 'host'),
                               user=mpcred.lc(creddb, 'user'),
                               passwd=mpcred.lc(creddb, 'passwd'),
                               db=mpcred.lc(creddb, 'db'))
            print("  ... success")
        except:
            print("  ... failure - check your credentials")
            sys.exit()
    else:
        # db doubles as a truthiness flag ("database available?") below.
        db = False

    # Fallback file paths (if provided) override the archive-derived
    # vario/scalar paths; ensure a trailing wildcard so read() matches files.
    if not fallbackvariopath == '':
        if not fallbackvariopath.endswith('*'):
            fallbackvariopath = os.path.join(fallbackvariopath, '*')
        variopath = fallbackvariopath

    if not fallbackscalarpath == '':
        if not fallbackscalarpath.endswith('*'):
            fallbackscalarpath = os.path.join(fallbackscalarpath, '*')
        scalarpath = fallbackscalarpath

    # Warnings only - the analysis may still proceed without these sources.
    if variolist == []:
        if fallbackvariopath == '':
            print(
                '  !! You have not provided any variometer information at all')

    if scalarlist == []:
        if fallbackscalarpath == '':
            print('  You have not provided any independent scalar information')
            print(
                '  -> we asume this data is provided along with the DI files')

    # -----------------------------------------------------
    # a) Basic information
    # -----------------------------------------------------
    if debug:
        print(" -------------------------------------")
        print(" Archive", archive)
        print(" Variolist", variolist)
        print(" Abstypelist", abstypelist)
        print(" Dipath", dipath)

    # -----------------------------------------------------
    # b) Getting new raw data from the input server
    # -----------------------------------------------------
    # If dipath is not a local directory it is interpreted as remote access
    # information: "credentialname" or "credentialname,remotepath".
    print(" -------------------------------------")
    print(" Identifying DI data source")
    if not os.path.exists(dipath):
        print("  Checking given DI path for credential information ...")
        try:
            # mpcred.sc() lists all stored credential records.
            credlist = mpcred.sc()
            credshort = [elem[0] for elem in credlist]
            print("  ... found credentials")
        except:
            print(
                " dipath {} not existing - credentials not accessible - aborting"
                .format(dipath))
            sys.exit()
        try:
            dic = dipath.split(',')
            print(dic)
            print(len(dic))
            if len(dic) == 2:
                remotecred = dic[0]
                remotepath = dic[1]
                print(
                    "  Using credentials {} to get DI data from the remote path {}"
                    .format(dic[0], dic[1]))
            elif len(dic) == 1:
                remotecred = dic[0]
                remotepath = ''
                print("  Using credentials {} to get DI data".format(dic[0]))
            else:
                print(
                    "  -> could not interprete dipath in terms of credential information"
                )
                sys.exit()
            if remotecred in credshort:
                getremote = True
            else:
                # NOTE(review): no sys.exit() here - execution falls through
                # to the getremote check below; this presumably relies on
                # getremote being initialized False earlier in the script
                # (above this excerpt) - TODO confirm.
                print(
                    "  -> dipath %s not existing - credentials not existing - aborting"
                    % dipath)
        except:
            print(
                "  -> dipath %s not existing - credentials not existing - aborting"
                % dipath)
            sys.exit()
        if getremote == False:
            sys.exit()
    else:
        print("  Found directory at specified dipath location")

    # Getting data from the webdir (eventually edited and corrected):
    # copy (possibly manually corrected) DI files from the web directory
    # back into the archive's 'analyze' folder, replacing older copies,
    # and remove them from the web directory afterwards.
    if createarchive and not webdir == '':
        print(" -------------------------------------")
        print(" Createarchive and webdir selected ...")
        dipath = os.path.join(archive, stationid, 'DI', 'analyze')
        for pier in pierlist:
            # DI file naming convention: <pier>_<stationid>.txt
            diid = pier + '_' + stationid + '.txt'
            for infile in iglob(os.path.join(webdir, '*' + diid)):
                # Testing whether file exists:
                if os.path.exists(
                        os.path.join(dipath,
                                     os.path.split(infile)[1])):
                    print("  Deleting:",
                          os.path.join(dipath,
                                       os.path.split(infile)[1]))
                    os.remove(os.path.join(dipath, os.path.split(infile)[1]))
                print("  Retrieving from webdir: ", infile)
                shutil.copy(infile, dipath)
                # Setting permission to defaultuser even if started the job
                # as a different user (e.g. root via cron).
                uid = pwd.getpwnam(defaultuser)[2]
                gid = grp.getgrnam(defaultgroup)[2]
                os.chown(os.path.join(dipath,
                                      os.path.split(infile)[1]), uid, gid)
                # Deleting file from web dir
                try:
                    os.remove(os.path.join(webdir, os.path.split(infile)[1]))
                except:
                    # Best effort - the web directory may be read-only for us.
                    print("  !! No permissions to modify webdirectory")
                    pass

    # Fetch DI files from the remote (FTP) source identified via the
    # credential record above into the archive's 'analyze' folder.
    # (Original comment said "web directory", but this branch handles the
    # getremote/FTP case.)
    if getremote:
        # Downloaded files are deleted from the remote host unless
        # --keepremote was given.
        delete = True
        if keepremote:
            delete = False
        print(" -------------------------------------")
        print(
            " Getting remote data - deleting downloaded data from remote source set to {}"
            .format(delete))
        dipath = os.path.join(archive, stationid, 'DI', 'analyze')
        for pier in pierlist:
            if not os.path.exists(dipath):
                os.makedirs(dipath)
            diid = pier + '_' + stationid + '.txt'
            try:
                port = mpcred.lc(remotecred, 'port')
            except:
                # Default FTP port when the credential record has none.
                port = 21
            ftpget(mpcred.lc(remotecred, 'address'),
                   mpcred.lc(remotecred, 'user'),
                   mpcred.lc(remotecred, 'passwd'),
                   remotepath,
                   os.path.join(archive, stationid, 'DI', 'analyze'),
                   diid,
                   port=port,
                   delete=delete)

    if debug:
        print(" -------------------------------------")
        print(
            " DI data defined and collected - now starting the analysis for variometer: {}"
            .format(variolist))

    print(" ")

    # -----------------------------------------------------
    # c) analyze all files in the local analysis directory and put successfully analyzed data to raw
    # -----------------------------------------------------
    # For every pier x variometer x scalar combination: resolve data paths,
    # run absoluteAnalysis(), write/sort the BLV result files, optionally
    # store them in the database, apply flags and produce an overview plot.
    for pier in pierlist:
        print("######################################################")
        print("Starting analysis for pier ", pier)
        print("######################################################")
        abspath = dipath
        # DI file naming convention: <pier>_<stationid>.txt
        diid = pier + '_' + stationid + '.txt'
        for vario in variolist:
            # Prefer archive-structured data (<archive>/<station>/<sensor>/
            # <sensor>_<dataid>/); fall back to treating 'vario' as a path.
            dataid = variodataidlist[variolist.index(vario)]
            if os.path.exists(
                    os.path.join(archive, stationid, vario,
                                 vario + '_' + dataid)):
                variopath = os.path.join(archive, stationid, vario,
                                         vario + '_' + dataid, vario + '*')
            else:
                variopath = vario
                if not os.path.exists(variopath):
                    print(
                        " -> No variometerdata found in the specified paths/IDs - using dummy path"
                    )
                    variopath = '/tmp/*'
            print(" -> Using Variometerdata at:", variopath)
            for scalar in scalarlist:
                # Define paths for variometer and scalar data
                scalarid = scalardataidlist[scalarlist.index(scalar)]
                if os.path.exists(
                        os.path.join(archive, stationid, scalar,
                                     scalar + '_' + scalarid)):
                    scalarpath = os.path.join(archive, stationid, scalar,
                                              scalar + '_' + scalarid,
                                              scalar + '*')
                else:
                    scalarpath = scalar
                    if not os.path.exists(scalarpath):
                        print(
                            " -> No scalar data found in the specified paths/IDs - using dummy path"
                        )
                        scalarpath = '/tmp/*'
                print(" -> Using Scalar data at:", scalarpath)
                # ALPHA and delta needs to be provided with the database

                print(" -------------------------------------")
                print(
                    " Extracting delta and rotation parameters ... should not be necessary as this is be done by absoluteAnalysis provided a database is connected"
                )
                # Correction parameters are currently fixed at zero;
                # absoluteAnalysis is expected to obtain them from the DB.
                deltaF = 0.0
                alpha = 0.0
                beta = 0.0
                # NOTE(review): the triple-quoted string below is disabled
                # legacy code for deriving alpha/beta/deltaF from DB or
                # file headers; kept verbatim.
                """
                if db:
                    print(" ")
                    alpha =  dbgetfloat(db, 'DATAINFO', vario, 'DataSensorAzimuth')
                    if not isNumber(alpha):
                        alpha = 0.0
                    beta =  dbgetfloat(db, 'DATAINFO', vario, 'DataSensorTilt')
                    if not isNumber(beta):
                        beta = 0.0
                    deltaF =  dbgetfloat(db, 'DATAINFO', scalar, 'DataDeltaF')
                    if not isNumber(deltaF):
                        deltaF = 0.0
                else:
                    # eventually add an input option
                    # load a scalar file from path and get delta F from header
                    try:
                        scal = read(scalarpath,starttime=begin,endtime=begin)
                        try:
                            scal = applyDeltas(db,scal)
                            deltaF = 0.0
                        except:
                            deltaF = scal.header['DataDeltaF']
                    except:
                        deltaF = 0.0
                    try:
                        var = read(variopath,starttime=begin,endtime=begin)
                        try:
                            var = applyDeltas(db,var)
                        except:
                            pass
                        # TODO this is wrong -> but clarify whether a correction is necessary at all 
                        alpha = var.header['DataSensorAzimuth']
                        beta = var.header['DataSensorTilt']
                    except:
                        alpha = 0.0
                        beta = 0.0
                print("using alpha, beta, deltaF:", alpha, beta, deltaF)
                """

                print(" -------------------------------------")
                print(
                    " Extracting azimuth data ... should be contained in DI files, can be provided as option, is contained in PIERS table of DB"
                )
                # Azimuths are usually contained in the DI files
                ## Eventually overriding azimuths in DI files
                if len(azimuthlist) > 0:
                    azimuth = azimuthlist[pierlist.index(pier)]
                    # The string 'False'/'false' disables the override.
                    if azimuth == 'False' or azimuth == 'false':
                        azimuth = False
                else:
                    azimuth = False
                if azimuth:
                    print(
                        " -> Overriding (eventual) DI files data with an azimuth of {} deg"
                        .format(azimuth))
                else:
                    print(" -> Using azimuth from DI file")
                # Same override mechanism for the absolute measurement type.
                if len(abstypelist) > 0:
                    abstype = abstypelist[pierlist.index(pier)]
                    if abstype == 'False' or abstype == 'false':
                        abstype = False
                else:
                    abstype = False
                if abstype:
                    print(" -> Selected type of absolute measurements is {}".
                          format(abstype))
                else:
                    print(" -> Absolute measurement type taken from DI file")
                # TODO ... Get azimuth data from PIERS table
                if db:
                    # Informational only - the value is printed but not used.
                    print(
                        " Checking azimuth in PIERS table of the database ...")
                    val = dbselect(db, 'AzimuthDictionary', 'PIERS',
                                   'PierID like "{}"'.format(pier))[0]
                    print("Found ", val)

                print(" -------------------------------------")
                # Move the DI raw files to the raw archive only on the very
                # last vario/scalar combination, so the earlier combinations
                # still find the files in the analyze directory.
                movetoarchive = False
                if createarchive and variolist.index(
                        vario) == len(variolist) - 1 and scalarlist.index(
                            scalar) == len(scalarlist) - 1:
                    print(
                        " Running analysis - and moving successfully analyzed files to raw directory"
                    )
                    movetoarchive = os.path.join(archive, stationid, 'DI',
                                                 'raw')
                else:
                    print(
                        " Running analysis - and keeping files in analyze directory"
                    )
                # Core DI analysis - returns a stream of basevalues.
                absstream = absoluteAnalysis(abspath,
                                             variopath,
                                             scalarpath,
                                             expD=expD,
                                             expI=expI,
                                             diid=diid,
                                             stationid=stationid,
                                             abstype=abstype,
                                             azimuth=azimuth,
                                             pier=pier,
                                             alpha=alpha,
                                             deltaF=deltaF,
                                             starttime=begin,
                                             endtime=end,
                                             db=db,
                                             dbadd=dbadd,
                                             compensation=compensation,
                                             magrotation=rotation,
                                             movetoarchive=movetoarchive,
                                             deltaD=0.0000000001,
                                             deltaI=0.0000000001)
                print(" -> Done")

                # -----------------------------------------------------
                # d) write data to a file and sort it, write it again
                #          (workaround to get sorting correctly)
                # -----------------------------------------------------
                print(" -------------------------------------")
                if absstream and absstream.length()[0] > 0:
                    print(" Writing {} data line(s) ...".format(
                        absstream.length()[0]))
                    absstream.write(os.path.join(archive, stationid, 'DI',
                                                 'data'),
                                    coverage='all',
                                    mode='replace',
                                    filenamebegins=identifier + '_' + vario +
                                    '_' + scalar + '_' + pier)
                    try:
                        # Reload all data, delete old file and write again to get correct ordering
                        newabsstream = read(
                            os.path.join(
                                archive, stationid, 'DI', 'data', identifier +
                                '_' + vario + '_' + scalar + '_' + pier + '*'))
                        os.remove(
                            os.path.join(
                                archive, stationid, 'DI', 'data',
                                identifier + '_' + vario + '_' + scalar + '_' +
                                pier + '.txt'))  # delete file from hd
                        newabsstream.write(os.path.join(
                            archive, stationid, 'DI', 'data'),
                                           coverage='all',
                                           mode='replace',
                                           filenamebegins=identifier + '_' +
                                           vario + '_' + scalar + '_' + pier)
                    except:
                        print(" Stream apparently not existing...")
                    print(" -> Done")
                    if addBLVdb:
                        # SensorID necessary....
                        print(" Adding data to the data bank ... ")
                        #newabsstream.header["SensorID"] = vario
                        writeDB(db,
                                absstream,
                                tablename=identifier + '_' + vario + '_' +
                                scalar + '_' + pier)
                        #stream2db(db,newabsstream,mode='force',tablename=identifier+'_'+vario+'_'+scalar+'_'+pier)
                        print(" -> Done")

        # -----------------------------------------------------
        # f) get flags and apply them to data
        # -----------------------------------------------------
                    # NOTE(review): if addBLVdb is False, neither branch
                    # below runs and 'flaglist'/'newabsstream' may be unbound
                    # at the len(flaglist) check - TODO confirm whether they
                    # are initialized above this excerpt.
                    if db and flagging and addBLVdb:
                        newabsstream = readDB(
                            db, identifier + '_' + vario + '_' + scalar + '_' +
                            pier)
                        flaglist = db2flaglist(
                            db, identifier + '_' + vario + '_' + scalar + '_' +
                            pier)
                    elif addBLVdb:
                        newabsstream = readDB(
                            db, identifier + '_' + vario + '_' + scalar + '_' +
                            pier)
                        flaglist = []
                    if len(flaglist) > 0:
                        flabsstream = newabsstream.flag(flaglist)
                        #for i in range(len(flaglist)):
                        #    flabsstream = newabsstream.flag_stream(flaglist[i][2],flaglist[i][3],flaglist[i][4],flaglist[i][0],flaglist[i][1])
                        flabsstream.write(os.path.join(archive, stationid,
                                                       'DI', 'data'),
                                          coverage='all',
                                          filenamebegins=identifier + '_' +
                                          vario + '_' + scalar + '_' + pier)
                        pltabsstream = flabsstream.remove_flagged()

        # -----------------------------------------------------
        # h) fit baseline and plot
        # -----------------------------------------------------
                    # Plot roughly the last year of basevalues; any failure
                    # (e.g. pltabsstream unbound when no flags existed) is
                    # deliberately swallowed so plotting never stops the run.
                    try:
                        #pltabsstream = read(os.path.join(archive,stationid,'DI','data',identifier+'_'+vario+'_'+scalar+'_'+pier+'*'))
                        pltabsstream.trim(starttime=datetime.utcnow() -
                                          timedelta(days=380))
                        # fit baseline using the parameters defined in db (if parameters not available then skip fitting)
                        #absstream = absstream.fit(['dx','dy','dz'],poly,4)
                        savename = identifier + '_' + vario + '_' + scalar + '_' + pier + '.png'
                        #absstream = absstream.extract('f',98999,'<')
                        mp.plot(pltabsstream, ['dx', 'dy', 'dz'],
                                symbollist=['o', 'o', 'o'],
                                plottitle=vario + '_' + scalar + '_' + pier,
                                outfile=os.path.join(archive, stationid, 'DI',
                                                     'graphs', savename))
        #absstream.plot(['dx','dy','dz'],symbollist=['o','o','o'],plottitle=vario+'_'+scalar+'_'+pier,outfile=os.path.join(archive,stationid,'DI','graphs',savename))
                    except:
                        pass

    # -----------------------------------------------------
    # j) move files from analyze folder to web folder
    # -----------------------------------------------------
    # move only if createarchive is selected
    # Publishes the remaining analyze files to the web directory and fixes
    # their ownership so the web user can edit them.
    if createarchive:
        print(" -------------------------------------")
        print(
            " Dont mind the error message - works only if su at cron is running this job"
        )
        filelst = []
        for infile in iglob(
                os.path.join(archive, stationid, 'DI', 'analyze', '*.txt')):
            print("Processing ", infile)
            filelst.append(infile)
            # NOTE(review): hard-coded web destination; webdir from the
            # options is not used here - TODO confirm this is intended.
            destination = '/var/www/joomla/images/didaten/'
            infilename = os.path.split(infile)
            print(infilename)
            try:
                shutil.copy(infile, destination)
                #perform changes to privs
                if not webuser == '':
                    uid = pwd.getpwnam(webuser)[2]
                    gid = grp.getgrnam(webgroup)[2]
                    os.chown(os.path.join(destination, infilename[1]), uid,
                             gid)
            except:
                # Best effort - insufficient permissions are not fatal.
                print("Webdir not accessible - finishing")
                pass

    print("----------------------------------------------------------------")
    print("di.py app finished")
    print("----------------------------------------------------------------")
    print("SUCCESS")