Example #1
def connectclient(broker='localhost',
                  port=1883,
                  timeout=60,
                  credentials='',
                  user='',
                  password='',
                  qos=0):
    """
    connectclient method
    used to connect to a specific client as defined by the input variables
    eventually add multiple client -> {"clients":[{"broker":"192.168.178.42","port":"1883"}]} # json type
                import json
                altbro = json.loads(altbrocker)
        """
    clientid = "{}".format(broker)
    client = mqtt.Client(clientid, False)
    # Authentication part
    if not credentials in ['', '-']:
        # use user and pwd from credential data if not yet set
        if user in ['', None, 'None', '-']:
            user = mpcred.lc(credentials, 'user')
        if password in ['', '-']:
            password = mpcred.lc(credentials, 'passwd')
    if not user in ['', None, 'None', '-']:
        #client.tls_set(tlspath)  # check http://www.steves-internet-guide.com/mosquitto-tls/
        client.username_pw_set(
            user, password=password
        )  # defined on broker by mosquitto_passwd -c passwordfile user
    client.on_connect = on_connect
    # on message needs: stationid, destination, location
    client.on_message = on_message
    client.connect(broker, port, timeout)
    return client
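
A minimal usage sketch (not part of the original example): it assumes paho-mqtt 1.x (the callback API used by connectclient above) and simple on_connect/on_message callbacks; the broker address and the credential shortcut 'mybroker' are placeholders, not taken from the source.

# Hypothetical usage of connectclient() - callback names, broker address and the
# 'mybroker' credential shortcut are illustrative assumptions only.
import paho.mqtt.client as mqtt
import magpy.opt.cred as mpcred

def on_connect(client, userdata, flags, rc):
    print("connected with result code {}".format(rc))
    client.subscribe("#")           # listen to all topics for a quick test

def on_message(client, userdata, msg):
    print("{}: {}".format(msg.topic, msg.payload))

client = connectclient(broker='192.168.178.42', credentials='mybroker')
client.loop_forever()               # block and dispatch incoming messages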
Example #2
    def __init__(self, client, sensordict, confdict):
        self.client = client #self.wsMcuFactory = wsMcuFactory
        self.sensordict = sensordict
        self.confdict = confdict
        self.count = 0  ## counter for sending header information
        self.sensor = sensordict.get('sensorid')
        self.hostname = socket.gethostname()
        self.printable = set(string.printable)
        self.datalst = []
        self.datacnt = 0
        self.metacnt = 10

        self.sensorlist = []
        self.revision = self.sensordict.get('revision','')
        try:
            self.requestrate = int(self.sensordict.get('rate','-'))
        except:
            self.requestrate = 30

        self.deltathreshold = confdict.get('timedelta')

        # debug mode
        debugtest = confdict.get('debug')
        self.debug = False
        if debugtest == 'True':
            log.msg('     DEBUG - {}: Debug mode activated.'.format(self.sensordict.get('protocol')))
            self.debug = True    # prints many test messages
        else:
            log.msg('  -> Debug mode = {}'.format(debugtest))

        # QOS
        self.qos=int(confdict.get('mqttqos',0))
        if not self.qos in [0,1,2]:
            self.qos = 0
        log.msg("  -> setting QOS:", self.qos)

        # Database specific
        self.db = self.sensor
        # get existing sensors for the relevant board
        log.msg("  -> IMPORTANT: MySQL assumes that database credentials ")
        log.msg("     are saved locally using magpy.opt.cred with the same name as database")
        try:
            self.db = mdb.mysql.connect(host=mpcred.lc(self.sensor,'host'),user=mpcred.lc(self.sensor,'user'),passwd=mpcred.lc(self.sensor,'passwd'),db=self.sensor)
            self.connectionMade(self.sensor)
        except:
            self.connectionLost(self.sensor,"Database could not be connected - check existance/credentials")
            return

        sensorlist = self.GetDBSensorList(self.db, searchsql='')
        self.sensor = ''
        existinglist = acs.GetSensors(confdict.get('sensorsconf'),identifier='$')

        # if there is a sensor in existinglist which is not an active sensor, then drop it
        for sensdict in existinglist:
            if sensdict.get('sensorid','') in sensorlist:
                self.sensorlist.append(sensdict) 
        
        self.lastt = [None]*len(self.sensorlist)
Example #3
def ConnectDB(creddb):
    print("  Accessing data bank ...")
    try:
        db = mysql.connect(host=mpcred.lc(creddb, 'host'),
                           user=mpcred.lc(creddb, 'user'),
                           passwd=mpcred.lc(creddb, 'passwd'),
                           db=mpcred.lc(creddb, 'db'))
        print("  ... success")
        return (db)
    except:
        print("  ... failure - check your credentials")
        sys.exit()
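
A possible call of ConnectDB(), assuming a credential shortcut (here 'cobsdb') that stores host, user, passwd and db; the returned object is a regular MySQL connection, so the standard cursor API applies.

# Hypothetical usage - 'cobsdb' is an assumed credential shortcut created with addcred.py.
db = ConnectDB('cobsdb')
cur = db.cursor()
cur.execute("SELECT VERSION()")            # simple sanity check of the connection
print("  Server version: {}".format(cur.fetchone()[0]))
db.close()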
Example #4
def ConnectDB(dbcred):
    # Connect to test database
    # ------------------------------------------------------------
    dbname = mpcred.lc(dbcred,'db')
    dbhost = mpcred.lc(dbcred,'host')
    dbpasswd = mpcred.lc(dbcred,'passwd')
    dbuser = mpcred.lc(dbcred,'user')
    print (dbname,dbhost,dbuser)
    try:
        print("Connecting to DATABASE...")
        db = mpdb.mysql.connect(host="localhost",user=dbuser,passwd=dbpasswd,db=dbname)
        print("... success")
    except:
        print("... failed")
        db = None
    return db
Example #5
def connectDB(cred, exitonfailure=True, report=True):

    if report:
        print("  Accessing data bank... ")
    try:
        db = mysql.connect(host=mpcred.lc(cred, 'host'),
                           user=mpcred.lc(cred, 'user'),
                           passwd=mpcred.lc(cred, 'passwd'),
                           db=mpcred.lc(cred, 'db'))
        if report:
            print("   -> success. Connected to {}".format(mpcred.lc(
                cred, 'db')))
    except:
        if report:
            print("   -> failure - check your credentials / databank")
        if exitonfailure:
            sys.exit()

    return db
Example #6
def CheckConfiguration(config={}, debug=False):
    """
    DESCRIPTION
        configuration data will be checked
    """

    user = ''
    password = ''
    address = ''
    destination = ''
    source = ''
    port = 21
    success = True

    if debug:
        print("  Checking configuration data")

    creddb = config.get('dbcredentials')
    if config.get('rawpath') == '' and creddb == '':
        print(
            'Specify either a shortcut to the credential information of the database or a local path:'
        )
        print('-- check collectfile.py -h for more options and requirements')
        success = False
        #sys.exit()
    if config.get('rawpath') == '':
        destination = tempfile.gettempdir()
    else:
        if not os.path.isdir(config.get('rawpath')):
            print("Destination directory {} not existing. Creating it".format(
                config.get('rawpath')))
            os.makedirs(config.get('rawpath'))
        destination = config.get('rawpath')
    config['destination'] = destination

    credtransfer = config.get('sourcecredentials')
    if not credtransfer == '':
        if debug:
            print("   - checking credentials for remote access")
        user = mpcred.lc(credtransfer, 'user')
        password = mpcred.lc(credtransfer, 'passwd')
        address = mpcred.lc(credtransfer, 'address')
        try:
            port = int(mpcred.lc(credtransfer, 'port'))
        except:
            port = 21
        if debug:
            print("   -> done")
    config['rmuser'] = user
    config['rmpassword'] = password
    config['rmaddress'] = address
    config['rmport'] = port

    source = ''
    protocol = config.get('protocol')
    if not protocol in ['', 'ftp', 'FTP']:
        source += protocol + "://"
        if not user == '' and not password == '':
            source += user + ":" + password + "@"
        if not address == '':
            source += address

    remotepath = config.get('sourcedatapath')
    if not remotepath == '':
        source += remotepath
    config['source'] = source

    if not protocol in ['', 'scp', 'ftp', 'SCP', 'FTP', 'html', 'rsync']:
        print('Specify a valid protocol:')
        print('-- check collectfile.py -h for more options and requirements')
        success = False
        #sys.exit()

    walk = config.get('walksubdirs')
    if debug:
        print("   Walk through subdirs: {}".format(walk))
    if GetBool(walk):
        if not protocol in ['', 'scp', 'rsync']:
            print(
                '   -> Walk mode only works for local directories and scp access.'
            )
            print('   -> Switching walk mode off.')
            config['walksubdirs'] = False

    db = None
    creddb = config.get('dbcredentials')
    if not creddb == '':
        print("   Accessing data bank ...")
        # required either for writing to the DB or for getting meta information when writing the archive
        try:
            db = mysql.connect(host=mpcred.lc(creddb, 'host'),
                               user=mpcred.lc(creddb, 'user'),
                               passwd=mpcred.lc(creddb, 'passwd'),
                               db=mpcred.lc(creddb, 'db'))
            print("   -> success")
        except:
            print("   -> failure - check your credentials")
            db = None
            success = False
            #sys.exit()
    config['db'] = db

    # loaded all credentials (if started from root, root permissions are required for that)
    # now switch user for scp
    # TODO check whether this is working in a function
    if config.get('defaultuser'):
        try:
            uid = pwd.getpwnam(config.get('defaultuser'))[2]
            os.setuid(uid)
        except:
            print("  User {} not existing -  moving on".format(
                config.get('defaultuser')))

    dateformat = config.get('dateformat')
    filename = config.get('filenamestructure')

    if dateformat == "" and filename == "":
        print(
            '   Specify either a fileformat: -f myformat.dat or a dateformat -d "%Y",ctime !'
        )
        print(
            '   -- check collectfile.py -h for more options and requirements')
        success = False
        #sys.exit()
    if not dateformat in ['', 'ctime', 'mtime']:
        current = datetime.utcnow()
        try:
            newdate = datetime.strftime(current, dateformat)
        except:
            print('   Specify a valid datetime dateformat like "%Y-%m-%d"')
            print(
                '   -- check collectfile.py -h for more options and requirements'
            )
            success = False
            #sys.exit()
    if "%s" in filename and dateformat in ['', 'ctime', 'mtime']:
        print(
            '   Specify a datetime dateformat for given placeholder in fileformat!'
        )
        print(
            '   -- check collectfile.py -h for more options and requirements')
        success = False
        #sys.exit()
    elif not "%s" in filename and "*" in filename and not dateformat in [
            'ctime', 'mtime'
    ]:
        print(
            '   Specify either ctime or mtime for dateformat to be used with your given fileformat!'
        )
        print(
            '   -- check collectfile.py -h for more options and requirements')
        success = False
        #sys.exit()
    elif not "%s" in filename and not "*" in filename and not dateformat in [
            ""
    ]:
        print('   Given dateformat will be ignored!')
        print(
            '   -- check collectfile.py -h for more options and requirements')
        print('   -- continuing ...')

    if debug:
        print("  => Configuration checked - success")

    return config, success
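
For orientation, a sketch of calling CheckConfiguration() with a minimal configuration dictionary; the keys mirror those read by the function above, the values are placeholders, and helpers such as GetBool and mpcred are assumed to be importable from the surrounding module.

# Hypothetical configuration - values are placeholders only.
config = {
    'rawpath': '/srv/archive',              # local destination for raw files
    'sourcecredentials': '',                # empty -> no remote credentials needed
    'sourcedatapath': '/srv/data',
    'protocol': '',                         # '' means local disk access
    'walksubdirs': 'False',
    'dbcredentials': '',                    # empty -> skip the database connection
    'defaultuser': '',
    'dateformat': '%Y-%m-%d',
    'filenamestructure': 'LEMI036_1_0001_%s.bin',
}
config, ok = CheckConfiguration(config=config, debug=True)
print("configuration valid: {}".format(ok))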
Example #7
name = "{}-DataProducts-magnetism".format(sn)

try:
    from magpy.opt.analysismonitor import *
    analysisdict = Analysismonitor(
        logfile='/home/cobs/ANALYSIS/Logs/AnalysisMonitor_cobs.log')
    analysisdict = analysisdict.load()
except:
    print("Analysis monitor failed")
    pass

# ################################################
#             Database connection
# ################################################

dbpasswd = mpcred.lc('cobsdb', 'passwd')
try:
    # Test MARCOS 1
    print("Connecting to primary MARCOS...")
    db = mysql.connect(host="138.22.188.195",
                       user="******",
                       passwd=dbpasswd,
                       db="cobsdb")
except:
    print("... failed")
    try:
        # Test MARCOS 2
        print("Connecting to secondary MARCOS...")
        db = mysql.connect(host="138.22.188.191",
                           user="******",
                           passwd=dbpasswd,
Example #8
import magpy.opt.cred as mpcred

ippath = '/home/cobs/MARTAS/Logs/ip.txt'

last = open(ippath, 'r')
lastip = last.read()
last.close()

#sendcmd = 'ifconfig > ' + ippath
sendcmd = 'wget -qO- http://ipecho.net/plain > ' + ippath
subprocess.call(sendcmd, shell=True)

cred = 'zamg'
sendpath = '/data/station1'

address = mpcred.lc(cred, 'address')
user = mpcred.lc(cred, 'user')
passwd = mpcred.lc(cred, 'passwd')
port = 21
pathtolog = '/tmp/magpytransfer.log'

current = open(ippath, 'r')
currentip = current.read()
current.close()

if currentip != lastip:
    print "IP has changed. Sending..."
    ftpdatatransfer(localfile=ippath,
                    ftppath=sendpath,
                    myproxy=address,
                    port=port,
Example #9
def main(argv):
    ##
    ## Run like: python acquisition.py -m '/home/cobs/MARTAS/defaults.conf'

    global now
    global hostname
    global msgcount
    global SUPPORTED_PROTOCOLS

    passive_count = 0
    active_count = 0
    martasfile = 'martas.cfg'
    cred = ''
    creduser = ''
    credhost = ''
    pwd = 'None'

    ##  Get eventually provided options
    ##  ----------------------------
    usagestring = 'acquisition.py -m <martas> -c <credentials> -P <password>'
    try:
        opts, args = getopt.getopt(argv, "hm:c:P:U", [
            "martas=",
            "credentials=",
            "password="******"debug=",
        ])
    except getopt.GetoptError:
        print('Check your options:')
        print(usagestring)
        sys.exit(2)

    for opt, arg in opts:
        if opt == '-h':
            print('------------------------------------------------------')
            print('Usage:')
            print(usagestring)
            print('------------------------------------------------------')
            print('Options:')
            print('-h                             help')
            print(
                '-m                             path to martas configuration')
            print(
                '-c                             credentials, if authentication is used'
            )
            print(
                '-P                             alternatively provide password'
            )
            print('------------------------------------------------------')
            print('Examples:')
            print('1. Basic (using default martas.cfg)')
            print('   python acquisition.py')
            print('2. Using other configuration')
            print('   python acquisition.py -m "/home/myuser/mymartas.cfg"')
            sys.exit()
        elif opt in ("-m", "--martas"):
            martasfile = arg
        elif opt in ("-c", "--credentials"):
            cred = arg
        elif opt in ("-P", "--password"):
            pwd = arg

    ##  Load defaults dict
    ##  ----------------------------
    conf = acs.GetConf(martasfile)
    # Add a check routine here to verify that conf information was obtained

    broker = conf.get('broker')
    mqttport = int(conf.get('mqttport'))
    mqttdelay = int(conf.get('mqttdelay'))

    ##  Get Sensor data
    ##  ----------------------------
    sensorlist = acs.GetSensors(conf.get('sensorsconf'))

    ## Check for credentials
    ## ----------------------------
    if not cred == '':
        try:
            print("Accessing credential information for {}".format(cred))
            credpath = conf.get('credentialpath', None)
            credhost = mpcred.lc(cred, 'address', path=credpath)
            creduser = mpcred.lc(cred, 'user', path=credpath)
            pwd = mpcred.lc(cred, 'passwd', path=credpath)
        except:
            print("error when accessing credentials")
            pass

    ## create MQTT client
    ##  ----------------------------
    client = mqtt.Client(clean_session=True)
    user = conf.get('mqttuser', '')
    if not user in ['', '-', None, 'None']:
        # Should have two possibilities:
        # 1. check whether credentials are provided
        if not cred == '':
            if not creduser == user:
                print(
                    'User names provided in credentials ({}) and martas.cfg ({}) differ. Please check!'
                    .format(creduser, user))
                pwd = 'None'
        if pwd == 'None':
            # 2. request pwd input
            print('MQTT Authentication required for User {}:'.format(user))
            import getpass
            pwd = getpass.getpass()

        client.username_pw_set(username=user, password=pwd)

    ##  Start Twisted logging system
    ##  ----------------------------
    if conf.get('logging').strip() == 'sys.stdout':
        log.startLogging(sys.stdout)
    else:
        try:
            print(" -- Logging to {}".format(conf.get('logging')))
            log.startLogging(open(conf.get('logging'), 'a'))
            log.msg("----------------")
            log.msg("  -> Logging to {}".format(conf.get('logging')))
        except:
            log.startLogging(sys.stdout)
            print("Could not open {}. Switching log to stdout.".format(
                conf['logging']))

    log.msg("----------------")
    log.msg("Starting MARTAS acquisition version {}".format(__version__))
    log.msg("----------------")

    ## connect to MQTT client
    ##  ----------------------------
    client.on_connect = onConnect
    try:
        client.connect(broker, mqttport, mqttdelay)
        client.loop_start()
    except:
        log.msg(
            "Critical error - no network connection available during startup or mosquitto server not running - check whether data is recorded"
        )

    establishedconnections = {}
    ## Connect to serial port (sensor dependency) -> returns publish
    # Start subprocesses for each publishing protocol
    for sensor in sensorlist:
        log.msg("----------------")
        log.msg("Sensor and Mode:", sensor.get('sensorid'), sensor.get('mode'))
        log.msg("----------------")
        init = sensor.get('init')
        if not init in ['', 'None', None, 0, '-']:
            log.msg("  - Initialization using {}".format(init))
            initdir = conf.get('initdir')
            initapp = os.path.join(initdir, init)
            # Check if provided initscript is existing
            import subprocess
            try:
                log.msg("  - running initialization {}".format(initapp))
                initcall = "{} {}".format(sys.executable, initapp)
                #log.msg(subprocess.check_output(['/bin/sh',initapp]))
                log.msg(subprocess.check_output(initcall))
            except subprocess.CalledProcessError as e:
                log.msg(
                    "  - init command '{}' returned with error (code {}): {}".
                    format(e.cmd, e.returncode, e.output))
            except:
                pass
        if sensor.get('mode') in ['p', 'passive', 'Passive', 'P']:
            try:
                connected = PassiveThread(conf, sensor, client,
                                          establishedconnections)
                log.msg(
                    " - PassiveThread initiated for {}. Ready to receive data ..."
                    .format(sensor.get('sensorid')))
                establishedconnections.update(connected)
                passive_count += 1
            except:
                log.msg(" - !!! PassiveThread failed for {} !!!".format(
                    sensor.get('sensorid')))
                pass
        elif sensor.get('mode') in ['a', 'active', 'Active', 'A']:
            try:
                log.msg(
                    " - ActiveThread initiated for {}. Periodically requesting data ..."
                    .format(sensor.get('sensorid')))
                connected_act = ActiveThread(conf, sensor, client,
                                             establishedconnections)
            except:
                log.msg(" - !!! ActiveThread failed for {} !!!".format(
                    sensor.get('sensorid')))
                pass
        elif sensor.get('mode') in ['autonomous']:
            try:
                log.msg(
                    " - AutoThread initiated for {}. Ready to receive data ..."
                    .format(sensor.get('sensorid')))
                connected_act = AutoThread(conf, sensor, client,
                                           establishedconnections)
            except Exception as e:
                log.msg(" - !!! AutoThread failed for {} !!!".format(
                    sensor.get('sensorid')))
                log.msg(e)
                pass
        else:
            log.msg("acquisition: Mode not recognized")

        sensorid = sensor.get('sensorid')

    # Start all passive clients
    if passive_count > 0:
        log.msg(
            "acquisition: Starting reactor for passive sensors. Sending data now ..."
        )
        reactor.run()

    # TODO other solution - when the main thread stops, the daemons are killed!
    got_here = True
    print("this is the end of the main thread...")
    if got_here:
        import time
        while True:
            time.sleep(100)
Example #10
def main(argv):
    cred = ''
    path = ''
    remotepath = ''
    protocol = ''
    depth = 2
    increment = False
    extension = 'bin'
    dateformat = '%Y-%m-%d'
    compress = False
    try:
        opts, args = getopt.getopt(argv, "hc:l:r:s:d:i:e:f:z", [
            "cred=",
            "localpath=",
            "remotepath=",
            "protocol=",
            "depth=",
            "increment=",
            "extension=",
            "dateformat=",
        ])
    except getopt.GetoptError:
        print 'senddata.py -c <credentialshortcut> -s <protocol> -l <localpath> -r <remotepath> -d <depth> -i <increment> -e <extension> -f <dateformat> -z'
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print '-------------------------------------'
            print 'Description:'
            print 'Sending data to a remote host by scp or ftp.'
            print 'Requires existing credential information (see cred.py).'
            print '-------------------------------------'
            print 'Usage:'
            print 'senddata.py -c <credentialshortcut> -l <localpath> -r <remotepath> '
            print '  -s <protocol> -d <depth> -i <increment> -e <extension> -f <dateformat>'
            print '-------------------------------------'
            print 'Options:'
            print '-c (required): the credentials for the transfer protocol.'
            print '               Create using the addcred.py method. Use'
            print '               python addcred.py -h for help on that.'
            print '-l (required): provide a local root path, all directories below will'
            print '               be scanned for *date.bin files'
            print '-r           : eventually provide a remote path like "/data"'
            print '-s (required): provide the protocol to be used (ftp or scp)'
            print '-d           : defines the number of days to be sent: 1 for today only,'
            print '               2 for the last two days and so on. d needs to be an integer'
            print '               with d >= 1.'
            print '-i (experimental): defines incremental uploads. Loaded data is extracted by Magpy'
            print '               and i minutes are uploaded and appended to an existing file.'
            print '               This file needs to be unified later on.'
            print '-e           : provide a user defined extension, default is "bin"'
            print '-f           : provide a date format, default is "%Y-%m-%d"'
            print '-z           : compress file before sending'
            print '-------------------------------------'
            print 'Examples:'
            print 'python senddata.py -c zamg -s ftp -l /srv/ws/ -r /data'

            sys.exit()
        elif opt in ("-c", "--cred"):
            cred = arg
        elif opt in ("-l", "--localpath"):
            path = arg
        elif opt in ("-r", "--remotepath"):
            remotepath = arg
        elif opt in ("-s", "--protocol"):
            protocol = arg
        elif opt in ("-d", "--depth"):
            try:
                depth = int(arg)
                if not depth >= 1:
                    print "depth needs to be positve"
                    sys.exit()
            except:
                print "depth needs to be an integer"
                sys.exit()
        elif opt in ("-i", "--increment"):
            increment = arg
        elif opt in ("-e", "--extension"):
            extension = arg.strip('.')
        elif opt in ("-f", "--dateformat"):
            dateformat = arg
        elif opt in ("-z", "--compress"):
            compress = True

    if cred == '':
        print 'Specify a shortcut to credentials. '
        print '-- use addcred.py for this purpose.'
        print '-- check senddata.py -h for more options and requirements'
        sys.exit()
    if path == '':
        print 'Specify a base path.  '
        print '-- check senddata.py -h for more options and requirements'
        sys.exit()
    if protocol == '':
        print 'Specify a protocol (scp, ftp).  '
        print '-- check senddata.py -h for more options and requirements'
        sys.exit()

    # Test with missing information
    address = mpcred.lc(cred, 'address')
    user = mpcred.lc(cred, 'user')
    passwd = mpcred.lc(cred, 'passwd')
    port = mpcred.lc(cred, 'port')
    pathtolog = '/tmp/magpytransfer.log'

    if increment:
        depth = 1
    datelist = []
    current = datetime.utcnow()

    newcurrent = current
    for elem in range(depth):
        datelist.append(datetime.strftime(newcurrent, dateformat))
        newcurrent = current - timedelta(days=elem + 1)

    print datelist

    for date in datelist:
        for dirpath, dirnames, filenames in os.walk(path):
            for filename in [
                    f for f in filenames if f.endswith(date + "." + extension)
            ]:
                localfile = os.path.join(dirpath, filename)
                print "Sending ", localfile
                if compress:
                    print 'creating compressed archive'
                    zfile = os.path.join(dirpath,
                                         filename.strip(extension) + 'zip')
                    zf = zipfile.ZipFile(zfile, mode='w')
                    try:
                        zf.write(localfile,
                                 os.path.basename(localfile),
                                 compress_type=compression)
                    finally:
                        print 'closing'
                        zf.close()
                    localfile = zfile
                print "Sending ", localfile
                if protocol == 'ftp':
                    # Tested - working flawless (take care with address - should not contain ftp://)
                    ftpdatatransfer(localfile=localfile,
                                    ftppath=remotepath,
                                    myproxy=address,
                                    port=port,
                                    login=user,
                                    passwd=passwd,
                                    logfile=pathtolog,
                                    raiseerror=True)
                    pass
                elif protocol == 'scp':
                    # Tested - working flawless
                    scptransfer(
                        localfile, user + '@' + address + ':' + remotepath +
                        '/' + filename, passwd)
                    pass
                elif protocol == 'gin':
                    print "GIN not supported yet"
                    # Coming soon
                    sys.exit()
                else:
                    print "Unsupported protocol selected:"
                    print '-- Specify one among (scp, ftp).  '
                    print '-- check senddata.py -h for more options and requirements'
                    sys.exit()
                print "... success"
Example #11
def sendmail(dic):
    """
    Function for sending mails with attachments
    """

    #if not smtpserver:
    #    smtpserver = 'smtp.web.de'
    if 'Attach' in dic:
        files = map(lambda s: s.strip(), dic['Attach'].split(','))
    else:
        files = []
    if not dic.get('Text'):
        dic['Text'] = 'Cheers, Your Analysis-Robot'
    if not 'Subject' in dic:
        dic['Subject'] = 'Automatic message'
    if 'mailcred' in dic:
        ## import credential routine
        import magpy.opt.cred as cred
        #read credentials
        print("Reading credentials")
        dic['smtpserver'] = cred.lc(dic.get('mailcred'), 'smtp')
        dic['user'] = cred.lc(dic.get('mailcred'), 'user')
        dic['pwd'] = cred.lc(dic.get('mailcred'), 'passwd')
        #dic['port'] = cred.lc(dic.get('mailcred'),'port') ## port is currently not stored by addcred
    if 'port' in dic:
        port = int(dic['port'])
    else:
        port = None
    if 'user' in dic:
        user = dic['user']
    else:
        user = ''

    msg = MIMEMultipart()
    msg['From'] = dic['From']
    send_from = dic['From']
    #msg['To'] = COMMASPACE.join(send_to)
    msg['To'] = dic['To']
    if len(dic['To'].split(',')) > 1:
        send_to = list(map(lambda s: s.strip(), dic['To'].split(',')))
    else:
        send_to = dic.get('To').strip()
    msg['Date'] = formatdate(localtime=True)
    msg['Subject'] = dic['Subject']
    msg.attach(MIMEText(dic['Text']))

    # TODO log if file does not exist
    for f in files:
        part = MIMEBase('application', "octet-stream")
        part.set_payload(open(f, "rb").read())
        encoders.encode_base64(part)
        part.add_header('Content-Disposition',
                        'attachment; filename="%s"' % os.path.basename(f))
        msg.attach(part)

    # seems as if server name needs to be specified in py3.7 and 3.8, should work in older versions as well
    smtp = SMTP(dic.get('smtpserver'))
    smtp.set_debuglevel(False)
    if port:
        smtp.connect(dic.get('smtpserver'), port)
    else:
        smtp.connect(dic.get('smtpserver'))
    smtp.ehlo()
    if port == 587:
        smtp.starttls()
    smtp.ehlo()
    if user:
        smtp.login(user, dic.get('pwd'))
    smtp.sendmail(send_from, send_to, msg.as_string())
    smtp.close()
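
A possible call of sendmail(); the dictionary keys follow those queried in the function above, while the addresses, the attachment path and the 'mymail' credential shortcut are purely illustrative.

# Hypothetical call - addresses, attachment and the 'mymail' shortcut are examples only.
maildict = {
    'From': 'analysis-robot@example.com',
    'To': 'observer@example.com, backup@example.com',
    'Subject': 'Daily magnetism report',
    'Text': 'Please find the latest plot attached.',
    'Attach': '/tmp/magvar_plot.png',
    'mailcred': 'mymail',        # shortcut holding smtp, user and passwd
    'port': 587,                 # port 587 triggers STARTTLS in sendmail()
}
sendmail(maildict)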
Example #12
#!/usr/bin/env python

"""
Skeleton for graphs
"""

from magpy.stream import *   
from magpy.database import *   
from magpy.transfer import *
import magpy.mpplot as mp
import magpy.opt.emd as emd
import magpy.opt.cred as mpcred

dbpasswd = mpcred.lc('cobsdb','passwd')

try:
    # Test MARCOS 1
    print "Connecting to primary MARCOS..."
    db = mysql.connect(host="localhost",user="******",passwd=dbpasswd,db="cobsdb")
    print db
except:
    print "... failed"
    try:
        # Test MARCOS 2
        print "Connecting to secondary MARCOS..."
        db = mysql.connect(host="138.22.188.191",user="******",passwd=dbpasswd,db="cobsdb")
        print db
    except:
        print "... failed -- aborting"
        sys.exit()
Example #13
from magpy.database import *
from magpy.transfer import *
import magpy.mpplot as mp
import magpy.opt.emd as emd
import magpy.opt.cred as mpcred

import itertools
from threading import Thread
import json

from martas import martaslog as ml
logpath = '/var/log/magpy/mm-dp-autodif.log'
sn = 'SAGITTARIUS'
statusmsg = {}

dbpasswd = mpcred.lc('cobsdb', 'passwd')
try:
    # Test MARCOS 1
    print("Connecting to primary MARCOS...")
    db = mysql.connect(host="138.22.188.195",
                       user="******",
                       passwd=dbpasswd,
                       db="cobsdb")
except:
    print "... failed"
    try:
        # Test MARCOS 2
        print("Connecting to secondary MARCOS...")
        db = mysql.connect(host="138.22.188.191",
                           user="******",
                           passwd=dbpasswd,
Example #14
def main(argv):
    creddb = ''
    credtransfer = ''
    user=''
    password= ''
    address = ''
    port = 21
    protocol = ''
    localpath = ''
    remotepath = ''
    sensorid = ''
    stationid = ''
    startdate = ''
    dateformat = ''
    depth = 1
    filename = ''
    db = False
    disableproxy=False
    zipping = False
    walk=False
    defaultuser = ''
    uppercase=False
    force = ''
    forcelist = ['0001','0002','0003','0004','0005','0006','0007','0008'] 
    debug = False
    try:
        opts, args = getopt.getopt(argv,"hc:e:l:r:p:s:t:b:d:a:f:Uowu:xm:z",["creddb=","credtransfer=","localpath=","remotepath=","protocol=","sensorid=","stationid=","startdate=","depth=","dateformat=", "filefomat=","debug=","disableproxy=","walk=","user="******"uppercase=","insert-table=","zip="])
    except getopt.GetoptError:
        print('collectfile.py -c <creddb> -e <credtransfer> -l <localpath> -r <remotepath> -p <protocol> -s <sensorid> -t <stationid> -b <startdate> -d <depth> -a <dateformat> -f <filefomat> -U <debug> -o <disableproxy=True> -w <walk=True> -u <user> -x <uppercase> -m <insert-table> -z <zip>')
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('-------------------------------------')
            print('Description:')
            print('collectfile.py reads data from various sources ')
            print('and uploads data to a data bank.')
            print('Filtering and archiving is done using "cleanup".')
            print('-------------------------------------')
            print('Usage:')
            print('collectfile.py -c <creddb> -e <credtransfer> -l <localpath> -r <remotepath> -p <protocol> -s <sensorid> -t <stationid> -b <startdate> -d <depth> -a <dateformat> -f <filefomat> -U <debug> -o <disableproxy=True> -w <walk=True> -u <user> -x <uppercase> -m <insert-table> -z <zip>')
            print('-------------------------------------')
            print('Options:')
            print('-c            : provide the shortcut to the data bank credentials')
            print('-e            : credentials for transfer protocol')
            print('-l            : localpath - if provided, raw data will be stored there.')
            print('              : two subdirectories will be created - stationID and sensorID')
            print('-r (required) : remotepath - path to the data to be collected')
            print('-p            : protocol of data access - required for ftp and scp')
            print('-s            : ID of the sensor (required if not contained in the data')
            print('                meta information)')
            print('-t            : ID of the station i.e. the Observatory code (required if')
            print('                not in meta data)')
            print('-b            : date to start with, like 2014-11-22, default is current day')
            print('-d            : depth: 1 means today, 2 today and yesterday, 3 last three days, etc')
            print('-a            : dateformat in files to be read')
            print('                like "%Y-%m-%d" for 2014-02-01')
            print('                     "%Y%m%d" for 20140201')
            print('                     "ctime" or "mtime" for using timestamp of file')
            print('                Check out pythons datetime function for more info')
            print('-f            : filename of data file to be read.') 
            print('                Add %s as placeholder for date')     
            print('                examples: "WIC_%s.bin"')
            print('                          "*%s*"')
            print('                          "WIC_%s.*"')
            print('                          "WIC_2013.all" - no dateformat -> single file will be read')
            print('-o (no input) : if selected any systems proxy settings are disabled')
            print('-w (no input) : if selected all subdirectories below remote path will be searched for')
            print('                filename pattern. Only works for local directories and scp.')     
            print('-u            : perform upload as this user - necessary for cron and other root jobs')
            print('                as root cannot use scp transfer.')     
            print('-x            : use uppercase for dateformat (e.g. NOV2014 instead of Nov2014)')
            print('-m            : force data to the given revision number.') 
            print('-z            : if option l is selected raw data will be zipped within localpath.') 
            print('-------------------------------------')
            print('Examples:')
            print('---------')
            print('1. get data from ftp server and add to database')
            print(' python collectfile.py -r "/data/magnetism/gam" -c cobsdb -e zamg -p ftp -t GAM ') 
            print('      -d 2 -f *%s.zip -a "%Y-%m-%d"')
            print('---------')
            print('2. get data from local directory and add to database')
            print('python collectfile.py -c cobsdb -r "/srv/data/"') 
            print('      -s LEMI036_1_0001 -t WIC -a "%Y-%m-%d" -f "LEMI036_1_0001_%s.bin" ')
            print('---------')
            print('3. get data from local directory and add to database, add raw data to archive')
            print('python collectfile.py -c cobsdb -r "/Observatory/archive/WIK/DIDD_3121331_0002/DIDD_3121331_0002_0001/" -s DIDD_3121331_0002 -t WIK -b "2012-06-01" -d 100 -a "%Y-%m-%d" -f "DIDD_3121331_0002_0001_%s.cdf" -l "/srv/archive"')
            print('---------')
            print('4. get data from remote by ssh and store in local archive')
            print('python collectfile.py -e phobostilt -r "/srv/gwr/" -p scp -s GWRSG_12345_0002 -t SGO -b "2012-06-01" -d 30 -a "%y%m%d" -f "G1%s.025" -l "/srv/archive"')
            print('---------')
            print('5. get recently created files from remote by ssh and store in local archive')
            print('python collectfile.py -e themisto -r "/srv/" -p scp -t SGO -d 2 -a ctime -l "/srv/archive"')
            print('---------')
            sys.exit()
        elif opt in ("-c", "--creddb"):
            creddb = arg
        elif opt in ("-e", "--credtransfer"):
            credtransfer = arg
        elif opt in ("-l", "--localpath"):
            localpath = arg
        elif opt in ("-p", "--protocol"):
            protocol = arg
        elif opt in ("-r", "--remotepath"):
            remotepath = arg
        elif opt in ("-s", "--sensorid"):
            sensorid = arg
        elif opt in ("-t", "--stationid"):
            stationid = arg
        elif opt in ("-d", "--depth"):
            try:
                depth = int(arg)
                if not depth >= 1:
                    print("depth needs to be positve") 
                    sys.exit()
            except:
                print("depth needs to be an integer") 
                sys.exit()
        elif opt in ("-a", "--dateformat"):
            dateformat = arg
        elif opt in ("-b", "--begin"):
            startdate = arg
        elif opt in ("-f", "--filename"):
            filename = arg
        elif opt in ("-o", "--option"):
            disableproxy=True
        elif opt in ("-w", "--walk"):
            walk=True
        elif opt in ("-u", "--user"):
            defaultuser = arg
        elif opt in ("-x", "--uppercase"):
            uppercase=True
        elif opt in ("-z", "--zip"):
            zipping=True
        elif opt in ("-m", "--insert-table"):
            force=arg
            if not force in forcelist:
                print ("-m: provided data revision number is not valid: should be 0001 or similar")
                force = ''
        elif opt in ("-U", "--debug"):
            debug = True

    ### ###############################################################################
    ###   1. Check input variables
    ### ###############################################################################

    if localpath == '' and creddb == '':
        print('Specify either a shortcut to the credential information of the database or a local path:')
        print('-- check collectfile.py -h for more options and requirements')
        sys.exit()
    if localpath == '':
        destination = tempfile.gettempdir()
    else:
        if not os.path.isdir(localpath):
            print ("Destination directory {} not existing. Creating it".format(localpath)) 
            os.makedirs(localpath)
        destination = localpath
    if not credtransfer == '':
        user=mpcred.lc(credtransfer,'user')
        password=mpcred.lc(credtransfer,'passwd')
        address = mpcred.lc(credtransfer,'address')
        try:
            port = int(mpcred.lc(credtransfer,'port'))
        except:
            port = 21
    source = ''
    if not protocol in ['','ftp','FTP']:
        source += protocol + "://"
        if not user == '' and not password=='':
            source += user + ":" + password + "@"
        if not address == '':
            source += address
    if not remotepath == '':
        source += remotepath

    if not protocol in ['','scp','ftp','SCP','FTP','html','rsync']:
        print('Specify a valid protocol:')
        print('-- check collectfile.py -h for more options and requirements')
        sys.exit()
    if walk:
        if not protocol in ['','scp','rsync']: 
            print(' Walk mode only works for local directories and scp access.')
            print(' Switching walk mode off.')
            walk = False

    if not creddb == '':
        print("Accessing data bank ...")
        try:
            db = mysql.connect (host=mpcred.lc(creddb,'host'),user=mpcred.lc(creddb,'user'),passwd=mpcred.lc(creddb,'passwd'),db =mpcred.lc(creddb,'db'))
            print("success")
        except:
            print("failure - check your credentials")
            sys.exit()

    # loaded all credentials (if started from root, root permissions are required for that)
    # now switch user for scp
    if not defaultuser == '':
        uid=pwd.getpwnam(defaultuser)[2]
        os.setuid(uid)

    if startdate == '':
        current = datetime.utcnow() # make that a variable
    else:
        current = DataStream()._testtime(startdate)

    if dateformat == "" and filename == "":
        print('Specify either a fileformat: -f myformat.dat or a dateformat -d "%Y",ctime !')
        print('-- check collectfile.py -h for more options and requirements')
        sys.exit()
    if not dateformat in ['','ctime','mtime']:
        try:
            newdate = datetime.strftime(current,dateformat)
        except:
            print('Specify a valid datetime dateformat like "%Y-%m-%d"')
            print('-- check collectfile.py -h for more options and requirements')
            sys.exit()
    if "%s" in filename and dateformat in ['','ctime','mtime']:
        print('Specify a datetime dateformat for given placeholder in fileformat!')
        print('-- check collectfile.py -h for more options and requirements')
        sys.exit()
    elif not "%s" in filename and "*" in filename and not dateformat in ['ctime','mtime']:
        print('Specify either ctime or mtime for dateformat to be used with your given fileformat!')
        print('-- check collectfile.py -h for more options and requirements')
        sys.exit()
    elif not "%s" in filename and not "*" in filename and not dateformat in [""]:
        print('Given dateformat will be ignored!')
        print('-- check collectfile.py -h for more options and requirements')
        print('-- continuing ...')

    if debug:
        print("1.0 finished - Parameters OK")

    ### ###############################################################################
    ###   2. Create download/copy link  
    ### ###############################################################################

    ###  for all files conform with eventually provided datelist


    ###   2.1 Check dates  
    ### -------------------------------------

    ### the following parameters are used here:
    ### dateformat, filename

    ### dateformat:  string like ("%Y-%m-%d"), ("ctime", "mtime") or ""
    ### filename:    string like "*%s.bin" (-> requires dateformat[0] ) 
    ###              string like "*.bin" (-> requires dateformat[1] ) 
    ###              string like "myfile.bin" (-> requires dateformat[2] ) 
    ###              empty "" (-> searches either for dateformat[0] or takes all fitting with dateformat[2] ) 

    ###  -> see 1. check parameter

    ### make use of depth and begin to define timerange

    datelist = []
    newcurrent = current
    if not dateformat in ['','ctime','mtime']:
        for elem in range(depth):
            if dateformat == '%b%d%y': #exception for MAGREC
                newdate = datetime.strftime(newcurrent,dateformat)
                datelist.append(newdate.upper())
            else:
                datelist.append(datetime.strftime(newcurrent,dateformat))
            newcurrent = current-timedelta(days=elem+1)
    elif dateformat in ['ctime','mtime']:
        for elem in range(depth):
            datelist.append(newcurrent)
            newcurrent = current-timedelta(days=elem+1)
    else:
        datelist = ['dummy']

    #if debug:
    print(" - Dealing with time range:\n {}".format(datelist))

    ###   2.2 Select files from source meeting critera 
    ### -------------------------------------

    ### Define source based on 'protocol', 'remotepath', 'walk', 'option' and optionally 'sensorid'

    ### protocols: ''(local disk), 'scp', 'ftp', 'html'

    #filelist = getfilelist(protocol, source, sensorid, filename, datelist, walk=True, option=None)
    if debug:
        print("2.2 Starting - Getting filelists")

    filelist = []
    if protocol in ['ftp','FTP']:
        if debug:
            print (" - Getting filelist - by ftp ") 
        import ftplib
        if debug:
            print (" - connecting to {} on port {}".format(address,port)) 
        if not port == 21:
            ftp = ftplib.FTP()
            ftp.connect(address,port)
        else:
            ftp = ftplib.FTP(address)
        if debug:
            print (" - user: {} ".format(user)) 
        ftp.login(user,password)
        ftp.cwd(source)
        lines = []
        ftp.dir("", lines.append)
        ftp.close()
        for date in datelist:
            path = dir_extract(lines, filename, date, dateformat)
            if len(path) > 0:
                filelist.extend(path)
    elif protocol in ['scp','SCP','rsync']:
        if debug:
            print (" - Getting filelist - by ssh ")
        pwd_required=True
        if protocol == 'rsync':
            pwd_required=False
            print ("Rsync requires passwordless ssh connection to remote system")
        import pexpect
        if not dateformat in ['','ctime','mtime']:
            for date in datelist:
                path = ssh_getlist(remotepath, filename, date, dateformat, datetime.utcnow(), cred=[user,password,address],pwd_required=pwd_required)
                if len(path) > 0:
                    filelist.extend(path)
        else:
            filelist = ssh_getlist(remotepath, filename, min(datelist), dateformat, max(datelist), cred=[user,password,address],pwd_required=pwd_required)
    elif protocol == '':
        if debug:
            print (" - Getting filelist - from local directory ") 
        ### Search local directory - Working
        for date in datelist:
            path = walk_dir(source, filename, date, dateformat)
            if len(path) > 0:
                filelist.extend(path)
    elif protocol == 'html':
        print (filelist)
        sys.exit()

    if debug:
        print ("Result")
        print ("-----------------------------")
        print (filelist)

    ###   2.3 Get selected files and copy them to destination
    ### -------------------------------------
    ###
    ### only if not protocol == '' and localpath

    ### update filelist with new filenamens on local harddisk

    if debug:
        print("2.3 Writing data to a local directory (or tmp)")

    localpathlist = []

    if not protocol == '' or (protocol == '' and not destination == tempfile.gettempdir()):
        ### Create a directory by getting sensorid names (from source directory)
        def createdestinationpath(localpath,stationid,sensorid):
            subdir = 'raw'
            if not stationid and not sensorid:
                destpath = os.path.join(localpath)
            elif not stationid:
                destpath = os.path.join(localpath,sensorid,'raw')
            elif not sensorid:
                destpath = os.path.join(localpath,stationid.upper())
            else:
                destpath = os.path.join(localpath,stationid.upper(),sensorid,'raw')
            return destpath

        # Open the specific channel
        if protocol in ['ftp','FTP']:
            if not port == 21:
                ftp = ftplib.FTP()
                ftp.connect(address,port)
            else:
                ftp = ftplib.FTP(address)
            ftp.login(user,password)
            ftp.cwd(source)

        for f in filelist:
            path = os.path.normpath(f)
            li = path.split(os.sep)
            if not sensorid and not protocol in ['ftp','FTP']:
                if len(li) >= 2:
                    sensid = li[-2]
            elif not sensorid and protocol in ['ftp','FTP']:
                sensid = f.split('.')[0].rpartition('_')[0]
            else:
                sensid = sensorid

            destpath = createdestinationpath(destination,stationid,sensid)

            destname = os.path.join(destpath,li[-1])

            if not os.path.isdir(destpath):
                os.makedirs(destpath)
            if debug:
                print ("DESTINATION (for files):", destpath, li[-1])

            if protocol in ['ftp','FTP']:
                fhandle = open(destname, 'wb')
                ftp.retrbinary('RETR ' + f, fhandle.write)
                fhandle.close()
            elif protocol in ['scp','SCP']:
                scptransfer(user+'@'+address+':'+f,destpath,password,timeout=600)
            elif protocol in ['rsync']:
                # create a command line string with rsync ### please note: rsync requires passwordless communication
                rsyncstring = "rsync -avz -e ssh {} {}".format(user+'@'+address+':'+f,destpath)
                print ("Executing:", rsyncstring)
                subprocess.call(rsyncstring.split())
            elif protocol in ['html','HTML']:
                pass
            elif protocol in ['']:
                copyfile(f, destname)
            if zipping:
                if debug:
                    print (" raw data wil be zipped")
                dirname = os.path.dirname(destname)
                oldname = os.path.basename(destname)
                pname = os.path.splitext(oldname)
                if not pname[1] in [".zip",".gz",".ZIP",".GZ"]:
                    zipname = pname[0]+'.zip'
                    with zipfile.ZipFile(os.path.join(dirname,zipname), 'w') as myzip:
                        myzip.write(destname,oldname, zipfile.ZIP_DEFLATED)
                    os.remove(destname)
                    destname = os.path.join(dirname,zipname)
                else:
                    if debug:
                        print (" data is zipped already")
            localpathlist.append(destname)

        if protocol in ['ftp','FTP']:
            ftp.close()
    else:
        localpathlist = [elem for elem in filelist]


    ### ###############################################################################
    ###   3. Read local data and write to database  
    ### ###############################################################################

    ###  for all files conform with eventually provided datelist


    ###   3.1 Read local data
    ### -------------------------------------

    ###  flagging does not make sense 

    if db:
        if debug:
            print("3.1 Writing data to database")

        for f in localpathlist:
            data = read(f)

            if debug:
                print ("Dealing with {}. Length = {}".format(f,data.length()[0]))
                print ("SensorID in file: {}".format(data.header.get('SensorID')))

            statiddata = data.header.get('StationID','')
            if not stationid == '':
                if not statiddata == stationid and not statiddata == '':
                    print("StationID's from file and provided one (or dir) are different!")
                    print ("Using provided value")
                data.header['StationID'] = stationid
            else:
                if data.header.get('StationID','') == '':
                    print("Could not find station ID in datafile")
                    print("Please provide by using -t stationid")
                    sys.exit()
            if debug:
                print("Using StationID", data.header.get('StationID'))
            sensiddata = data.header.get('SensorID','')
            if not sensorid == '':
                if not sensiddata == sensorid and not sensiddata == '':
                    print("SensorID's from file and provided one (or dir) are different!")
                    print ("Using provided value")
                data.header['SensorID'] = sensorid
            else:
                if data.header.get('SensorID','') == '':
                    print("Could not find sensor ID in datafile")
                    print("Please provide by using -s sensorid")
                    sys.exit()
            if debug:
                print("Using SensorID", data.header.get('SensorID'))

            print("{}: Adding {} data points to DB now".format(data.header.get('SensorID'), data.length()[0]))

            if not len(data.ndarray[0]) > 0:
                data = data.linestruct2ndarray()  # Dealing with very old formats                   
            if len(data.ndarray[0]) > 0:
                if not force == '':
                    tabname = data.header.get('SensorID')+'_'+force
                    print (" - Force option chosen: forcing data to table {}".format(tabname))
                    writeDB(db,data, tablename=tabname)
                else:
                    writeDB(db,data)
                    pass
Example #15
 - magpy
 - sudo apt install percona-toolkit
 - main user (cobs) needs to be able to use sudo without passwd (add to /etc/sudoers)
"""
from __future__ import print_function

# Define packages to be used (local refers to test environment)
# ------------------------------------------------------------

from magpy.stream import *
from magpy.database import *
import magpy.opt.cred as mpcred

# Get password from cred
# ------------------------------------------------------------
dbpasswd = mpcred.lc('cobsdb', 'passwd')
dbuser = mpcred.lc('cobsdb', 'user')
sqluser = mpcred.lc('sql', 'user')
sqlpwd = mpcred.lc('sql', 'passwd')

# Use Telegram logging
# ------------------------------------------------------------
logpath = '/var/log/magpy/tg_db.log'
sn = 'ALDEBARAN'  # servername
statusmsg = {}
name = "{}-DBopt".format(sn)

# Connect to test database
# ------------------------------------------------------------
try:
    print("Connecting to DATABASE...")
Example #16
def ValidityCheckDirectories(config={},statusmsg={}, debug=False):
    """
    DESCRIPTION:
        Check availability of paths for saving data products
    """
    name0 = "{}-obligatory directories".format(config.get('logname','Dummy'))
    statusmsg[name0] = 'all accessible'
    successlist = [True,True]

    vpath = config.get('variationpath')
    qpath = config.get('quasidefinitivepath')
    figpath = config.get('magfigurepath')
    dipath = config.get('dipath')
    dbcreds = config.get('dbcredentials')
    if not isinstance(dbcreds,list):
        dbcreds = [dbcreds]
    try:
        # Assuming dbpasswd is also good for mounting
        dbcred = dbcreds[0] # only primary
        dbpasswd = mpcred.lc(dbcred,'passwd')
    except:
        dbpasswd=''


    def umount(path,pwd):
        """usage: umount("/srv/archive")"""
        cmd = 'umount ' + path
        print ("Sending umount command: {}".format(cmd))
        echo = 'echo {}|sudo -S {}'.format(pwd,cmd)
        subprocess.Popen(str(echo), shell=True, stdout=subprocess.PIPE)
        print ("Done")

    def mount(path,pwd):
        """usage: mount("/srv/archive")"""
        cmd = 'mount ' + path
        print ("Sending command: {}".format(cmd))
        echo = 'echo {}|sudo -S {}'.format(pwd,cmd)
        subprocess.Popen(str(echo), shell=True, stdout=subprocess.PIPE)

    if not os.path.isdir(vpath) and not os.path.isdir(qpath) and not os.path.isdir(figpath):
        print ("directory for products not accessible?")
        statusmsg[name0] = 'products unavailable'
        successlist[0] = False
        # all other jobs cannot be performed
        try:
            print ("unmounting...")
            umount("/srv/products",dbpasswd)
            time.sleep(10)
            print ("mounting products again...")
            mount("-a",dbpasswd)
            print ("success...")
            successlist[0] = True
            statusmsg[name0] = 'products unavailable - remounting successful'
        except:
            statusmsg[name0] = 'products unavailable - remounting failed'
    if not os.path.isdir(dipath):
        print ("archive not accessible?")
        statusmsg[name0] = 'archive unavailable'
        successlist[1] = False
        try:
            print ("unmounting...")
            umount("/srv/archive",dbpasswd)
            time.sleep(10)
            print ("mounting archive again...")
            mount("-a",dbpasswd)
            print ("success...")
            statusmsg[name0] = 'archive unavailable - remounting successful'
            successlist[1] = True
        except:
            statusmsg[name0] = 'archive unavailable - remounting failed'

    success = all(successlist)

    return success, statusmsg
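A possible call pattern for this check; the keys mirror the config.get() lookups above and all paths are placeholders:

# Hypothetical configuration for the directory check
config = {
    'logname': 'tg_base',
    'variationpath': '/srv/products/data/magnetism/variation',
    'quasidefinitivepath': '/srv/products/data/magnetism/quasidefinitive',
    'magfigurepath': '/srv/products/graphs/magnetism',
    'dipath': '/srv/archive/WIC/DI',
    'dbcredentials': ['cobsdb'],
}
success, statusmsg = ValidityCheckDirectories(config=config, statusmsg={}, debug=True)
if not success:
    print("directory check failed:", statusmsg)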
Exemple #17
0
def process_data(station, sensortype, sensors, basepath, fileext, date,
                 plotvariables, **kwargs):
    '''
    DEFINITION:
        STANDARD: Reads data, writes prelim data, plots & writes final data 
	OPTIONAL: Baseline & declination correct, multiply stream, upload, 
	produce plot of last 7 days.
	NOTE: Files must be saved under proper data format:
	--> /BASEPATH/TO/DATA/IAGA-ST-CODE/INSTRUMENT...
	(e.g: /srv/archive/magnetism/tse/lemi...)
	.../raw
	.../plots
	.../filtered
	Must also have file with header information under BASEPATH/CODE/CODE_headers.txt.

    PARAMETERS:
    Variables:
        - station: 		(str) IAGA-code for station
	- sensortype:		(str) 'combined' or 'normal'
				combined = only for magnetic variometer + magnetometer
				normal = all other data
	- sensors:		(str/list) Sensor name, 'ENV05_1_0001'
				Note: for 'combined' this is a list. ['LEMI025_22_0001', 'POS1_N432_0001']
	- basepath:		(str) Path to where data is stored.
	- fileext:		(str/list) File extension of data file, e.g. 'bin', 'cdf', 'txt'
				Note: for 'combined' this is a list. ['bin', 'cdf']
	- date:			(str) Date of data in format %Y-%m-%d / YYYY-MM-DD.
        - plotvariables: 	(list) List of magpy keys to plot.
    Kwargs:
	- logger:		(logging.Logger object) Logger for logging purposes.
	- decl:			(float) Will rotate data by this value, if given
	- mult_factors:		(dict) Will multiply corresponding stream keys with these factors
	- baseline:		(dict) Will baseline correct corresponding keys with these factors
	- upload:		(bool) If True, will upload files
	- sevendayplot:		(bool) If True, will produce plot of last 7 days
	- prelim:		(bool) If True, will save prelim file

    RETURNS:
        - True / False

    EXAMPLE:
        >>> 

    APPLICATION:

    '''
    decl = kwargs.get('decl')
    mult_factors = kwargs.get('mult_factors')
    baseline = kwargs.get('baseline')
    upload = kwargs.get('upload')
    prelim = kwargs.get('prelim')
    sevendayplot = kwargs.get('sevendayplot')
    logger = kwargs.get('logger')

    if not logger:
        logging.basicConfig(level=logging.INFO)
        logger = logging.getLogger(' %s ' % station.upper())

    header_data = {}
    headersfile = os.path.join(basepath, station, '%s_headers.txt' % station)
    with open(headersfile, 'r') as headers:
        for line in headers:
            hdata = line.split()
            if len(hdata) > 1:
                header_data[hdata[0].strip(':')] = hdata[1]

#--------------------------------------------------------------------
# 1. READ DATA, REMOVE OUTLIERS, FILTER

    if sensortype == 'combined':
        vario_sensor = sensors[0]
        magn_sensor = sensors[1]
        v_datafile = '%s_%s.%s' % (vario_sensor, date, fileext[0])
        m_datafile = '%s_%s.%s' % (magn_sensor, date, fileext[1])
        v_datapath = os.path.join(basepath, station, vario_sensor, 'raw',
                                  v_datafile)
        m_datapath = os.path.join(basepath, station, magn_sensor, 'raw',
                                  m_datafile)
        logger.info("Reading files %s and %s..." % (v_datafile, m_datafile))
        v_stream = read(v_datapath)
        m_stream = read(m_datapath)
        #v_stream.remove_outlier()
        #v_stream.remove_flagged()
        #m_stream.remove_outlier()
        #m_stream.remove_flagged()
        v_stream = v_stream.filter()
        m_stream = m_stream.filter()
        stream = mergeStreams(v_stream, m_stream)
        stream.header['col-f'] = 'F'
        stream.header['unit-col-f'] = 'nT'
        sensor = vario_sensor
        title = '%s-%s' % (vario_sensor, magn_sensor)

    elif sensortype == 'normal':
        sensor = sensors
        datafile = '%s_%s.%s' % (sensor, date, fileext)
        logger.info("Reading file %s..." % datafile)
        datapath = os.path.join(basepath, station, sensor, 'raw', datafile)
        stream = read(datapath)
        #stream.remove_outlier()
        #stream.remove_flagged()
        stream = stream.filter()
        title = sensor

    else:
        logger.error(
            "Wrong sensortype (%s). Options are 'combined' and 'normal'." %
            sensortype)
        return False

    for data_header in IAGA_headers:
        stream.header[data_header] = header_data[data_header]

    if sensor[:3].lower() == 'lem':
        stream.header['DataType'] = 'Magnetic'
        stream.header['DataComponents'] = 'x, y, z, F [nT]'
        stream.header['DataDigitalSampling'] = '0.1s, 5s'
        dx = 1000. * stream.header['DataCompensationX']
        dy = 1000. * stream.header['DataCompensationY']
        dz = 1000. * stream.header['DataCompensationZ']
        stream.header['DataSensorOrientation'] = "%s, %s, %s" % (dx, dy, dz)
    elif sensor[:3].lower() == 'env':
        stream.header['DataType'] = 'Environmental'
        stream.header[
            'DataComponents'] = 'T (ambient) [C], RH [%], T (dewpoint) [C]'
        stream.header['DataDigitalSampling'] = '1s'
        stream._move_column(plotvariables[0], 'x')
        stream._move_column(plotvariables[1], 'y')
        stream._move_column(plotvariables[2], 'z')
        plotvariables = ['x', 'y', 'z']
    elif sensor[:3].lower() == 'cs1':
        stream.header['DataType'] = 'Magnetic'
        stream.header['DataComponents'] = 'F [nT]'
        stream.header['DataDigitalSampling'] = '1s'

    filenamebegins = '%s_0002' % (title)
    #filenamebegins = '%s_%s_' % (station,title)

    if prelim:
        prelim_path = os.path.join(basepath, station, sensor, 'prelim')
        stream.write(prelim_path,
                     filenamebegins=filenamebegins + '_',
                     format_type='IAGA')
        logger.info("Preliminary data written to %s." % prelim_path)

#--------------------------------------------------------------------
# 2. (OPTIONAL) ROTATE, MULTIPLY, BASELINE CORRECT
#    Steps for PRELIMINARY --> FINAL

    if decl:
        stream.rotation(alpha=decl)

    if mult_factors:
        stream.multiply(mult_factors)

    if baseline:
        stream.offset(baseline)

#--------------------------------------------------------------------
# 3. PLOT

    sensorpadding = {'env': 0.5, 'pos': 10, 'lem': 5, 'cs1': 10}

    plotname = '%s_%s.png' % (filenamebegins, date)
    outfile = os.path.join(basepath, station, sensor, 'plots', plotname)
    mp.plot(stream,
            plotvariables,
            plottitle='%s %s (%s)' % (station.upper(), title, date),
            bgcolor='white',
            confinex=True,
            fullday=True,
            outfile=outfile,
            padding=sensorpadding[sensor[:3].lower()])

    logger.info("Data plotted to %s." % outfile)

    #--------------------------------------------------------------------
    # 4. SAVE & WRITE STREAM TO MINUTE FILE

    #filenamebegins = '%s_%s_' % (station,title)
    finalpath = os.path.join(basepath, station, sensor, filenamebegins)
    stream.write(finalpath,
                 filenamebegins=filenamebegins + '_',
                 format_type='IAGA')

    logger.info("Final data written to %s." % finalpath)

    #--------------------------------------------------------------------
    # 5. UPLOAD (plot + filtered data)

    cred = 'cobshomepage'
    myproxy = mpcred.lc(cred, 'address')
    login = mpcred.lc(cred, 'user')
    passwd = mpcred.lc(cred, 'passwd')
    #passwd = 'ku7tag8!haus' # TODO CHANGE THIS BACK
    port = mpcred.lc(cred, 'port')
    ftppath = 'cmsjoomla/images/stories/currentdata/'

    upload = False  # note: upload is force-disabled here, overriding the 'upload' kwarg
    if upload:
        try:
            filtered_file = '%s_%s.txt' % (filenamebegins, date)
            filtered_path = os.path.join(basepath, station, sensor,
                                         filenamebegins, filtered_file)
            logger.info("Uploading %s..." % filtered_path)
            ftpdatatransfer(
                localfile=filtered_path,
                ftppath=ftppath,  # TODO 
                myproxy=myproxy,
                port=port,
                login=login,
                passwd=passwd,
                raiseerror=True,
                logfile=os.path.join(basepath, station,
                                     '%s-transfer.log' % station))
        except:
            logger.error("Uploading failed.")

        try:
            plot_file = '%s_%s.png' % (filenamebegins, date)
            plot_path = os.path.join(basepath, station, title, 'plots',
                                     plot_file)
            logger.info("Uploading %s..." % plot_path)
            ftpdatatransfer(
                localfile=plot_path,
                ftppath=ftppath,  # TODO 
                myproxy=myproxy,
                port=port,
                login=login,
                passwd=passwd,
                raiseerror=True,
                logfile=os.path.join(basepath, station,
                                     '%s-transfer.log' % station))
        except:
            logger.error("Uploading failed.")

#--------------------------------------------------------------------
# 6. CREATE 7-DAY PLOT (x, y, z, F) & UPLOAD

    if sevendayplot:
        today = datetime.utcnow()
        date = datetime.strftime(today, "%Y-%m-%d")
        datapath = os.path.join(basepath, station, sensor, filenamebegins, '*')
        startdate = datetime.strptime(date, '%Y-%m-%d') - timedelta(days=7)
        start = datetime.strftime(startdate, "%Y-%m-%d") + ' 00:00:00'
        end = date + ' 00:00:00'

        last7days = read(path_or_url=datapath, starttime=start, endtime=end)
        plotname = 'TSE_last7days.png'
        plotpath = os.path.join(basepath, station, '7dayplots', plotname)
        diff = eval(last7days.header['DataSensorOrientation'])
        last7days = last7days.offset(offsets={
            'x': -float(diff[0]),
            'y': -float(diff[1]),
            'z': -float(diff[2])
        })
        last7days = last7days.calc_f()

        fig = mp.plot(last7days, ['x', 'y', 'z', 'f'],
                      plottitle='%s Magnetic Data (%s - %s)' %
                      (station, start[:10], end[:10]),
                      bgcolor='white',
                      noshow=True,
                      padding=5)

        axes = gcf().get_axes()

        day = datetime.strptime(start, '%Y-%m-%d %H:%M:%S')
        while day <= datetime.strptime(end, '%Y-%m-%d %H:%M:%S'):
            if day.weekday() in [5, 6]:  # Saturday or Sunday
                t_start = day
                t_end = day + timedelta(days=1)
                for ax in axes:
                    ax.axvspan(t_start,
                               t_end,
                               facecolor='green',
                               alpha=0.3,
                               linewidth=0)
            day += timedelta(days=1)

        ax.get_xaxis().set_major_formatter(
            matplotlib.dates.DateFormatter('%d.%b\n%H:%M'))

        plt.savefig(plotpath, dpi=80)
        ftppath = 'zamg/images/graphs/magnetism/'
        oldftppath = 'cmsjoomla/images/stories/currentdata/tse'

        scptransfer(plotpath, '94.136.40.103:' + ftppath, passwd)
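A call sketch for the routine above; station code, sensor IDs, base path and date are placeholders matching the directory layout described in the docstring:

# Hypothetical invocation for a combined variometer/magnetometer pair
process_data(station='tse',
             sensortype='combined',
             sensors=['LEMI025_22_0001', 'POS1_N432_0001'],
             basepath='/srv/archive/magnetism',
             fileext=['bin', 'cdf'],
             date='2015-06-01',
             plotvariables=['x', 'y', 'z'],
             prelim=True,
             sevendayplot=False)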
Exemple #18
0
    # Name of moon
    #clientname = 'raspberrypi'
    clientname = 'titan'
    # IP of moon
    #clientip = '192.168.178.47'
    clientip = '138.22.188.182'
    # Path of MARTAS directory on moon
    martaspath = '/home/cobs/MARTAS'
    # Provide Station code
    stationid = 'MyHome'
    # Select destination (file or db) - Files are saved in .../MARCOS/MoonsFiles/
    dest = 'db'
    # For Testing purposes - Print received data to screen:
    printdata = True
    # Please make sure that the db and scp connection data is stored within the credential file -otherwise provide this data directly
    dbhost = mpcred.lc('mydb', 'host')
    dbuser = mpcred.lc('mydb', 'user')
    dbpasswd = mpcred.lc('mydb', 'passwd')
    dbname = mpcred.lc('mydb', 'db')
    scpuser = mpcred.lc('cobs', 'user')
    scppasswd = mpcred.lc('cobs', 'passwd')
    # You can add to the credential file by using:
    # mpcred.cc('transfer','myshortcut',user='******',passwd='mypasswd',address='no-specific')
    # and then read it with e.g. scpuser = mpcred.lc('myshortcut','user')
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    #                 do necessary changes above
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

    log.startLogging(sys.stdout)
    sshcredlst = [scpuser, scppasswd]
    # ----------------------------------------------------------
Exemple #19
0
def main(argv):
    cred = ''
    path = ''
    depth = 2
    autofilter = 3
    flagging = False
    startdate = ''
    archiveformat = 'PYCDF'
    flaglist = []
    skip = ''
    samplingrateratio = 12  # samplingperiod (sec) * 12 days will be kept from today (e.g. 12 days of second data, 720 days of minute data)
    try:
        opts, args = getopt.getopt(argv, "hc:b:s:i:", [
            "cred=",
            "begin=",
            "skip=",
            "sr=",
        ])
    except getopt.GetoptError:
        print('deleteold.py -c <cred> -b <begin> -s <skip> -i <sr>')
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('-------------------------------------')
            print('Description:')
            print(
                '-- deleteold.py gets data from a databank and deletes old data sets  --'
            )
            print('Old database entries exceeding a defined age')
            print('are deleted.')
            print('The databank size is automatically restricted ')
            print(
                'in dependency of the sampling rate of the input data. Only the last '
            )
            print(
                '12 days of second data, the last 720 days of minute data and '
            )
            print(
                'approximately 118 years of hourly data are kept. To modify these default'
            )
            print(
                'settings please contact the developers (or learn python and')
            print(
                "edit the code - it's simple and the MagPy cookbook will help you)."
            )
            print('-------------------------------------')
            print('Usage:')
            print('deleteold.py -c <cred> -b <begin> -s <skip> -i <sr> ')
            print('-------------------------------------')
            print('Options:')
            print(
                '-c (required) : provide the shortcut to the data bank credentials as defined by addcred.py'
            )
            print('-b            : begin: not used so far')
            print(
                '-s            : list sensor IDs to skip (comma separated list)'
            )
            print(
                '-i            : samplingrateratio for deleting old db entries - default is 12'
            )
            print(
                '              : deleting data older than samplingrate(sec)*12 days.'
            )
            print(
                '              : => i=12 : 1sec data older than 12 days is deleted in DB'
            )
            print(
                '              :           1min data older than 720 days is deleted in DB'
            )
            print(
                '              : => i=1  : 1sec data older than 1 day is deleted in DB'
            )
            print(
                '              :           1min data older than 60 days is deleted in DB'
            )
            print('-------------------------------------')
            print('Example:')
            print('every day cron job: python deleteold.py -c cobsdb')
            print(
                'creating archive of old db entries: python archive.py -c cobsdb -p /media/Samsung/Observatory/data/ -d 30 -b "2012-06-01" -g -i 100 -a 3'
            )
            sys.exit()
        elif opt in ("-c", "--cred"):
            cred = arg
        elif opt in ("-b", "--begin"):
            startdate = arg
        elif opt in ("-s", "--skip"):
            skip = arg
        elif opt in ("-i", "--samplingrateratio"):
            try:
                samplingrateratio = int(arg)
            except:
                print("samplingrateratio needs to be an integer")
                sys.exit()

    if cred == '':
        print(
            'Specify a shortcut to the credential information by the -c option:'
        )
        print('-- check addcred.py -h for more options and requirements')
        sys.exit()

    print("Accessing data bank ...")
    try:
        db = mysql.connect(host=mpcred.lc(cred, 'host'),
                           user=mpcred.lc(cred, 'user'),
                           passwd=mpcred.lc(cred, 'passwd'),
                           db=mpcred.lc(cred, 'db'))
        print("success")
    except:
        print("failure - check your credentials / databank")
        sys.exit()

    # Getting dates
    datelist = []
    if startdate == '':
        current = datetime.utcnow()  # make that a variable
    else:
        current = DataStream()._testtime(startdate)

    newcurrent = current
    for elem in range(depth):
        datelist.append(datetime.strftime(newcurrent, "%Y-%m-%d"))
        newcurrent = current - timedelta(days=elem + 1)

    print("Dealing with time range:", datelist)

    testdate = datetime.strftime(
        (datetime.strptime(min(datelist), "%Y-%m-%d") - timedelta(days=1)),
        "%Y-%m-%d")

    # get a list with all datainfoids covering the selected time range
    if startdate:
        start = (datetime.strftime(DataStream()._testtime(startdate),
                                   "%Y-%m-%d"))
        sql = 'SELECT DataID FROM DATAINFO WHERE DataMaxTime > "' + start + '"'
    else:
        sql = 'SELECT DataID FROM DATAINFO WHERE DataMaxTime > "1900-01-01"'
    print(sql)

    # skip BLV measurements from cleanup
    sql = sql + ' AND SensorID NOT LIKE "BLV_%"'
    # skip Quakes from cleanup
    sql = sql + ' AND SensorID NOT LIKE "QUAKES"'

    if len(skip) > 0:
        skipstr = ''
        skiplst = skip.split(',')
        for sensortoskip in skiplst:
            skipstr += ' AND SensorID != "' + sensortoskip + '"'
        sql = sql + skipstr
        print(sql)

    cursor = db.cursor()
    try:
        cursor.execute(sql)
    except:
        print("Error when reading database")
    datainfoidlist = [elem[0] for elem in cursor.fetchall()]

    print("Cleaning database contens of:", datainfoidlist)

    for data in datainfoidlist:
        print(" ---------------------------- ")
        print(" ---------------------------- ")
        print("Loading data files of", data)
        print(" ---------------------------- ")
        print(" ---------------------------- ")
        print("Starting at: {}".format(datetime.utcnow()))
        # Test of dataid table exists
        try:
            getline = True
            amount = dbgetlines(db, data, 10000)
        except:
            print("Could not get lines from data file")
            getline = False

        if getline:
            sr = amount.samplingrate()
            delete = False
            if amount.length()[0] > 0:
                delete = True
                print(" ---------- Deleting old data for {}".format(data))
            else:
                print(" ---------- Doing nothing for table {}".format(data))
                #sql = 'DELETE FROM DATAINFO WHERE DataID = "{}"'.format(data)
                #print (sql)
                #cursor = db.cursor()
                #cursor.execute(sql)
                #db.commit()
                #cursor.close()

        if getline and delete and not isnan(sr):
            print("Now deleting old entries in database older than %s days" %
                  str(int(sr * samplingrateratio)))
            dbdelete(db, data, samplingrateratio=samplingrateratio)
            print(" -> ... success")
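The retention rule spelled out in the help text reduces to a cutoff date derived from the sampling rate; a small sketch (the helper name is hypothetical):

from datetime import datetime, timedelta

def retention_cutoff(samplingrate_sec, samplingrateratio=12):
    """Entries older than samplingrate (sec) * ratio days are removed,
    e.g. 1 sec data -> 12 days, 60 sec data -> 720 days for ratio 12."""
    return datetime.utcnow() - timedelta(days=samplingrate_sec * samplingrateratio)

print(retention_cutoff(1))    # cutoff date for second data
print(retention_cutoff(60))   # cutoff date for minute data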
Exemple #20
0
def main(argv):
    creddb = ''  # c
    dipath = ''  # a
    variolist = ''  # v
    variodataidlist = ''  # j
    scalarlist = ''  # s
    scalardataidlist = ''  # k
    pierlist = ''  # p
    abstypelist = ''  # y
    azimuthlist = ''  # z
    archive = ''  # w   (e.g. /srv/archive)
    identifier = 'BLV'  # f
    stationid = 'wic'  # t
    fallbackvariopath = ''  # o
    fallbackscalarpath = ''  # l
    begin = '1900-01-01'  # b
    end = datetime.strftime(datetime.utcnow(), "%Y-%m-%d")  # e
    expD = 3  # d
    expI = 64  # i
    compensation = False  # m
    rotation = False  # q
    dbadd = False  # n
    addBLVdb = False  # n
    flagging = False  # g
    createarchive = False  # r
    webdir = '/var/www/joomla/images/didaten/'  # TODO add option
    webuser = '******'
    webgroup = 'www-data'
    defaultuser = '******'
    defaultgroup = 'cobs'
    debug = False

    flaglist = []
    keepremote = False
    getremote = False
    remotecred = ''
    remotepath = ''
    variopath = ''  #
    scalarpath = ''  #

    try:
        opts, args = getopt.getopt(
            argv, "hc:a:v:j:s:k:o:mql:b:e:t:z:d:i:p:y:w:f:ngrx:u:D", [
                "cred=", "dipath=", "variolist=", "variodataidlist=",
                "scalarlist=", "scalardataidlist=", "variopath=",
                "compensation=", "rotation=", "scalarpath=", "begin=", "end=",
                "stationid=", "pierlist=", "abstypelist=", "azimuthlist=",
                "expD=", "expI=", "write=", "identifier=", "add2DB=", "flag=",
                "createarchive=", "webdir=", "keepremote", "debug="
            ])
    except getopt.GetoptError:
        print(
            'di.py -c <creddb> -a <dipath> -v <variolist>  -j <variodataidlist> -s <scalarlist> -o <variopath> -m <compensation> -q <rotation> -l <scalarpath> -b <startdate>  -e <enddate> -t <stationid>  -p <pierlist> -z <azimuthlist> -y <abstypelist> -d <expectedD> -i <expectedI> -w <writepath> -f<identifier> -n <add2DB>  -g  <flag> -r <createarchive> -x <webdir> -u <user> --keepremote'
        )
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('-------------------------------------')
            print('Description:')
            print(
                'dianalysis.py reads DI measurements and calculates DI values.'
            )
            print('Provide variometer and scalar data for correction.')
            print('Returns di values, f and collimation angles.')
            print(
                'A number of additional options allow for archiving, validity tests,'
            )
            print(
                'and output redirection. If variometer data is provided, base values'
            )
            print('are calculated. ')
            print('')
            print('-------------------------------------')
            print('Usage:')
            print(
                'di.py -c <creddb> -a <dipath> -v <variolist>  -j <variodataidlist> -s <scalarlist> -o <variopath> -l <scalarpath> -m <compensation> -q <rotation> -b <startdate>  -e <enddate> -t <stationid>  -p <pierlist> -z <azimuthlist> -y <abstypelist> -d <expectedD> -i <expectedI> -w <writepath> -n <add2DB>  -g  <flag> -r <createarchive> -x <webdir> -u <user> --keepremote'
            )
            print('-------------------------------------')
            print('Options:')
            print(
                '-c            : provide the shortcut to the data bank credentials'
            )
            print(
                '-a (required) : path to DI data - can be either a real path to a local directory'
            )
            print(
                '                    or a credential shortcut for a remote connection '
            )
            print(
                '-w (required) : archivepath for writing results and eventually accessing data.'
            )
            print('                e.g. /srv/archive. or /tmp')
            print(
                '                below this folder the following structure will be implemented:'
            )
            print('                -/srv/archive/"StationID"/DI/analyse : ')
            print(
                '                                 folder with raw data to be analyzed'
            )
            print('                -/srv/archive/"StationID"/DI/data : ')
            print(
                '                                 folder with calculated di results'
            )
            print(
                '                                 Name will be set to "BLV_" + variometerID + pier.'
            )
            print(
                '                                 This plain text file can be opened and'
            )
            print(
                '                                 analyzed with the MagPy stream package.'
            )
            print('                -/srv/archive/"StationID"/DI/raw : ')
            print(
                '                             archivefolder with successfully analyzed raw data'
            )
            print(
                '-f            : identifier for BLV data (in database and filename) - default is BLV'
            )
            print(
                '-v            : variolist - comma separated list of variometer ids'
            )
            print(
                '-j            : variodataidlist - specify the dataids to be used for each vario'
            )
            print('                Default: 0002 for each variometer sensor')
            print('-o            : path to variometer data')
            print(
                '-m (no input) : apply compensation field values to variometer data'
            )
            print('-q (no input) : apply rotation to variometer data')
            print('-s            : scalarpath - path to scalar data')
            print(
                '-k            : scalardataidlist - specify the dataids to be used for each scalar'
            )
            print('                Default: 0002 for each scalar sensor')
            print(
                '-b            : startdate - begin of analysis  (not yet active)'
            )
            print(
                '-e            : enddate - default is today (not yet active)')
            print(
                '-t (required) : ID of the station i.e. the Observatory code (required if'
            )
            print('                not in meta data of DI measurements)')
            print(
                '-z            : list of astronomic azimuths of the mark from the measurement pier'
            )
            print(
                '              : Azimuthlist needs either to be empty or to have the same order '
            )
            print('                and length as the pierlist.')
            print(
                '                use "False" if the specific value should be taken from the'
            )
            print('                originalfile/database')
            print('                e.g. -p D3,H6  -z False,156.678')
            print(
                '-y            : comma separated list of absolute data types for each pier'
            )
            print('              : "di" for standard theodolite or "autodif" ')
            print('              : e.g. -p D3,H6 -y di,autodif ')
            print('-d            : expected declination')
            print('-i            : expected inclination')
            print(
                '-p (required) : name/number of the pier, comma separated list'
            )
            print('-n (no input) : add di and basevalues to data base')
            print(
                '-g (no input) : read flaglist from DB if db is opened and add flags'
            )
            print(
                '-r            : move successfully analyzed files to raw archive'
            )
            print('-x            : directory to copy non-analyzed files to.')
            print(
                '              : can be a www directory at which PHP-scripts are used to edit data.'
            )
            print('-u            : define user for which jobs are performed')
            print('              : e.g. cobs:cobsgroup')
            print(
                "--keepremote  : Don't delete remote files after downloading them"
            )
            print('-------------------------------------')
            print('Examples:')
            print('1. Running on MARCOS servers:')
            print(
                'python di.py -c wic -a cobshome,cobenzlabs -v "FGE_S0252_0001"'
            )
            print('      -s "POS1_N432_0001" -j 0002 -b 2014-01-01')
            print(
                '      -w /media/DAE2-4808/archive -d 3 -i 64 -t wic -p H1,A7,A2,A16'
            )
            print(
                '      -y di,di,di,autodif -z False,179.8978,180.1391,267.3982'
            )
            print('2. Running it with manually provided data links:')
            print(
                'python di.py -c wic -a /media/DAE2-4808/archive/WIC/DI/analyze'
            )
            print(
                '      -v "FGE_S0252_0001" -s "POS1_N432_0001" -j 0002 -b 2014-02-01'
            )
            print(
                '      -e 2014-05-01 -w /media/DAE2-4808/archive -d 3 -i 64 -t wic'
            )
            print('      -p H1,A7,A2,A16 -y di,di,di,autodif -r')
            print('      -z False,179.8978,180.1391,267.3982 -u user:group')
            print('python di.py -c cobs -a cobshomepage,cobenzlabs ')
            print(
                '      -v DIDD_3121331_0002,LEMI025_1_0002 -s DIDD_3121331_0002'
            )
            print('      -j 0001,0001 -b 2014-10-01 -e 2014-10-07')
            print('      -w /srv/archive -d 3 -i 64 -t wik -p D -n -r')
            sys.exit()
        elif opt in ("-c", "--creddb"):
            creddb = arg
        elif opt in ("-a", "--dipath"):
            dipath = arg
        elif opt in ("-w", "--archive"):
            archive = arg
        elif opt in ("-f", "--identifier"):
            identifier = arg
        elif opt in ("-v", "--variolist"):
            variolist = arg.split(',')
        elif opt in ("-j", "--variodataidlist"):
            variodataidlist = arg.split(',')
        elif opt in ("-s", "--scalarlist"):
            scalarlist = arg.split(',')
        elif opt in ("-k", "--scalardataidlist"):
            scalardataidlist = arg.split(',')
        elif opt in ("-o", "--variopath"):
            fallbackvariopath = arg
        elif opt in ("-m", "--compensation"):
            compensation = True
        elif opt in ("-q", "--rotation"):
            rotation = True
        elif opt in ("-l", "--scalarpath"):
            fallbackscalarpath = arg
        elif opt in ("-b", "--begin"):
            begin = arg
        elif opt in ("-e", "--end"):
            end = arg
        elif opt in ("-t", "--stationid"):
            stationid = arg
        elif opt in ("-p", "--pierlist"):
            pierlist = arg.split(',')
        elif opt in ("-z", "--azimuthlist"):
            azimuthlist = arg.split(',')
        elif opt in ("-y", "--abstypelist"):
            abstypelist = arg.split(',')
        elif opt in ("-x", "--webdir"):
            webdir = arg
        elif opt in ("-u", "--user"):
            user = arg.split(':')
            if len(user) > 1:
                defaultuser = user[0]
                defaultgroup = user[1]
        elif opt in ("-d", "--expectedD"):
            try:
                expD = float(arg)
            except:
                print("expected declination needs to be a float")
                sys.exit()
        elif opt in ("-i", "--expectedI"):
            try:
                expI = float(arg)
            except:
                print("expected inclination needs to be a float")
                sys.exit()
        elif opt in ("-n", "--add2db"):
            dbadd = True
            addBLVdb = True
        elif opt in ("-g", "--flag"):
            flagging = True
        elif opt in ("--keepremote"):
            keepremote = True
        elif opt in ("-r", "--createarchive"):
            createarchive = True
        elif opt in ("-D", "--debug"):
            debug = True

    print("-------------------------------------")
    print("Starting di analysis ... MARTAS version {}".format(__version__))
    print("-------------------------------------")

    if dipath == '':
        print(' Specify the path to the DI data: -a /path/to/my/data !')
        print(' -- check dianalysis.py -h for more options and requirements')
        sys.exit()

    if archive == '':
        print(
            ' Specify an Archive path for writing results: -w /path/to/my/archive !'
        )
        print(' -- check dianalysis.py -h for more options and requirements')
        sys.exit()

    if variolist == '':
        variolist = []
    if scalarlist == '':
        scalarlist = []
    if azimuthlist == '':
        azimuthlist = []
    if abstypelist == '' or len(abstypelist) == 0:
        abstypelist = ['di' for elem in pierlist]
    if variodataidlist == '':
        variodataidlist = []
    if scalardataidlist == '':
        scalardataidlist = []
    if len(variodataidlist) == 0:
        variodataidlist = ['0002' for elem in variolist]
    else:
        if not len(variolist) == len(variodataidlist):
            print(
                ' You need to specify a specific DataID for each variometer: e.g. -j 0002,0001'
            )
            print(
                ' -- check dianalysis.py -h for more options and requirements')
            sys.exit()
    if len(scalardataidlist) == 0:
        scalardataidlist = ['0002' for elem in scalarlist]
    else:
        if not len(scalarlist) == len(scalardataidlist):
            print(
                ' You need to specify a specific DataID for each scalar sensor: e.g. -k 0002,0001'
            )
            print(
                ' -- check dianalysis.py -h for more options and requirements')
            sys.exit()

    if not len(abstypelist) == 0:
        if not len(abstypelist) == len(pierlist):
            print(
                ' Abstypelist needs to have the same order and length as the pierlist'
            )
            print(
                ' -- check dianalysis.py -h for more options and requirements')
            sys.exit()

    try:
        test = datetime.strptime(begin, "%Y-%m-%d")
        print(test)
    except:
        print(' Date format for begin seems to be wrong: -b 2013-11-22')
        print(' -- check dianalysis.py -h for more options and requirements')
        sys.exit()

    try:
        datetime.strptime(end, "%Y-%m-%d")
    except:
        print(' Date format for end seems to be wrong: -e 2013-11-22')
        print(' -- check dianalysis.py -h for more options and requirements')
        sys.exit()

    if pierlist == []:
        print(
            ' Specify a list of the measurement piers containing at least one element: -p [Pier2]'
        )
        print(' -- check dianalysis.py -h for more options and requirements')
        sys.exit()

    if not len(azimuthlist) == 0:
        if not len(azimuthlist) == len(pierlist):
            print(
                ' Azimuthlist needs to have the same order and length as the pierlist'
            )
            print(
                ' -- check dianalysis.py -h for more options and requirements')
            sys.exit()

    if stationid == '':
        print(' Specify a station name e.g. your observatory code')
        print(' -- check dianalysis.py -h for more options and requirements')
        sys.exit()
    else:
        stationid = stationid.upper()

    if not creddb == '':
        print("  Accessing data bank ...")
        try:
            db = mysql.connect(host=mpcred.lc(creddb, 'host'),
                               user=mpcred.lc(creddb, 'user'),
                               passwd=mpcred.lc(creddb, 'passwd'),
                               db=mpcred.lc(creddb, 'db'))
            print("  ... success")
        except:
            print("  ... failure - check your credentials")
            sys.exit()
    else:
        db = False

    if not fallbackvariopath == '':
        if not fallbackvariopath.endswith('*'):
            fallbackvariopath = os.path.join(fallbackvariopath, '*')
        variopath = fallbackvariopath

    if not fallbackscalarpath == '':
        if not fallbackscalarpath.endswith('*'):
            fallbackscalarpath = os.path.join(fallbackscalarpath, '*')
        scalarpath = fallbackscalarpath

    if variolist == []:
        if fallbackvariopath == '':
            print(
                '  !! You have not provided any variometer information at all')

    if scalarlist == []:
        if fallbackscalarpath == '':
            print('  You have not provided any independent scalar information')
            print(
                '  -> we assume this data is provided along with the DI files')

    # -----------------------------------------------------
    # a) Basic information
    # -----------------------------------------------------
    if debug:
        print(" -------------------------------------")
        print(" Archive", archive)
        print(" Variolist", variolist)
        print(" Abstypelist", abstypelist)
        print(" Dipath", dipath)

    # -----------------------------------------------------
    # b) Getting new raw data from the input server
    # -----------------------------------------------------
    print(" -------------------------------------")
    print(" Identifying DI data source")
    if not os.path.exists(dipath):
        print("  Checking given DI path for credential information ...")
        try:
            credlist = mpcred.sc()
            credshort = [elem[0] for elem in credlist]
            print("  ... found credentials")
        except:
            print(
                " dipath {} not existing - credentials not accessible - aborting"
                .format(dipath))
            sys.exit()
        try:
            dic = dipath.split(',')
            print(dic)
            print(len(dic))
            if len(dic) == 2:
                remotecred = dic[0]
                remotepath = dic[1]
                print(
                    "  Using credentials {} to get DI data from the remote path {}"
                    .format(dic[0], dic[1]))
            elif len(dic) == 1:
                remotecred = dic[0]
                remotepath = ''
                print("  Using credentials {} to get DI data".format(dic[0]))
            else:
                print(
                    "  -> could not interpret dipath in terms of credential information"
                )
                sys.exit()
            if remotecred in credshort:
                getremote = True
            else:
                print(
                    "  -> dipath %s not existing - credentials not existing - aborting"
                    % dipath)
        except:
            print(
                "  -> dipath %s not existing - credentials not existing - aborting"
                % dipath)
            sys.exit()
        if getremote == False:
            sys.exit()
    else:
        print("  Found directory at specified dipath location")

    # Getting data from the webdir (eventually edited and corrected)
    if createarchive and not webdir == '':
        print(" -------------------------------------")
        print(" Createarchive and webdir selected ...")
        dipath = os.path.join(archive, stationid, 'DI', 'analyze')
        for pier in pierlist:
            diid = pier + '_' + stationid + '.txt'
            for infile in iglob(os.path.join(webdir, '*' + diid)):
                # Testing whether file exists:
                if os.path.exists(
                        os.path.join(dipath,
                                     os.path.split(infile)[1])):
                    print("  Deleting:",
                          os.path.join(dipath,
                                       os.path.split(infile)[1]))
                    os.remove(os.path.join(dipath, os.path.split(infile)[1]))
                print("  Retrieving from webdir: ", infile)
                shutil.copy(infile, dipath)
                # Setting permission to defaultuser even if started the job
                uid = pwd.getpwnam(defaultuser)[2]
                gid = grp.getgrnam(defaultgroup)[2]
                os.chown(os.path.join(dipath,
                                      os.path.split(infile)[1]), uid, gid)
                # Deleting file from web dir
                try:
                    os.remove(os.path.join(webdir, os.path.split(infile)[1]))
                except:
                    print("  !! No permissions to modify webdirectory")
                    pass

    # copy all files from web directory to the analysis folder
    if getremote:
        delete = True
        if keepremote:
            delete = False
        print(" -------------------------------------")
        print(
            " Getting remote data - deleting downloaded data from remote source set to {}"
            .format(delete))
        dipath = os.path.join(archive, stationid, 'DI', 'analyze')
        for pier in pierlist:
            if not os.path.exists(dipath):
                os.makedirs(dipath)
            diid = pier + '_' + stationid + '.txt'
            try:
                port = mpcred.lc(remotecred, 'port')
            except:
                port = 21
            ftpget(mpcred.lc(remotecred, 'address'),
                   mpcred.lc(remotecred, 'user'),
                   mpcred.lc(remotecred, 'passwd'),
                   remotepath,
                   os.path.join(archive, stationid, 'DI', 'analyze'),
                   diid,
                   port=port,
                   delete=delete)

    if debug:
        print(" -------------------------------------")
        print(
            " DI data defined and collected - now starting the analysis for variometer: {}"
            .format(variolist))

    print(" ")

    # -----------------------------------------------------
    # c) analyze all files in the local analysis directory and put successfully analyzed data to raw
    # -----------------------------------------------------
    for pier in pierlist:
        print("######################################################")
        print("Starting analysis for pier ", pier)
        print("######################################################")
        abspath = dipath
        diid = pier + '_' + stationid + '.txt'
        for vario in variolist:
            dataid = variodataidlist[variolist.index(vario)]
            if os.path.exists(
                    os.path.join(archive, stationid, vario,
                                 vario + '_' + dataid)):
                variopath = os.path.join(archive, stationid, vario,
                                         vario + '_' + dataid, vario + '*')
            else:
                variopath = vario
                if not os.path.exists(variopath):
                    print(
                        " -> No variometerdata found in the specified paths/IDs - using dummy path"
                    )
                    variopath = '/tmp/*'
            print(" -> Using Variometerdata at:", variopath)
            for scalar in scalarlist:
                # Define paths for variometer and scalar data
                scalarid = scalardataidlist[scalarlist.index(scalar)]
                if os.path.exists(
                        os.path.join(archive, stationid, scalar,
                                     scalar + '_' + scalarid)):
                    scalarpath = os.path.join(archive, stationid, scalar,
                                              scalar + '_' + scalarid,
                                              scalar + '*')
                else:
                    scalarpath = scalar
                    if not os.path.exists(scalarpath):
                        print(
                            " -> No scalar data found in the specified paths/IDs - using dummy path"
                        )
                        scalarpath = '/tmp/*'
                print(" -> Using Scalar data at:", scalarpath)
                # ALPHA and delta needs to be provided with the database

                print(" -------------------------------------")
                print(
                    " Extracting delta and rotation parameters ... should not be necessary as this is done by absoluteAnalysis provided a database is connected"
                )
                deltaF = 0.0
                alpha = 0.0
                beta = 0.0
                """
                if db:
                    print(" ")
                    alpha =  dbgetfloat(db, 'DATAINFO', vario, 'DataSensorAzimuth')
                    if not isNumber(alpha):
                        alpha = 0.0
                    beta =  dbgetfloat(db, 'DATAINFO', vario, 'DataSensorTilt')
                    if not isNumber(beta):
                        beta = 0.0
                    deltaF =  dbgetfloat(db, 'DATAINFO', scalar, 'DataDeltaF')
                    if not isNumber(deltaF):
                        deltaF = 0.0
                else:
                    # eventually add an input option
                    # load a scalar file from path and get delta F from header
                    try:
                        scal = read(scalarpath,starttime=begin,endtime=begin)
                        try:
                            scal = applyDeltas(db,scal)
                            deltaF = 0.0
                        except:
                            deltaF = scal.header['DataDeltaF']
                    except:
                        deltaF = 0.0
                    try:
                        var = read(variopath,starttime=begin,endtime=begin)
                        try:
                            var = applyDeltas(db,var)
                        except:
                            pass
                        # TODO this is wrong -> but clarify whether a correction is necessary at all 
                        alpha = var.header['DataSensorAzimuth']
                        beta = var.header['DataSensorTilt']
                    except:
                        alpha = 0.0
                        beta = 0.0
                print("using alpha, beta, deltaF:", alpha, beta, deltaF)
                """

                print(" -------------------------------------")
                print(
                    " Extracting azimuth data ... should be contained in DI files, can be provided as option, is contained in PIERS table of DB"
                )
                # Azimuths are usually contained in the DI files
                ## Eventually overriding azimuths in DI files
                if len(azimuthlist) > 0:
                    azimuth = azimuthlist[pierlist.index(pier)]
                    if azimuth == 'False' or azimuth == 'false':
                        azimuth = False
                else:
                    azimuth = False
                if azimuth:
                    print(
                        " -> Overriding (eventual) DI files data with an azimuth of {} deg"
                        .format(azimuth))
                else:
                    print(" -> Using azimuth from DI file")
                if len(abstypelist) > 0:
                    abstype = abstypelist[pierlist.index(pier)]
                    if abstype == 'False' or abstype == 'false':
                        abstype = False
                else:
                    abstype = False
                if abstype:
                    print(" -> Selected type of absolute measurements is {}".
                          format(abstype))
                else:
                    print(" -> Absolute measurement type taken from DI file")
                # TODO ... Get azimuth data from PIERS table
                if db:
                    print(
                        " Checking azimuth in PIERS table of the database ...")
                    val = dbselect(db, 'AzimuthDictionary', 'PIERS',
                                   'PierID like "{}"'.format(pier))[0]
                    print("Found ", val)

                print(" -------------------------------------")
                movetoarchive = False
                if createarchive and variolist.index(
                        vario) == len(variolist) - 1 and scalarlist.index(
                            scalar) == len(scalarlist) - 1:
                    print(
                        " Running analysis - and moving successfully analyzed files to raw directory"
                    )
                    movetoarchive = os.path.join(archive, stationid, 'DI',
                                                 'raw')
                else:
                    print(
                        " Running analysis - and keeping files in analyze directory"
                    )
                absstream = absoluteAnalysis(abspath,
                                             variopath,
                                             scalarpath,
                                             expD=expD,
                                             expI=expI,
                                             diid=diid,
                                             stationid=stationid,
                                             abstype=abstype,
                                             azimuth=azimuth,
                                             pier=pier,
                                             alpha=alpha,
                                             deltaF=deltaF,
                                             starttime=begin,
                                             endtime=end,
                                             db=db,
                                             dbadd=dbadd,
                                             compensation=compensation,
                                             magrotation=rotation,
                                             movetoarchive=movetoarchive,
                                             deltaD=0.0000000001,
                                             deltaI=0.0000000001)
                print(" -> Done")

                # -----------------------------------------------------
                # d) write data to a file and sort it, write it again
                #          (workaround to get sorting correctly)
                # -----------------------------------------------------
                print(" -------------------------------------")
                if absstream and absstream.length()[0] > 0:
                    print(" Writing {} data line(s) ...".format(
                        absstream.length()[0]))
                    absstream.write(os.path.join(archive, stationid, 'DI',
                                                 'data'),
                                    coverage='all',
                                    mode='replace',
                                    filenamebegins=identifier + '_' + vario +
                                    '_' + scalar + '_' + pier)
                    try:
                        # Reload all data, delete old file and write again to get correct ordering
                        newabsstream = read(
                            os.path.join(
                                archive, stationid, 'DI', 'data', identifier +
                                '_' + vario + '_' + scalar + '_' + pier + '*'))
                        os.remove(
                            os.path.join(
                                archive, stationid, 'DI', 'data',
                                identifier + '_' + vario + '_' + scalar + '_' +
                                pier + '.txt'))  # delete file from hd
                        newabsstream.write(os.path.join(
                            archive, stationid, 'DI', 'data'),
                                           coverage='all',
                                           mode='replace',
                                           filenamebegins=identifier + '_' +
                                           vario + '_' + scalar + '_' + pier)
                    except:
                        print(" Stream apparently not existing...")
                    print(" -> Done")
                    if addBLVdb:
                        # SensorID necessary....
                        print(" Adding data to the data bank ... ")
                        #newabsstream.header["SensorID"] = vario
                        writeDB(db,
                                absstream,
                                tablename=identifier + '_' + vario + '_' +
                                scalar + '_' + pier)
                        #stream2db(db,newabsstream,mode='force',tablename=identifier+'_'+vario+'_'+scalar+'_'+pier)
                        print(" -> Done")

        # -----------------------------------------------------
        # f) get flags and apply them to data
        # -----------------------------------------------------
                    if db and flagging and addBLVdb:
                        newabsstream = readDB(
                            db, identifier + '_' + vario + '_' + scalar + '_' +
                            pier)
                        flaglist = db2flaglist(
                            db, identifier + '_' + vario + '_' + scalar + '_' +
                            pier)
                    elif addBLVdb:
                        newabsstream = readDB(
                            db, identifier + '_' + vario + '_' + scalar + '_' +
                            pier)
                        flaglist = []
                    if len(flaglist) > 0:
                        flabsstream = newabsstream.flag(flaglist)
                        #for i in range(len(flaglist)):
                        #    flabsstream = newabsstream.flag_stream(flaglist[i][2],flaglist[i][3],flaglist[i][4],flaglist[i][0],flaglist[i][1])
                        flabsstream.write(os.path.join(archive, stationid,
                                                       'DI', 'data'),
                                          coverage='all',
                                          filenamebegins=identifier + '_' +
                                          vario + '_' + scalar + '_' + pier)
                        pltabsstream = flabsstream.remove_flagged()

        # -----------------------------------------------------
        # h) fit baseline and plot
        # -----------------------------------------------------
                    try:
                        #pltabsstream = read(os.path.join(archive,stationid,'DI','data',identifier+'_'+vario+'_'+scalar+'_'+pier+'*'))
                        pltabsstream.trim(starttime=datetime.utcnow() -
                                          timedelta(days=380))
                        # fit baseline using the parameters defined in db (if parameters not available then skip fitting)
                        #absstream = absstream.fit(['dx','dy','dz'],poly,4)
                        savename = identifier + '_' + vario + '_' + scalar + '_' + pier + '.png'
                        #absstream = absstream.extract('f',98999,'<')
                        mp.plot(pltabsstream, ['dx', 'dy', 'dz'],
                                symbollist=['o', 'o', 'o'],
                                plottitle=vario + '_' + scalar + '_' + pier,
                                outfile=os.path.join(archive, stationid, 'DI',
                                                     'graphs', savename))
        #absstream.plot(['dx','dy','dz'],symbollist=['o','o','o'],plottitle=vario+'_'+scalar+'_'+pier,outfile=os.path.join(archive,stationid,'DI','graphs',savename))
                    except:
                        pass

    # -----------------------------------------------------
    # j) move files from analyze folder to web folder
    # -----------------------------------------------------
    # move only if createarchive is selected
    if createarchive:
        print(" -------------------------------------")
        print(
            " Dont mind the error message - works only if su at cron is running this job"
        )
        filelst = []
        for infile in iglob(
                os.path.join(archive, stationid, 'DI', 'analyze', '*.txt')):
            print("Processing ", infile)
            filelst.append(infile)
            destination = '/var/www/joomla/images/didaten/'
            infilename = os.path.split(infile)
            print(infilename)
            try:
                shutil.copy(infile, destination)
                #perform changes to privs
                if not webuser == '':
                    uid = pwd.getpwnam(webuser)[2]
                    gid = grp.getgrnam(webgroup)[2]
                    os.chown(os.path.join(destination, infilename[1]), uid,
                             gid)
            except:
                print("Webdir not accessible - finishing")
                pass

    print("----------------------------------------------------------------")
    print("di.py app finished")
    print("----------------------------------------------------------------")
    print("SUCCESS")
Exemple #21
0
                # lastfiles looks like: {'/path/to/my/file81698.txt' : '2019-01-01T12:33:12', ...}

        if not lastfiles == {}:
            print ("write memory")
            pass

        sourcepath = workdictionary.get(key).get('path')
        starttime = workdictionary.get(key).get('starttime')
        endtime = workdictionary.get(key).get('endtime')
        newfiledict, alldic = getchangedfiles(sourcepath, lastfiles, starttime, endtime)

        print ("Found new: {} and all {}".format(newfiledict, alldic))

        for dest in workdictionary.get(key).get('destinations'):
            print ("  -> Destination: {}".format(dest))
            address=mpcred.lc(dest,'address')
            user=mpcred.lc(dest,'user')
            passwd=mpcred.lc(dest,'passwd')
            port=mpcred.lc(dest,'port')
            destdict = workdictionary.get(key).get('destinations')[dest]
            #print (destdict)
            if address and user and newfiledict:
                for nfile in newfiledict:
                    print ("    -> Uploading {} to dest {}".format(nfile, dest))
                    success = uploaddata(nfile, destdict.get('path'), destdict.get('type'), address, user, passwd, port, logfile=destdict.get('logfile','stdout'))
                    print ("    -> Success", success)
                    if not success:
                        #remove nfile from alldic 
                        # thus it will be retried again next time
                        print (" !---> upload of {} not successful: keeping it in todo list".format(nfile))
                        del alldic[nfile]
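# A minimal, hedged sketch (not part of the original example): after the upload
# loop the remaining entries of alldic are typically written back to a JSON
# "memory" file so that files whose upload failed are retried on the next run.
# The helper name write_memory() and the path below are assumptions used only
# for illustration; the memory format mirrors the lastfiles comment above.
import json

def write_memory(memorypath, memorydict):
    """Persist {'/path/to/my/file81698.txt': '2019-01-01T12:33:12', ...} to disk."""
    with open(memorypath, 'w') as fh:
        json.dump(memorydict, fh, indent=2)

# e.g. write_memory('/tmp/lastupload.json', alldic)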
Exemple #22
0
    # Name of moon
    #clientname = 'raspberrypi'
    clientname = 'titan'
    # IP of moon
    #clientip = '192.168.178.47'
    clientip = '138.22.188.182'
    # Path of MARTAS directory on moon
    martaspath = '/home/cobs/MARTAS'
    # Provide Station code
    stationid = 'MyHome'
    # Select destination (file or db) - Files are saved in .../MARCOS/MoonsFiles/
    dest = 'db'
    # For Testing purposes - Print received data to screen:
    printdata = True
    # Please make sure that the db and scp connection data is stored within the credential file - otherwise provide this data directly
    dbhost = mpcred.lc('mydb','host')
    dbuser = mpcred.lc('mydb','user')
    dbpasswd = mpcred.lc('mydb','passwd')
    dbname = mpcred.lc('mydb','db')
    scpuser = mpcred.lc('cobs','user')
    scppasswd = mpcred.lc('cobs','passwd')
    # You can add to the credential file by using:
    # mpcred.cc('transfer','myshortcut',user='******',passwd='mypasswd',address='no-specific')
    # and then read it with scpuser = mpcred.lc('myshortcut','user')
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    #                 do necessary changes above
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!


    log.startLogging(sys.stdout)
    sshcredlst = [scpuser,scppasswd]
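# A hedged sketch illustrating the credential workflow described in the
# comments above: store a credential set once with mpcred.cc and read the
# values back with mpcred.lc. The shortcut 'myshortcut' and all values are
# placeholders for illustration only.
import magpy.opt.cred as mpcred

# store once (e.g. in an interactive session)
mpcred.cc('transfer', 'myshortcut', user='myuser', passwd='mypasswd', address='192.168.0.1')

# look the values up later by shortcut and key
scpuser = mpcred.lc('myshortcut', 'user')
scppasswd = mpcred.lc('myshortcut', 'passwd')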
Exemple #23
0
def main(argv):
    #broker = '192.168.178.75'
    broker = 'localhost'  # default
    #broker = '192.168.178.84'
    #broker = '192.168.0.14'
    port = 1883
    timeout=60
    user = ''
    password = ''
    global destination
    destination='stdout'
    global location
    location='/tmp'
    global credentials
    credentials=''
    global offset
    offset = ''
    global dbcred
    dbcred=''
    global stationid
    stationid = 'wic'
    global source
    source='mqtt' # projected sources: mqtt (default), wamp, mysql, postgres, etc
    global qos
    qos=0
    global debug
    debug = False
    global output
    global headstream
    headstream = {}
    #global verifiedlocation
    #verifiedlocation = False
    global dictcheck
    dictcheck = False

    usagestring = 'collector.py -b <broker> -p <port> -t <timeout> -o <topic> -d <destination> -l <location> -c <credentials> -r <dbcred> -q <qos> -u <user> -P <password> -s <source> -f <offset>'
    try:
        opts, args = getopt.getopt(argv,"hb:p:t:o:d:l:c:r:q:u:P:s:f:U",["broker=","port=","timeout=","topic=","destination=","location=","credentials=","dbcred=","qos=","debug=","user=","password=","source=","offset="])
    except getopt.GetoptError:
        print('Check your options:')
        print(usagestring)
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('------------------------------------------------------')
            print('Usage:')
            print(usagestring)
            print('------------------------------------------------------')
            print('Options:')
            print('-h                             help')
            print('-b                             set broker address: default = localhost')
            print('-p                             set port - default is 1883')
            print('-t                             set timeout - default is 60')
            print('-o                             set base topic - for MARTAS this corresponds')
            print('                               to the station ID (e.g. wic)')
            print('-d                             set destination - std.out, db, file') 
            print('                               default is std.out') 
            print('-l                             set location depending on destination')
            print('                               if d="file": provide path')
            print('                               if d="db": provide db credentials')
            print('                               if d="std.out": not used')
            print('-c                             set mqtt communication credential keyword')
            print('-q                             set mqtt quality of service (default = 0)')            
            print('-r                             set db credential keyword')            
            print('-s                             source protocol of data: default is mqtt')            
            print('                               other options:')
            print('                               -s wamp    not yet implemented.')
            print('-f                             offset values. Can either be "db" for')
            print('                               applying delta values from db or a string')
            print('                               of the following format (key:value):')
            print('                               -f "t1:3.234,x:45674.2"')
            print('                               other options:')
            print('------------------------------------------------------')
            print('Examples:')
            print('1. Basic')
            print('   python collector.py -b "192.168.0.100" -o wic')
            print('2. Writing to file in directory "/my/path/"')
            print('   python collector.py -b "192.168.0.100" -d file -l "/my/path" -o wic')
            print('3. Writing to file and stdout')
            print('   python collector.py -b "192.168.0.100" -d file,stdout -l "/tmp" -o wic')
            print('4. Writing to db')
            print('   python collector.py -b "192.168.0.100" -d db -r mydb -o wic')
            print('   python collector.py -d "db,file" -r testdb')
            sys.exit()
        elif opt in ("-b", "--broker"):
            broker = arg
        elif opt in ("-p", "--port"):
            try:
                port = int(arg)
            except:
                port = arg
        elif opt in ("-t", "--timeout"):
            try:
                timeout = int(arg)
            except:
                timeout = arg
        elif opt in ("-o", "--topic"):
            stationid = arg
        elif opt in ("-s", "--source"):
            source = arg
        elif opt in ("-d", "--destination"):
            destination = arg
        elif opt in ("-l", "--location"):
            location = arg
        elif opt in ("-c", "--credentials"):
            credentials = arg
        elif opt in ("-r", "--dbcred"):
            dbcred = arg
        elif opt in ("-q", "--qos"):
            try:
                qos = int(arg)
            except:
                qos = 0
        elif opt in ("-u", "--user"):
            user = arg
        elif opt in ("-P", "--password"):
            password = arg
        elif opt in ("-f", "--offset"):
            offset = arg
        elif opt in ("-U", "--debug"):
            debug = True

    if not qos in [0,1,2]:
        qos = 0
    if 'stringio' in destination:
        output = StringIO.StringIO()
    if 'file' in destination:
        if location in [None,''] or not os.path.exists(location):
            print ('destination "file" requires a valid path provided as location')
            print (' ... aborting ...')
            sys.exit()
    if 'websocket' == destination:
        if ws_available:
            wsThr = threading.Thread(target=wsThread)
            print('starting websocket on port 5000...')
            wsThr.start()
        else:
            print("use pip install websocket-server")
            sys.exit()
    if 'db' in destination:
        if dbcred in [None,'']:
            print ('destination "db" requires database credentials')
            print ('to create them use method "addcred"')
            print ('to provide use option -r like -r mydb')
            sys.exit()
        else:
            try:
                global db
                if debug:
                    print ("Connecting database {} at host {} with user {}".format(mpcred.lc(dbcred,'db'),mpcred.lc(dbcred,'host'),mpcred.lc(dbcred,'user')))
                db = mysql.connect(host=mpcred.lc(dbcred,'host'),user=mpcred.lc(dbcred,'user'),passwd=mpcred.lc(dbcred,'passwd'),db=mpcred.lc(dbcred,'db'))
            except:
                print ('database could not be connected')
                print (' ... aborting ...')
                sys.exit()            

    if debug:
        print ("Option u: debug mode switched on ...")
        print ("------------------------------------")
        print ("Destination", destination, location)

    if source == 'mqtt':
        client = mqtt.Client()
        # Authentication part
        if not user in ['',None,'None','-']: 
            #client.tls_set(tlspath)  # check http://www.steves-internet-guide.com/mosquitto-tls/
            client.username_pw_set(user, password=password)  # defined on broker by mosquitto_passwd -c passwordfile user
        client.on_connect = on_connect
        # on message needs: stationid, destination, location
        client.on_message = on_message
        client.connect(broker, port, timeout)
        client.loop_forever()
    elif source == 'wamp':
        print ("Not yet supported! -> check autobahn import, crossbario")
    else:
        print ("Additional protocols can be added in future:")
Exemple #24
0
def main(argv):
    broker = 'localhost'  # default
    port = 1883
    timeout = 60
    altbroker = ''
    user = ''
    password = ''
    logging = 'sys.stdout'
    global destination
    destination = 'stdout'
    global location
    location = '/tmp'
    global credentials
    credentials = ''
    global offset
    offset = ''
    global dbcred
    dbcred = ''
    global stationid
    stationid = 'wic'
    global stid
    stid = stationid
    global webpath
    webpath = './web'
    global webport
    webport = 8080
    global instrument
    instrument = ''
    global revision
    revision = 'fix'
    global telegramconf
    telegramconf = ''
    global addlib
    addlib = []
    global source
    source = 'mqtt'  # projected sources: mqtt (default), wamp, mysql, postgres, etc
    global debug
    debug = False
    global output
    global headstream
    headstream = {}
    global topic_identifiers
    topic_identifiers = {}
    global class_reference
    class_reference = {}
    #global verifiedlocation
    #verifiedlocation = False
    global dictcheck
    dictcheck = False
    global socketport
    global number
    number = 1
    global qos
    qos = 0
    global blacklist
    blacklist = []

    usagestring = 'collector.py -b <broker> -p <port> -t <timeout> -o <topic> -i <instrument> -d <destination> -v <revision> -l <location> -c <credentials> -r <dbcred> -q <qos> -u <user> -P <password> -s <source> -f <offset> -m <marcos> -n <number> -e <telegramconf> -a <addlib>'
    try:
        opts, args = getopt.getopt(
            argv, "hb:p:t:o:i:d:vl:c:r:q:u:P:s:f:m:n:e:a:U", [
                "broker=", "port=", "timeout=", "topic=", "instrument=",
                "destination=", "revision=", "location=", "credentials=",
                "dbcred=", "qos=", "debug=", "user="******"password="******"source=",
                "offset=", "marcos=", "number=", "telegramconf=", "addlib="
            ])
    except getopt.GetoptError:
        print('Check your options:')
        print(usagestring)
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('------------------------------------------------------')
            print('Usage:')
            print(usagestring)
            print('------------------------------------------------------')
            print('Options:')
            print('-h                             help')
            print(
                '-b                             set broker address: default = localhost'
            )
            print('-p                             set port - default is 1883')
            print('-t                             set timeout - default is 60')
            print(
                '-o                             set base topic - for MARTAS this corresponds'
            )
            print(
                '                               to the station ID (e.g. wic)')
            print(
                '                               use "-o all" to get all stationids at a specific broker'
            )
            print(
                '-i                             choose instrument(s) - only sensors containing'
            )
            print(
                '                               the provided string are used: '
            )
            print(
                '                               -i GSM  will access GSM90_xxx and GSM19_xyz '
            )
            print('                               Default is to use all')
            print(
                '-d                             set destination - std.out, db, file'
            )
            print('                               default is std.out')
            print('-v                             (flag - no argument required)')
            print(
                '                               if provided: meta information will be added, revision'
            )
            print(
                '                                     automatically assigned.')
            print(
                '                               if not set: revision 0001 will be used, no meta.'
            )
            print(
                '-l                             set location depending on destination'
            )
            print('                               if d="file": provide path')
            print('                               if d="std.out": not used')
            print(
                '-c                             set mqtt communication credential keyword'
            )
            print(
                '-q                             set mqtt quality of service (default = 0)'
            )
            print('-r                             set db credential keyword')
            print(
                '-s                             source protocol of data: default is mqtt'
            )
            print('                               other options:')
            print(
                '                               -s wamp    not yet implemented.'
            )
            print(
                '-f                             offset values. Can either be "db" for'
            )
            print(
                '                               applying delta values from db or a string'
            )
            print(
                '                               of the following format (key:value):'
            )
            print('                               -f "t1:3.234,x:45674.2"')
            print('                               other options:')
            print('-m                             marcos configuration file ')
            print(
                '                               e.g. "/home/cobs/marcos.cfg"')
            print('-n                             provide an integer number ')
            print(
                '                               "-d diff -i GSM": difference of two GSM will'
            )
            print(
                '                                                 be calculated every n th step.'
            )
            print(
                '-e                             provide a path to telegram configuration for '
            )
            print(
                '                               sending critical log changes.')
            print(
                '-a                             additional MQTT translation library '
            )
            print('------------------------------------------------------')
            print('Examples:')
            print('1. Basic')
            print('   python collector.py -b "192.168.0.100" -o wic')
            print('2. Writing to file in directory "/my/path/"')
            print(
                '   python collector.py -b "192.168.0.100" -d file -l "/my/path" -o wic'
            )
            print('3. Writing to file and stdout')
            print(
                '   python collector.py -b "192.168.0.100" -d file,stdout -l "/tmp" -o wic'
            )
            print('4. Writing to db')
            print(
                '   python collector.py -b "192.168.0.100" -d db -r mydb -o wic'
            )
            print('   python collector.py -d "db,file" -r testdb')
            print('5. Using configuration file')
            print('   python collector.py -m "/path/to/marcos.cfg"')
            print('6. Overriding individual parameters from config file')
            print(
                '   python collector.py -m "/path/to/marcos.cfg" -b "192.168.0.100"'
            )
            print('   (make sure that config is called first)')
            print('7. Calculating differences/gradients on the fly:')
            print('   python collector.py -d diff -i G823A -n 10')
            print(
                '   (will calculate the diffs of two G823A every 10th record)')
            sys.exit()
        elif opt in ("-m", "--marcos"):
            marcosfile = arg
            print("Getting all parameters from configration file: {}".format(
                marcosfile))
            conf = acs.GetConf(marcosfile)
            if not conf.get('logging', '') == '':
                logging = conf.get('logging').strip()
            if not conf.get('broker', '') == '':
                broker = conf.get('broker').strip()
            if not conf.get('mqttport', '') in ['', '-']:
                port = int(conf.get('mqttport').strip())
            if not conf.get('mqttdelay', '') in ['', '-']:
                timeout = int(conf.get('mqttdelay').strip())
            if not conf.get('mqttuser', '') in ['', '-']:
                user = conf.get('mqttuser').strip()
            if not conf.get('mqttqos', '') in ['', '-']:
                try:
                    qos = int(conf.get('mqttqos').strip())
                except:
                    qos = 0
            if not conf.get('mqttcredentials', '') in ['', '-']:
                credentials = conf.get('mqttcredentials').strip()
            if not conf.get('blacklist', '') in ['', '-']:
                blacklist = conf.get('blacklist').split(',')
                blacklist = [el.strip() for el in blacklist]
            if not conf.get('station', '') in ['', '-']:
                stationid = conf.get('station').strip()
                stid = stationid
            if not conf.get('destination', '') in ['', '-']:
                destination = conf.get('destination').strip()
            if not conf.get('filepath', '') in ['', '-']:
                location = conf.get('filepath').strip()
            if not conf.get('databasecredentials', '') in ['', '-']:
                dbcred = conf.get('databasecredentials').strip()
            if not conf.get('revision', '') in ['', '-']:
                revision = conf.get('revision').strip()
            if not conf.get('offset', '') in ['', '-']:
                offset = conf.get('offset').strip()
            if not conf.get('debug', '') in ['', '-']:
                debug = conf.get('debug').strip()
                if debug in ['True', 'true']:
                    debug = True
                else:
                    debug = False
            if not conf.get('socketport', '') in ['', '-']:
                try:
                    socketport = int(conf.get('socketport').strip())
                except:
                    print(
                        'socketport could not be extracted from marcos config file'
                    )
                    socketport = 5000
            if not conf.get('webport', '') in ['', '-']:
                try:
                    webport = int(conf.get('webport').strip())
                except:
                    print(
                        'webport could not be extracted from marcos config file'
                    )
                    webport = 8080
            if not conf.get('webpath', '') in ['', '-']:
                webpath = conf.get('webpath').strip()
            if not conf.get('telegramconf', '') in ['', '-']:
                telegramconf = conf.get('telegramconf').strip()
            if not conf.get('addlib', '') in ['', '-']:
                addlib = conf.get('addlib').strip().split(',')
            source = 'mqtt'
        elif opt in ("-b", "--broker"):
            broker = arg
        elif opt in ("-p", "--port"):
            try:
                port = int(arg)
            except:
                port = arg
        elif opt in ("-t", "--timeout"):
            try:
                timeout = int(arg)
            except:
                timeout = arg
        elif opt in ("-o", "--topic"):
            stationid = arg
        elif opt in ("-i", "--instrument"):
            instrument = arg
        elif opt in ("-s", "--source"):
            source = arg
        elif opt in ("-d", "--destination"):
            destination = arg
        elif opt in ("-l", "--location"):
            location = arg
        elif opt in ("-c", "--credentials"):
            credentials = arg
        elif opt in ("-v", "--revision"):
            revision = "free"
        elif opt in ("-r", "--dbcred"):
            dbcred = arg
        elif opt in ("-q", "--qos"):
            try:
                qos = int(arg)
            except:
                qos = 0
        elif opt in ("-u", "--user"):
            user = arg
        elif opt in ("-P", "--password"):
            password = arg
        elif opt in ("-f", "--offset"):
            offset = arg
        elif opt in ("-n", "--number"):
            number = arg
        elif opt in ("-e", "--telegramconf"):
            telegramconf = arg
        elif opt in ("-a", "--addlib"):
            addlib = arg.split(',')
        elif opt in ("-U", "--debug"):
            debug = True

    if debug:
        print("collector starting with the following parameters:")
        print(
            "Logs: {}; Broker: {}; Topic/StationID: {}; QOS: {}; MQTTport: {}; MQTTuser: {}; MQTTcredentials: {}; Data destination: {}; Filepath: {}; DB credentials: {}; Offsets: {}"
            .format(logging, broker, stationid, qos, port, user, credentials,
                    destination, location, dbcred, offset))

    try:
        ##  Start Twisted logging system
        ##  ----------------------------
        if logging == 'sys.stdout':
            log.startLogging(sys.stdout)
        else:
            try:
                print(" -- Logging to {}".format(logging))
                log.startLogging(open(logging, 'a'))
                log.msg("----------------")
                log.msg("  -> Logging to {}".format(logging))
            except:
                log.startLogging(sys.stdout)
                log.msg("Could not open {}. Switching log to stdout.".format(
                    logging))
    except:
        print("Logging requires twisted module")
        sys.exit()

    try:
        ##  Eventually import additional libraries
        ##  ----------------------------
        if addlib and len(addlib) > 0:
            print("Importing additional library")
            for lib in addlib:
                exec("from libmqtt.{} import {}".format(lib, lib))
                exec("c{} = {}()".format(lib, lib))
                class_reference[lib] = eval("c{}".format(lib))
                topic_identifiers[lib] = eval(
                    "c{}.topicidentifier".format(lib))
                print("Imported library {}: Topic identifiers are {}".format(
                    lib, topic_identifiers[lib]))
    except Exception:
        log.msg("Could not import additional libraries - skipping")

    if debug:
        log.msg(
            "Logs: {}; Broker: {}; Topic/StationID: {}; QOS: {}; MQTTport: {}; MQTTuser: {}; MQTTcredentials: {}; Data destination: {}; Filepath: {}; DB credentials: {}; Offsets: {}"
            .format(logging, broker, stationid, qos, port, user, credentials,
                    destination, location, dbcred, offset))

    log.msg("----------------")
    log.msg(" Starting collector {}".format(__version__))
    log.msg("----------------")

    if not qos in [0, 1, 2]:
        qos = 0

    if 'stringio' in destination:
        output = StringIO()
    if 'file' in destination:
        if location in [None, ''] or not os.path.exists(location):
            log.msg(
                'destination "file" requires a valid path provided as location'
            )
            log.msg(' ... aborting ...')
            sys.exit()
    if 'websocket' in destination:
        if ws_available:
            # 0.0.0.0 makes the websocket accessible from anywhere
            global wsserver
            wsserver = WebsocketServer(socketport, host='0.0.0.0')
            wsThr = threading.Thread(target=wsThread, args=(wsserver, ))
            # start websocket-server in a thread as daemon, so the entire Python program exits
            wsThr.daemon = True
            log.msg('starting WEBSOCKET on port ' + str(socketport))
            wsThr.start()
            # start webserver as process, also as daemon (kills process, when main program ends)
            webPr = Process(target=webProcess, args=(webpath, webport))
            webPr.daemon = True
            webPr.start()
            log.msg('starting WEBSERVER on port ' + str(webport))
        else:
            print(
                "no webserver or no websocket-server available: remove 'websocket' from destination"
            )
            sys.exit()
    if 'db' in destination:
        if dbcred in [None, '']:
            log.msg('destination "db" requires database credentials')
            log.msg('to create them use method "addcred"')
            log.msg('to provide use option -r like -r mydb')
            sys.exit()
        else:
            try:
                global db
                if debug:
                    log.msg("Connecting database {} at host {} with user {}".
                            format(mpcred.lc(dbcred, 'db'),
                                   mpcred.lc(dbcred, 'host'),
                                   mpcred.lc(dbcred, 'user')))
                db = mysql.connect(host=mpcred.lc(dbcred, 'host'),
                                   user=mpcred.lc(dbcred, 'user'),
                                   passwd=mpcred.lc(dbcred, 'passwd'),
                                   db=mpcred.lc(dbcred, 'db'))
            except:
                log.msg(
                    'database {} at host {} with user {} could not be connected'
                    .format(mpcred.lc(dbcred, 'db'), mpcred.lc(dbcred, 'host'),
                            mpcred.lc(dbcred, 'user')))
                log.msg(' ... aborting ...')
                sys.exit()

    if debug:
        log.msg("Option u: debug mode switched on ...")
        log.msg("------------------------------------")
        log.msg("Destination: {} {}".format(destination, location))

    if source == 'mqtt':
        client = connectclient(broker, port, timeout, credentials, user,
                               password, qos)
        client.loop_forever()

    elif source == 'wamp':
        log.msg("Not yet supported! -> check autobahn import, crossbario")
    else:
        log.msg("Additional protocols can be added in future...")
Exemple #25
0
    clientip = '138.22.188.181'
    # Path of MARTAS directory on martas machine
    martaspath = '/home/cobs/MARTAS' 
    # Path of MARCOS directory
    homedir = '/home/cobs'
    defaultuser = '******'
    # Provide Station code
    stationid = 'WIC'
    # Select destination (file or db) - Files are saved in .../MARCOS/MartasFiles/
    dest = 'db'
    # For Testing purposes - Print received data to screen:
    printdata = False
    # Please make sure that the db and scp connection data is stored
    # within the credential file - otherwise provide this data directly

    dbhost = mpcred.lc('cobsdb','host',path='/home/cobs/.magpycred')
    dbuser = mpcred.lc('cobsdb','user',path='/home/cobs/.magpycred')
    dbpasswd = mpcred.lc('cobsdb','passwd',path='/home/cobs/.magpycred')
    dbname = mpcred.lc('cobsdb','db',path='/home/cobs/.magpycred')
    scpuser = mpcred.lc('ceres','user',path='/home/cobs/.magpycred')
    scppasswd = mpcred.lc('ceres','passwd',path='/home/cobs/.magpycred')
    # You can add to the credential file by using: 
    # mpcred.cc('transfer','myshortcut',user='******',passwd='mypasswd',address='no-specific')
    # and then read it with scpuser = mpcred.lc('myshortcut','user')
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    #                 do necessary changes above
    # !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

    print(dbhost, dbuser, dbpasswd, dbname, scpuser)

    logfile = os.path.join(homedir,'MARCOS','Logs','marcos.log')
Exemple #26
0
def automated_storm_plot(magdata, satdata_1m, satdata_5m, mag_results, sat_results,
        plotvars=['x','var2', 'var1'], logpflux=True, estimate=False, savepath='',upload=False):
           
    date = datetime.strftime(mag_results['ssctime'],'%Y-%m-%d')
    ssctime = mag_results['ssctime']
    ssctime_st = datetime.strftime(ssctime,'%H:%M:%S')
    ssctime_st_long = datetime.strftime(ssctime,'%Y-%m-%dT%H:%M:%S')
    acetime = sat_results['satssctime']
    acetime_st = datetime.strftime(acetime,'%H:%M:%S')
    acetime_st_long = datetime.strftime(acetime,'%Y-%m-%dT%H:%M:%S')
    if logpflux:
        pflux = satdata_5m.ndarray[KEYLIST.index('var1')].astype(float)
        logpflux = np.log10(pflux)
        satdata_5m._put_column(np.asarray(logpflux), 'var1')
    satdata_5m.header['col-var1'] = 'log10(P-flux 47-68 keV)\n   '
    satdata_5m.header['unit-col-var1'] = 'p/cm2-s-ster-MeV'

    magdata = magdata.nfilter()
    dH = mag_results['amp']
    h_max = magdata._get_max(plotvars[0])
    h_min = magdata._get_min(plotvars[0])

    v_wind = sat_results['vwind']
    v_max = satdata_1m._get_max(plotvars[1])
    v_min = satdata_1m._get_min(plotvars[1])
    
    startdate = datetime.strftime(num2date(magdata.ndarray[0][0]), '%Y-%m-%d')
    enddate = datetime.strftime(num2date(magdata.ndarray[0][-1]), '%Y-%m-%d')
    if startdate == enddate:
        datestr = startdate
    else:
        datestr = startdate+' - '+enddate

    fig = plotStreams([magdata, satdata_1m, satdata_5m], [[plotvars[0]], [plotvars[1]], [plotvars[2]]],
            noshow=True,
            plottitle="Magnetic (Conrad Obs) and satellite (ACE SWEPAM+EPAM) data\n(%s)" % datestr)

    axes = gcf().get_axes()
    
    # Draw lines at detection points:
    axes[0].axvline(x=date2num(ssctime),color='red',ls='--',lw=2,zorder=0, clip_on=False)
    axes[1].axvline(x=date2num(acetime),color='gray',ls='--',lw=2,zorder=0, clip_on=False)
    axes[2].axvline(x=date2num(acetime),color='gray',ls='--',lw=2,zorder=0, clip_on=False)
    
    # Give larger borders to lines:
    axes[0].axvline(x=date2num(ssctime), ymin=0, ymax=0.94, color='red',ls='-',lw=15,zorder=0,alpha=0.3, clip_on=False)
    axes[1].axvline(x=date2num(acetime), ymin=0, ymax=0.94, color='gray',ls='-',lw=15,zorder=0,alpha=0.3, clip_on=False)
    axes[2].axvline(x=date2num(acetime), ymin=0.07, ymax=0.94, color='gray',ls='-',lw=15,zorder=0,alpha=0.3, clip_on=False)
    
    # Annotate with automatically detected variables:
    if estimate:
        axes[0].text(0.50, 0.90, "No SSC detected!",
                    verticalalignment='top', horizontalalignment='left',
                    transform=axes[0].transAxes, color='red', style='italic')
        magstring = "Expected SSC:\n%s UTC\ndH = ??? nT" % (ssctime_st)
    else:
        magstring = "SSC:\n%s UTC\ndH = %.1f nT" % (ssctime_st, dH)
        
    axes[0].text(0.78, 0.90, magstring,
                verticalalignment='top', horizontalalignment='left',
                transform=axes[0].transAxes,
                bbox=dict(boxstyle="square",fc='1.0')
                )
    axes[1].text(0.78, 0.90, "CME at ACE:\n%s UTC\nv = %.0f km/s" % (acetime_st, v_wind),
                verticalalignment='top', horizontalalignment='left',
                transform=axes[1].transAxes,
                bbox=dict(boxstyle="square",fc='1.0')
                )

    # Format time to fit nicely on x-axis:
    myFmt = DateFormatter('%H:%M')
    axes[2].xaxis.set_major_formatter(myFmt)

    # Save to output file:
    #plt.show()
    magoutfile = os.path.join(savepath, "stormplot_%s.png" % date)
    plt.savefig(magoutfile)

    if upload:
        cred = 'cobshomepage'
        address=mpcred.lc(cred,'address')
        user=mpcred.lc(cred,'user')
        passwd=mpcred.lc(cred,'passwd')
        port=mpcred.lc(cred,'port')
        remotepath = 'zamg/images/graphs/spaceweather/storms'

        scptransfer(magoutfile,'94.136.40.103:'+remotepath,passwd)

    command = ["cp", magoutfile, "/srv/products/graphs/spaceweather/storms/"]
    print "Executing command %s" % command
    call(command)
    upload = True
Exemple #27
0
def ConnectDatabases(config={}, debug=False):
    """
    DESCRIPTION:
        Database connection
    """

    connectdict = {}
    config['primaryDB'] = None

    dbcreds = config.get('dbcredentials')
    if not isinstance(dbcreds, list):
        dbcreds = [dbcreds]

    # First in list is primary
    for credel in dbcreds:
        # Get credentials
        dbpwd = mpcred.lc(credel, 'passwd')
        dbhost = mpcred.lc(credel, 'host')
        dbuser = mpcred.lc(credel, 'user')
        dbname = mpcred.lc(credel, 'db')

        # Connect DB
        if dbhost:
            try:
                if debug:
                    print(
                        "    -- Connecting to database {} ...".format(credel))
                    print("    -- {} {} {}".format(dbhost, dbuser, dbname))
                connectdict[credel] = mysql.connect(host=dbhost,
                                                    user=dbuser,
                                                    passwd=dbpwd,
                                                    db=dbname)
                if debug:
                    print("...success")
            except:
                pass

    if len(connectdict) == 0:
        print("  No database found - aborting")
        sys.exit()
    else:
        if debug:
            print("    -- at least on db could be connected")

    if connectdict.get(dbcreds[0], None):
        if debug:
            print("    -- primary db is available: {}".format(dbcreds[0]))
        config['primaryDB'] = connectdict[dbcreds[0]]
    else:
        print(
            " Primary database not available - selecting alternative as primary"
        )
        for el in dbcreds:
            if connectdict.get(el, None):
                config['primaryDB'] = connectdict[el]
                print("   -> selected database {} as primary".format(el))
                break

    config['connectedDB'] = connectdict

    return config
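# A hedged usage sketch for ConnectDatabases: the credential shortcuts below
# are placeholders; the first entry in the list is treated as the primary
# database, and alternatives are used only if the primary is unavailable.
config = {'dbcredentials': ['cobsdb', 'backupdb']}
config = ConnectDatabases(config=config, debug=True)
db = config.get('primaryDB')          # primary (or first available) connection
alldbs = config.get('connectedDB')    # dictionary of all connected databases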
Exemple #28
0
def main(argv):
    statusmsg = {}
    jobs = ''
    tele = ''
    sendlogpath = ''
    try:
        opts, args = getopt.getopt(argv, "hj:m:t:", [
            "jobs=",
            "memory=",
            "telegram=",
        ])
    except getopt.GetoptError:
        print('file_uploads.py -j <jobs> -m <memory> -t <telegram>')
        sys.exit(2)
    for opt, arg in opts:
        if opt == '-h':
            print('-------------------------------------')
            print('Description:')
            print(
                '-- file_uploads.py sends data to any destination of your choice --'
            )
            print(
                '-----------------------------------------------------------------'
            )
            print(
                'file_uploads is a python wrapper allowing you to send data by')
            print('ftp, scp, or sftp using similar inputs.')
            print('It can handle several different uploads at once.')
            print('Upload parameters have to be provided in a json structure.')
            print('file_uploads requires magpy >= 0.9.5.')
            print('-------------------------------------')
            print('Usage:')
            print('file_uploads.py -j <jobs> -m <memory> -t <telegram>')
            print('-------------------------------------')
            print('Options:')
            print('-j (required) : a json structure defining the uploads')
            print('-m (required) : a path for "memory"')
            print('-------------------------------------')
            print('Example of jobs structure:')
            print(
                '{"wicadjmin":{"path":"/home/leon/Tmp/Temp","destinations":{"gleave":{"type":"sftp", "path" : "/uploads/all-obs"}},'
            )
            print(
                '"log":"/home/leon/Tmp/wicadjart.log","endtime":"utcnow","starttime":2}}'
            )
            print('! please note endtime is usually utcnow')
            print(
                '! starttime is given as a timedelta in days toward endtime, an integer is required'
            )
            print('-------------------------------------')
            print('Application:')
            print(
                'python3 file_uploads.py -j /my/path/uploads.json -m /tmp/sendmemory.json'
            )
            sys.exit()
        elif opt in ("-j", "--jobs"):
            jobs = arg
        elif opt in ("-m", "--memory"):
            sendlogpath = arg
        elif opt in ("-t", "--telegram"):
            tele = arg

    if tele:
        # ################################################
        #          Telegram Logging
        # ################################################

        ## New Logging features
        coredir = os.path.abspath(os.path.join(scriptpath, '..', 'core'))
        sys.path.insert(0, coredir)
        from martas import martaslog as ml
        # tele needs to provide logpath, and config path ('/home/cobs/SCRIPTS/telegram_notify.conf')
        logpath = '/var/log/magpy/mm-fu-uploads.log'

    if jobs == '':
        print('Specify a valid path to a jobs dictionary (json):')
        print('-- check file_uploads.py -h for more options and requirements')
        sys.exit()
    else:
        if os.path.isfile(jobs):
            with open(jobs, 'r') as file:
                workdictionary = json.load(file)
        else:
            print('Specify a valid path to a jobs dictionary (json):')
            print(
                '-- check file_uploads.py -h for more options and requirements'
            )
            sys.exit()

    # make fileupload an independent method importing workingdictionary (add to MARTAS?)
    if not sendlogpath:
        sendlogpath = '/tmp/lastupload.json'
    """
    Main Prog
    """
    try:
        for key in workdictionary:
            name = "FileUploads-{}".format(key)
            print("DEALING with ", key)
            lastfiles = {}
            fulldict = {}
            if os.path.isfile(sendlogpath):
                with open(sendlogpath, 'r') as file:
                    fulldict = json.load(file)
                    lastfiles = fulldict.get(key)
                    # lastfiles looks like: {'/path/to/my/file81698.txt' : '2019-01-01T12:33:12', ...}

            if not lastfiles == {}:
                print("opened memory")
                pass

            sourcepath = workdictionary.get(key).get('path')
            extensions = workdictionary.get(key).get('extensions', [])
            namefractions = workdictionary.get(key).get('namefractions', [])
            # Test if sourcepath is file
            starttime = workdictionary.get(key).get('starttime',
                                                    datetime(1777, 4, 30))
            endtime = workdictionary.get(key).get('endtime', datetime.utcnow())
            if endtime in ["utc", "now", "utcnow"]:
                endtime = datetime.utcnow()
            if isinstance(starttime, int):
                starttime = datetime.utcnow() - timedelta(days=starttime)
            newfiledict, alldic = getchangedfiles(sourcepath, lastfiles,
                                                  starttime, endtime,
                                                  extensions, namefractions)

            print("Found new: {} and all {}".format(newfiledict, alldic))

            for dest in workdictionary.get(key).get('destinations'):
                print("  -> Destination: {}".format(dest))
                address = mpcred.lc(dest, 'address')
                user = mpcred.lc(dest, 'user')
                passwd = mpcred.lc(dest, 'passwd')
                port = mpcred.lc(dest, 'port')
                destdict = workdictionary.get(key).get('destinations')[dest]
                proxy = destdict.get('proxy', None)
                #print (destdict)
                if address and user and newfiledict:
                    for nfile in newfiledict:
                        print("    -> Uploading {} to dest {}".format(
                            nfile, dest))
                        success = uploaddata(nfile,
                                             destdict.get('path'),
                                             destdict.get('type'),
                                             address,
                                             user,
                                             passwd,
                                             port,
                                             proxy=proxy,
                                             logfile=destdict.get(
                                                 'logfile', 'stdout'))
                        print("    -> Success", success)
                        if not success:
                            #remove nfile from alldic
                            # thus it will be retried again next time
                            print(
                                " !---> upload of {} not successful: keeping it in todo list"
                                .format(nfile))
                            del alldic[nfile]

            fulldict[key] = alldic
            writecurrentdata(sendlogpath, fulldict)
        statusmsg[name] = "uploading data succesful"
    except:
        statusmsg[name] = "error when uploading files - please check"

    if tele:
        print(statusmsg)
        martaslog = ml(logfile=logpath, receiver='telegram')
        martaslog.telegram[
            'config'] = '/home/cobs/SCRIPTS/telegram_notify.conf'
        martaslog.msg(statusmsg)

    print("----------------------------------------------------------------")
    print("file upload app finished")
    print("----------------------------------------------------------------")
    print("SUCCESS")
Exemple #29
0
#!/usr/bin/env python
from magpy.stream import read
import magpy.mpplot as mp
from magpy.database import *
import magpy.opt.cred as mpcred

dbpasswd = mpcred.lc('cobsdb', 'passwd')
db = mysql.connect(host="138.22.188.195",
                   user="******",
                   passwd=dbpasswd,
                   db="cobsdb")

vario1 = read('/srv/archive/WIC/LEMI036_1_0002/raw/*2016-02-28.bin')
vario2 = read('/srv/archive/WIC/LEMI025_22_0002/raw/*2016-02-28.bin')

print(vario1.length(), vario2.length())

mp.plotStreams([vario1, vario2], [['z'], ['z']])
Exemple #30
0
        cmetime = cmetime.astimezone(utczone)
        sscdata = reportdata[skip+2].split()
        ssctime = datetime.strptime(sscdata[5]+'T'+sscdata[6], '%Y-%m-%dT%H:%M:%S')
        ssctime = ssctime.replace(tzinfo=cetzone)
        ssctime = ssctime.astimezone(utczone)
        sscamp = np.nan
        vwind = float(reportdata[skip+3].split()[4])
        estimate = True

    if (now - ssctime).total_seconds() > 24.*60.*60.:
        print("Time since SSC exceeds 24 hours. Exiting.")
        sys.exit()
        
    basepath = '/srv/archive'
    dbcred = 'cobsdb'
    dbhost = mpcred.lc(dbcred, 'host')
    dbuser = mpcred.lc(dbcred, 'user')
    dbpasswd = mpcred.lc(dbcred, 'passwd')
    dbname = mpcred.lc(dbcred, 'db')

    try:
        inst = 'FGE_S0252_0001_0001'
        db = mysql.connect(host=dbhost,user=dbuser,passwd=dbpasswd,db=dbname)
        magdata = readDB(db, inst, starttime=cmetime-timedelta(hours=1), endtime=endtime)
        if len(magdata.ndarray[0]) == 0:
            raise ValueError("No data available from database.")
    except:
        print("Reading data from archive...")
        magdata = read(os.path.join(basepath,"WIC","FGE_S0252_0001","FGE_S0252_0001_0001","FGE_S0252_0001_0001_*"),
                   starttime=cmetime-timedelta(hours=1), endtime=now)
    satdata_1m = read(os.path.join(basepath,"external","esa-nasa","ace","collected","ace_1m_*"),