import os
import json
from collections import OrderedDict

import midas


def main():
    """Load an update from the JSON buffer in the MIDAS ODB."""
    odb = midas.ODB('gm2-nmr')

    key = odb.get_value('Custom/Data/json-buffer/key').rstrip()
    fdir = odb.get_value('Custom/Path').rstrip()
    fname = os.path.join(fdir, odb.get_value('Custom/Data/json-buffer/file').rstrip())
    value = odb.get_value('Custom/Data/json-buffer/value').rstrip()

    if key == '' or value == '':
        return 0

    if os.path.exists(fname):
        with open(fname) as f:
            json_data = json.loads(f.read(), object_pairs_hook=OrderedDict)

    else:
        json_data = {}

    json_data[key] = json.loads(value, object_pairs_hook=OrderedDict)

    with open(fname, 'w') as f:
        f.write(json.dumps(json_data, indent=2))

    # Clear the buffer so the same update is not applied twice.
    odb.set_value('Custom/Data/json-buffer/key', '')
    odb.set_value('Custom/Data/json-buffer/file', '')
    odb.set_value('Custom/Data/json-buffer/value', '')

    return 0
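
# A minimal usage sketch (the key, file, and value below are hypothetical,
# chosen only to illustrate the buffer protocol handled by main() above):
if __name__ == '__main__':
    odb = midas.ODB('gm2-nmr')
    odb.set_value('Custom/Data/json-buffer/file', 'run_info.json')
    odb.set_value('Custom/Data/json-buffer/key', 'run_00042')
    odb.set_value('Custom/Data/json-buffer/value', '{"comment": "test run"}')
    main()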
# Example 2
import glob
import json

import midas


def main():
    """Back up each run's processing metadata file if it parses cleanly."""

    # Get the data directory from the ODB.
    odb = midas.ODB('gm2-nmr')
    datadir = odb.get_value('/Logger/Data dir').rstrip() + '/'
    metafiles = glob.glob(datadir + '/*/.processing_metadata.json')

    for metafile in metafiles:
        
        with open(metafile) as f:
            mf_string = f.read()
        
        try:
            # Parse the JSON file.
            metadata = json.loads(mf_string)

            # It parsed cleanly, so keep a backup copy.
            with open(metafile + '.old', 'w') as f:
                f.write(json.dumps(metadata))

        except ValueError:
            # Corrupted metadata: skip the file.
            pass
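
# A companion sketch (hypothetical helper, not part of the original
# tooling): restore a corrupted metadata file from the '.old' backup
# written above, assuming one exists.
import os

def restore_metadata(metafile):
    backup = metafile + '.old'
    if os.path.exists(backup):
        with open(backup) as f:
            metadata = json.load(f)
        with open(metafile, 'w') as f:
            f.write(json.dumps(metadata))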
import os
import time
import copy
import logging as log
import hashlib

from subprocess import call, Popen

import numpy as np
import setproctitle
import zmq
import gevent
import midas

# Create a zmq context, global since you only need one.
context = zmq.Context()
odb = midas.ODB('gm2-nmr')
nworkers = 4
maxcount = 20
datadir = odb.get_value('/Logger/Data dir').rstrip()
offline_dir = os.path.abspath(os.path.dirname(__file__) + '/../..')

# Set up the zmq socket for data.
crunch_sck = context.socket(zmq.PULL)
crunch_sck.bind('tcp://127.0.1.1:44444')

# Set up the zmq socket for logging.
logger_sck = context.socket(zmq.PULL)
logger_sck.bind('tcp://127.0.1.1:44445')

# Worker socket to send job results.
worker_sck = context.socket(zmq.PUSH)
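
# A minimal sketch of the receive side (an assumption; the scheduler loop
# itself is truncated in this example): poll both PULL sockets bound above
# and dispatch whatever arrives.
def poll_once(timeout_ms=100):
    poller = zmq.Poller()
    poller.register(crunch_sck, zmq.POLLIN)
    poller.register(logger_sck, zmq.POLLIN)

    for sck, _event in poller.poll(timeout_ms):
        msg = sck.recv_json()
        if sck is crunch_sck:
            log.info('job received: %s', msg)
        else:
            log.info('log message received: %s', msg)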
import sys
import json

import zmq
import midas


def main():
    # Open the ZMQ socket that talks to the job scheduler.
    ctx = zmq.Context()
    job_sck = ctx.socket(zmq.PUSH)
    job_sck.connect('tcp://127.0.1.1:44444')

    req_sck = ctx.socket(zmq.REQ)
    req_sck.connect('tcp://127.0.1.1:44446')

    # Get the data directory
    odb = midas.ODB('gm2-nmr')
    datadir = odb.get_value('/Logger/Data dir').rstrip() + '/'

    # Initialize a standard job submission.
    info = {'run': 1, 'type': 'standard'}

    if len(sys.argv) < 2:
        print 'usage: python %s <command> [runs ...]' % sys.argv[0]
        sys.exit(1)

    # Update only new runs.
    if sys.argv[1] == 'update':

        metafile = datadir + 'crunched/.crunchd_metadata.json'
        with open(metafile) as f:
            crunched_runs = sorted(json.load(f).keys())

        # Find the first run number that has not been crunched yet.
        last_crunch = 1
        while '%05i' % last_crunch in crunched_runs:
            last_crunch += 1

        current_run = int(odb.get_value('/Runinfo/Run number'))
        current_status = int(odb.get_value('/Runinfo/State'))

        # If the experiment isn't running (state 1 = stopped), the current
        # run is complete, so include it by bumping the run number.
        if current_status == 1:
            current_run += 1

        print last_crunch, current_run

        if last_crunch >= current_run - 1:
            print 'Runs are all crunched.'

        else:
            for i in xrange(last_crunch, current_run):
                info['cmd'] = 'crunch'
                info['run'] = i
                rc = job_sck.send_json(info)

    # Update all runs given
    elif sys.argv[1] == 'crunch':

        info['cmd'] = 'crunch'

        for i in xrange(2, len(sys.argv)):

            try:
                info['run'] = int(sys.argv[i])

            except ValueError:
                print 'Not a valid run number.'
                continue

            rc = job_sck.send_json(info)

    elif sys.argv[1] == 'reset':

        info['cmd'] = 'reset'

        for i in xrange(2, len(sys.argv)):

            try:
                info['run'] = int(sys.argv[i])

            except ValueError:
                print 'Not a valid run number.'
                continue

            rc = job_sck.send_json(info)

    elif sys.argv[1] == 'status':

        msg = {'type': 'status'}
        rc = req_sck.send_json(msg)

        print req_sck.recv_json()

    else:
        print 'Unrecognized command.'
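
# The command-line interface handled above, summarized (the script name is
# a placeholder, not taken from the original source):
#
#   python submit_jobs.py update           # crunch runs newer than the last crunched one
#   python submit_jobs.py crunch 42 43     # crunch the listed runs
#   python submit_jobs.py reset 42         # reset the listed runs
#   python submit_jobs.py status           # query the scheduler for its status
if __name__ == '__main__':
    main()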
import sys
import copy
import glob

import numpy as np
import zmq
import midas


def main():

    # Create the ZMQ sockets that talk to the job scheduler.
    ctx = zmq.Context()
    job_sck = ctx.socket(zmq.PUSH)
    job_sck.connect('tcp://127.0.1.1:44444')

    req_sck = ctx.socket(zmq.REQ)
    req_sck.connect('tcp://127.0.1.1:44446')

    # Grab the data directory from the experiment's ODB.
    odb = midas.ODB('gm2-nmr')
    # datadir = odb.get_value('/Logger/Data dir').rstrip() + '/'
    datadir = '/home/newg2/data/'

    if len(sys.argv) < 2:
        print "Insufficient arguments."
        print "usage: python update_full_scans.py <command> [runs ...]"
        sys.exit(1)

    if sys.argv[1] == 'add':
        print "Adding new full scan."

        run_num = 1
        runfile_glob = datadir + '/bundles/run_list_full_scan*%03i.txt'

        while len(glob.glob(runfile_glob % run_num)):
            run_num += 1

        if len(sys.argv) < 3:
            print 'Cannot create a full scan with no runs.'
            sys.exit(1)

        runs = []
        for i in range(2, len(sys.argv)):
            runs.append(int(sys.argv[i]))

        print 'Next full scan is run number %i.' % run_num
        print 'Writing the run list.'

        runfile = datadir + '/bundles/run_list_full_scan_%03i.txt'
        runfile = runfile % run_num

        with open(runfile, 'w') as f:
            runlist = [str(run) for run in runs]
            f.write(' '.join(runlist) + '\n')

        print 'The run list is complete. To process the full scan now, type:'
        print ''
        print 'python scripts/full_scan_helper.py update %i' % run_num
        print ''

    elif sys.argv[1] == 'update':
        print "Rebundling full scans."

        runfiles = []

        # If runs were supplied, check those.
        if len(sys.argv) > 2:
            for i in range(2, len(sys.argv)):
                run = int(sys.argv[i])
                runfile = datadir + 'bundles/run_list_full_scan_%03i.txt' % run
                runfiles.append(runfile)

        # If not, check all of them.
        else:
            runfiles = glob.glob(datadir + 'bundles/run_list*.txt')

        runfiles.sort()
        runfiles.reverse()

        for runfile in runfiles:
            scan_idx = int(runfile[-7:-4])
            # atleast_1d keeps single-run files from collapsing to a scalar,
            # which would break the loops below.
            runlist = np.atleast_1d(np.genfromtxt(runfile, dtype=int)).tolist()

            # Submit all the jobs that the full scan depends on.
            for run in runlist:
                job_sck.send_json({'type': 'normal', 'run': run})

            print 'Ensuring all prerequisite jobs have run.'

            while len(runlist) != 0:

                tmplist = copy.copy(runlist)
                print '%i job completions unverified.' % len(tmplist)
                print tmplist

                # Iterate over the copy; runlist is modified in the loop.
                for run in tmplist:
                    req = {}
                    req['type'] = 'check'
                    req['body'] = {}
                    req['body']['type'] = 'normal'
                    req['body']['msg'] = {'run': run, 'type': 'normal'}
                    req_sck.send_json(req)
                    msg = req_sck.recv_json()

                    # The scheduler no longer has the job queued, so count
                    # it as complete.
                    if not msg['result']:
                        runlist.remove(run)

            job_sck.send_json({'type': 'bundle-full_scan', 'run': scan_idx})

    else:
        print "Unrecognized argument."
        print "usage: python update_full_scans.py <command> [runs ...]"
# Example 6
import sys
import glob
import json
from collections import OrderedDict

import midas


def main():
    attr_file = '/home/newg2/Applications/gm2-nmr/resources/log/midas_runlog.json'
    arch_dir = '/home/newg2/Applications/gm2-nmr/resources/history'

    try:
        with open(attr_file) as f:
            run_attr = json.loads(f.read(), object_pairs_hook=OrderedDict)

    except (IOError, ValueError):
        # Missing or corrupted runlog: start from an empty one.
        run_attr = OrderedDict()

    odb = midas.ODB('gm2-nmr')

    if len(sys.argv) > 1:
        start = int(sys.argv[1])

    else:
        start = 1

    if len(sys.argv) > 2:
        stop = int(sys.argv[2])

    else:
        stop = int(odb.get_value('/Runinfo/Run number').rstrip())

        # If a run is in progress (state 1 = stopped), exclude it; only
        # completed runs get attributes.
        if odb.get_value('/Runinfo/State').rstrip() != '1':
            stop -= 1

    for run_num in range(start, stop + 1):
        run_files = glob.glob(arch_dir + '/*%05i*' % run_num)
        key = "run_%05i" % run_num
        run_attr[key] = OrderedDict()

        # Set the order and initialize.
        run_attr[key]['comment'] = ''
        run_attr[key]['tags'] = []
        run_attr[key]['start_time'] = 0
        run_attr[key]['stop_time'] = 0
        run_attr[key]['field_on'] = False
        run_attr[key]['step_size'] = 0.0
        run_attr[key]['laser_point'] = ''
        run_attr[key]['laser_swap'] = False
        run_attr[key]['laser_phi_offset'] = 0.0
        run_attr[key]['metrolab_angle'] = -1.0

        for f in run_files:

            got_step_size = False

            for line in open(f):

                # The archive holds both XML and plain-text ODB dumps; the
                # parsing style depends on the file type.
                if 'xml' in f:

                    if '"Comment" type="STRING" size="80"' in line:
                        comment = line.split('>')[1].split('<')[0].rstrip()
                        run_attr[key]['comment'] = comment

                    if '"Tags" type="STRING"' in line:
                        tags = line.split('>')[1].split('<')[0].rstrip()
                        run_attr[key]['tags'] = tags.split(',')

                    if '"Start time binary" type="DWORD"' in line:
                        start_time = int(line.split('>')[1].split('<')[0])
                        run_attr[key]['start_time'] = start_time

                    if '"Stop time binary" type="DWORD"' in line:
                        stop_time = int(line.split('>')[1].split('<')[0])
                        run_attr[key]['stop_time'] = stop_time

                    if '"Field On" type="BOOL"' in line:
                        field_on = bool(line.split('>')[1].split('<')[0])
                        run_attr[key]['field_on'] = field_on

                    if '"Step Size" type="DOUBLE"' in line:
                        step_size = float(line.split('>')[1].split('<')[0])
                        if not got_step_size:
                            run_attr[key]['step_size'] = step_size
                            got_step_size = True

                    if '"Laser Tracker Point" type="STRING"' in line:

                        laser_point = line.split('>')[1].split('<')[0].upper()
                        laser_point = laser_point.strip()

                        if laser_point != 'P1' and laser_point != 'P2':
                            laser_point = 'N'

                        run_attr[key]['laser_point'] = laser_point

                    if '"Laser Phi Offset" type="DOUBLE"' in line:
                        laser_phi_offset = float(
                            line.split('>')[1].split('<')[0])
                        run_attr[key]['laser_phi_offset'] = laser_phi_offset

                    if '"Metrolab Angle" type="DOUBLE"' in line:
                        metrolab_angle = float(
                            line.split('>')[1].split('<')[0])
                        run_attr[key]['metrolab_angle'] = metrolab_angle

                else:

                    if 'Comment = STRING : [80] ' in line:
                        comment = line.split(': [80] ')[1].rstrip()
                        run_attr[key]['comment'] = comment

                    if 'Tags = STRING : [128] ' in line:
                        tags = line.split(': [128] ')[1].rstrip()
                        run_attr[key]['tags'] = tags.split(',')

                    if 'Start time binary = DWORD' in line:
                        start_time = int(line.split(':')[1])
                        run_attr[key]['start_time'] = start_time

                    if 'Stop time binary = DWORD' in line:
                        stop_time = int(line.split(':')[1])
                        run_attr[key]['stop_time'] = stop_time

                    if 'Field On = BOOL' in line:
                        # Same 'y'/'n' convention as the XML branch.
                        field_on = line.split(':')[1].strip() == 'y'
                        run_attr[key]['field_on'] = field_on

                    if 'Step Size = DOUBLE' in line:
                        step_size = float(line.split(':')[1])
                        if not got_step_size:
                            run_attr[key]['step_size'] = step_size
                            got_step_size = True

                    if 'Laser Tracker Point = STRING' in line:

                        laser_point = line.split(':')[1].split(']')[1].upper()
                        laser_point = laser_point.strip()

                        if laser_point != 'P1' and laser_point != 'P2':
                            laser_point = 'N'

                        run_attr[key]['laser_point'] = laser_point

                    if 'Laser Phi Offset = DOUBLE' in line:
                        laser_phi_offset = float(line.split(':')[1])
                        run_attr[key]['laser_phi_offset'] = laser_phi_offset

                    if 'Metrolab Angle = FLOAT' in line:
                        metrolab_angle = float(line.split(':')[1])
                        run_attr[key]['metrolab_angle'] = metrolab_angle

        # These keys are initialized unconditionally above, so the
        # fallbacks below only matter if the initialization block changes.
        run_attr[key].setdefault('step_size', -1.0)
        run_attr[key].setdefault('laser_point', 'N')
        run_attr[key].setdefault('laser_swap', False)

        print run_attr[key]

    with open(attr_file, 'w') as f:
        f.write(json.dumps(run_attr, indent=2))
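
# A hedged alternative for the XML branch above (an assumption about the
# dump layout: MIDAS XML dumps store entries as
# <key name="..." type="...">value</key>, the same structure the
# line-splitting relies on). xml.etree.ElementTree makes the extraction
# less fragile than string splitting; two attributes shown for brevity.
import xml.etree.ElementTree as ET

def read_xml_attrs(path):
    attrs = {}
    for elem in ET.parse(path).getroot().iter('key'):
        if elem.get('name') == 'Comment':
            attrs['comment'] = (elem.text or '').rstrip()
        elif elem.get('name') == 'Start time binary':
            attrs['start_time'] = int(elem.text)
    return attrs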