コード例 #1
0
def main():
    """Record the beam flip state for a range of runs into RCDB.

    Usage: script <first_run> <last_run>.  For each non-Junk run the
    FlipState EPICS channel is sampled at the run start time via `myget`;
    value 2 maps to FLIP-RIGHT and 3 to FLIP-LEFT.  Unless TESTMODE is set,
    the result is written back as a FLIP_STATE condition.
    """

    # Run range comes straight from the command line (no validation here).
    first_run = sys.argv[1]
    last_run = sys.argv[2]

    # DB Connection: prefer RCDB_CONNECTION from the environment, else fall
    # back to the read-only a-rcdb account.
    con_str = os.environ["RCDB_CONNECTION"] \
        if "RCDB_CONNECTION" in os.environ.keys() \
        else "mysql://[email protected]:3306/a-rcdb"
    #con_str = "mysql://[email protected]:3306/a-rcdb"
    db = rcdb.RCDBProvider(con_str)

    result = db.select_runs("run_type != 'Junk'", first_run, last_run)
    for run in result:
        start_time = datetime.strftime(run.start_time, "%Y-%m-%d %H:%M:%S")
        try:
            # Query the EPICS archive for FlipState at the run start time.
            cmds = ["myget", "-c", "FlipState", "-t", start_time]
            cond_out = subprocess.Popen(
                cmds, stdout=subprocess.PIPE).stdout.read().strip()
            # assumes myget prints "<date> <time> <value>" -- TODO confirm
            value = cond_out.split()[2]
            if int(value) == 2:
                flip = 'FLIP-RIGHT'
            elif int(value) == 3:
                flip = 'FLIP-LEFT'
            else:
                print "flip state value sometime else....", value, " skip this run", run.number
                continue

            if TESTMODE:
                print run.number, value
            else:
                print run.number, flip
                db.add_condition(run, ParityConditions.FLIP_STATE, flip, True)
        except Exception as e:
            # Best-effort per run: report the failure and keep going.
            print e
コード例 #2
0
ファイル: test_select_values.py プロジェクト: rjones30/rcdb
    def setUp(self):
        """Build an in-memory SQLite RCDB with sample runs and conditions.

        Creates runs 1-5 and 9, one condition type per field kind
        ('a'..'g'), and attaches representative values to a subset of
        the runs so select/value queries have data to work against.
        """
        self.db = rcdb.RCDBProvider("sqlite://", check_version=False)
        destroy_all_create_schema(self.db)

        # Runs 1..5 plus a non-contiguous run 9 (exercises gaps in ranges).
        runs = {}
        for number in list(range(1, 6)) + [9]:
            runs[number] = self.db.create_run(number)
        self.runs = runs

        # One condition type per supported field kind.
        type_specs = [
            ("a", ConditionType.INT_FIELD, "Test condition 'a'"),
            ("b", ConditionType.FLOAT_FIELD, "Test condition 'b'"),
            ("c", ConditionType.BOOL_FIELD, "Test condition 'v'"),
            ("d", ConditionType.STRING_FIELD, "Test condition 'd'"),
            ("e", ConditionType.JSON_FIELD, "Test condition 'e'"),
            ("f", ConditionType.STRING_FIELD, "Test condition 'f'"),
            ("g", ConditionType.BLOB_FIELD, "Test condition 'g'"),
        ]
        for name, field_kind, description in type_specs:
            self.db.create_condition_type(name, field_kind, description)

        # Condition values per type, keyed by run number; iteration order
        # below matches the original one-call-per-line sequence.
        values_by_type = {
            "a": [(1, 1), (2, 2), (3, 3), (4, 4), (9, 9)],
            "b": [(1, 1.01), (2, 7.0 / 3.0), (3, 2.55), (4, 1.64),
                  (5, 2.32), (9, 2.02)],
            "c": [(1, False), (2, True), (3, True), (4, True),
                  (5, False), (9, True)],
            "d": [(1, "haha"), (4, "hoho"), (5, "bang"), (9, "mew")],
            "e": [(1, '{"a":1}'), (4, "[1,2,3]"), (9, '[3,2,{"b":5}]')],
            "f": [(4, "my only value")],
            "g": [(5, "aGVsbG8gd29ybGQ=")],
        }
        for name in "abcdefg":
            for run_number, value in values_by_type[name]:
                self.db.add_condition(run_number, name, value)
        """
コード例 #3
0
def get_dpp_run(run_num):
    con_str = os.environ["RCDB_CONNECTION"] \
        if "RCDB_CONNECTION" in os.environ.keys() \
        else "mysql://[email protected]:3306/a-rcdb"

    db = rcdb.RCDBProvider(con_str)
    run = db.get_run(run_num)
    if not db.get_run(run_num):
        print "run not found in DB: ", run_num
        return

    brtime = datetime.strftime(run.start_time, "%Y-%m-%d %H:%M:%S")
    if run.end_time is not None:
        ertime = datetime.strftime(run.end_time, "%Y-%m-%d %H:%M:%S")
    else:
        # use fixer.. abort the program for now
        return

    dpp, sig_dpp = get_dpp(brtime, ertime)
    """
    print "Start: ", brtime
    print "End: ", ertime
    print "get_dpp_run: ", dpp, sig_dpp
    """
    return dpp, sig_dpp
コード例 #4
0
def GetRunInfo():

	# Get the list of runs to process and the number of EVIO files for each.
	# The list is returned in the form of a dictionary with the run numbers
	# as keys and the maximum evio file number for that run as values.
	# Which runs show up in the list depends on how the RUNS and RCDB_QUERY
	# globals are set:
	#
	# RUNS is not None: All runs in the list are included
	# RUNS is empty and RCDB_QUERY is None: All runs in the range MINRUN-MAXRUN inclusive are included
	# RUNS is empty and RCDB_QUERY is not None: RCDB is queried for the list of runs.
	#
	# n.b. that for the first 2 options above, the GetNumEVIOFiles routine
	# below is called which queries the RCDB via mysql directly so the RCDB
	# python module does not actually need to be in PYTHONPATH. For the 3rd
	# option, the RCDB python API is used so it is needed.

	global RUNS, MINRUN, MAXRUN, RCDB_QUERY, RUN_LIST_SOURCE, BAD_RCDB_QUERY_RUNS
	good_runs = {}

	# If RCDB_QUERY is not defined, define with value None
	# (NameError if the global was never assigned anywhere)
	try: RCDB_QUERY
	except : RCDB_QUERY = None

	# Query through RCDB API
	if len(RUNS)==0 and RCDB_QUERY!=None:
		RUN_LIST_SOURCE = 'RCDB ' + str(MINRUN) + '-' + str(MAXRUN) + ' (query="' + RCDB_QUERY + '")'
		print 'Querying RCDB for run list ....'

		# Import RCDB python module. Add a path on the CUE just in case
		# PYTHONPATH is not already set
		sys.path.append('/group/halld/Software/builds/Linux_CentOS7-x86_64-gcc4.8.5/rcdb/rcdb_0.04.00/python')
		import rcdb

		db = rcdb.RCDBProvider('mysql://' + RCDB_USER + '@' + RCDB_HOST + '/rcdb')
		print 'RCDB_QUERY = ' + RCDB_QUERY
		for r in db.select_runs(RCDB_QUERY, MINRUN, MAXRUN):
			# Runs without an evio_files_count condition cannot be processed;
			# record them so the caller can report/inspect later.
			evio_files_count = r.get_condition_value('evio_files_count')
			if evio_files_count == None:
				print('ERROR in RCDB: Run ' + str(r.number) + ' has no value for evio_files_count! Skipping ...')
				BAD_RCDB_QUERY_RUNS.append( int(r.number) )
				continue
			good_runs[r.number] = int(evio_files_count)
	elif len(RUNS)==0 :
		RUN_LIST_SOURCE = 'All runs in range ' + str(MINRUN) + '-' + str(MAXRUN)
		print 'Getting info for all runs in range ' + str(MINRUN) + '-' + str(MAXRUN) + ' ....'
		for RUN in range(MINRUN, MAXRUN+1): good_runs[RUN] = GetNumEVIOFiles(RUN)
	else:
		RUN_LIST_SOURCE = 'Custom list: ' + ' '.join([str(x) for x in RUNS])
		print 'Getting info for runs : ' + ' '.join([str(x) for x in RUNS])
		for RUN in RUNS: good_runs[RUN] = GetNumEVIOFiles(RUN)

	# Filter out runs in the EXCLUDE_RUNS list
	global EXCLUDE_RUNS
	good_runs_filtered = {}
	for run in good_runs.keys():
		if run not in EXCLUDE_RUNS: good_runs_filtered[run] = good_runs[run]

	return good_runs_filtered
コード例 #5
0
    def __init__(self, con_str=None):
        """Initialize run-bookkeeping state; connect to RCDB if *con_str* given."""
        self.connection_string = ""
        self.run_type = None
        self.wac_comment = None
        self.run_flag = None
        self.is_connected = None
        self.run_number = None
        self.run = None

        # NOTE(review): is_connected was just set to None above, so the
        # second half of this condition is always true at this point.
        if con_str and self.is_connected is not True:
            print "connect to DB"
            self.db = rcdb.RCDBProvider(con_str)
            self.is_connected = True
コード例 #6
0
    def GetParityRunIDs(self,
                        ProductionRunID=0,
                        targName="D-208Pb10-D",
                        SearchStartRunID=2573,
                        SearchEndRunID=8000):
        '''
        Return run numbers in [SearchStartRunID, SearchEndRunID] whose
        target matches *targName* and which started before the production
        run's start time.

        :param ProductionRunID: production run whose start time bounds the search
        :param targName: substring to match against the target_type condition
        :param SearchStartRunID: first run number of the search window
        :param SearchEndRunID: last run number of the search window
        :return: list of matching run numbers (ints)
        '''
        ParityRunList = []
        # NOTE(review): this strftime->strptime round-trip returns the same
        # wall time but drops sub-second precision -- presumably intentional
        # so comparisons below are at whole-second granularity; confirm.
        endSearchTime = datetime.datetime.strptime(
            self.GetProductionStartTime(
                productionRunID=ProductionRunID).strftime("%Y-%m-%d %H:%M:%S"),
            "%Y-%m-%d %H:%M:%S")
        if endSearchTime is not None:
            runs = []
            for x in range(int(SearchStartRunID), int(SearchEndRunID) + 1):
                runs.append(x)

            # DB connection (read-only a-rcdb account)
            con_str = "mysql://[email protected]:3306/a-rcdb"
            db = rcdb.RCDBProvider(con_str)

            # Empty query string: select every run in the numeric range.
            result = db.select_runs("", runs[0], runs[-1])
            for run in result:
                runno = str(run.number)
                # from db (run_type/run_flag/arm_flag/end_time fetched but
                # currently unused below)
                run_type = run.get_condition_value("run_type")
                target_type = run.get_condition_value("target_type")
                run_flag = run.get_condition_value("run_flag")
                arm_flag = run.get_condition_value("arm_flag")
                start_time = run.start_time
                end_time = run.end_time

                if target_type is None:
                    continue

                # Keep runs that started before the production run and whose
                # target name contains the requested substring.
                if endSearchTime > start_time and targName in target_type:
                    print(
                        "run {}  target {}  startTime {}    :: end {}".format(
                            runno, target_type, start_time, endSearchTime))
                    ParityRunList.append(int(runno))
        return ParityRunList
コード例 #7
0
def remove_comments(con_str):
    """Blank out user_comment conditions that mention 'CONFIG FILE'.

    :param con_str: DB connection string
    :return:
    """
    db = rcdb.RCDBProvider(con_str)
    comment_type = db.get_condition_type("user_comment")

    query = (db.session.query(Condition)
             .join(Condition.type)
             .filter(Condition.type == comment_type)
             .filter(Condition.text_value.like('%CONFIG FILE%')))

    # Print each matching condition, then clear its text in the session.
    for condition in query.all():
        print(condition)
        condition.text_value = ""

    db.session.commit()
コード例 #8
0
def read_conditions(run_number):
    """Look up a run in RCDB and print its 'ihwp' condition value.

    Exits the process with status 1 when the run does not exist.
    """
    # DB Connection (read-only)
    con_str = "mysql://[email protected]:3306/a-rcdb"

    db = rcdb.RCDBProvider(con_str)

    # returns Run object (None if the run doesn't exist in DB)
    run = db.get_run(run_number)
    if not run:
        print("ERROR: Run %s is not found in DB" % run_number)
        sys.exit(1)

    # start time and end time (fetched for reference; unused below)
    start_time = run.start_time
    end_time = run.end_time
    """
    get condition value
    example: run_length
    returns condition_name and value
    """
    value = db.get_condition(run, "ihwp").value
    print value
コード例 #9
0
def make_dummy_db():
    """Creates inmemory SQLite database.

    Defines three condition types (event_count, data_value, tag) and fills
    100 runs (numbers 0-99) with deterministic values, then returns the
    provider so callers can query it.
    """

    # create in memory SQLite database
    db = rcdb.RCDBProvider("sqlite://")
    rcdb.model.Base.metadata.create_all(db.engine)

    print("Dummy memory database created!")

    # create conditions types
    event_count_type = db.create_condition_type("event_count", ConditionType.INT_FIELD)
    data_value_type = db.create_condition_type("data_value", ConditionType.FLOAT_FIELD)
    tag_type = db.create_condition_type("tag", ConditionType.STRING_FIELD)

    # create runs and fill values
    for i in range(0, 100):
        run = db.create_run(i)
        # Consistency fix: pass the Run object to all three calls (the
        # data_value line previously passed the bare run number `i`, which
        # rcdb also accepts but was inconsistent with its siblings).
        db.add_condition(run, event_count_type, i + 950)      # event_count in range 950 - 1049
        db.add_condition(run, data_value_type, (i/100.0) + 1)   # data_value in 1 - 2
        db.add_condition(run, tag_type, "tag" + str(i))       # Some text data

    print("Runs filled with data")
    return db
コード例 #10
0
ファイル: Misc.py プロジェクト: ademus4/clas12-utilities
def getHWP(run_min, run_max):
    """Build half-wave-plate CCDB entries for runs in [run_min, run_max].

    Scans RCDB's half_wave_plate values, collapsing consecutive runs with
    the same setting into one HwpCcdbEntry per run interval.  Prints each
    entry and returns the list (empty if no usable rows were found).
    """
    db = rcdb.RCDBProvider('mysql://[email protected]/rcdb')

    prev_hwp, run_start = None, None

    ret = []

    for x in db.select_values(['half_wave_plate'], '', run_min, run_max):

        # Skip rows with missing run number or HWP value.
        if x is None or x[0] is None or x[1] is None:
            continue

        run, hwp = int(x[0]), int(x[1])

        if run_start is None:
            run_start = run
        if prev_hwp is None:
            prev_hwp = hwp

        # HWP setting changed: close the previous interval at run-1.
        if prev_hwp != hwp:

            ret.append(
                HwpCcdbEntry(run_start, run - 1,
                             {'hwp': _epics2ccdb(prev_hwp)}))

            prev_hwp = hwp
            run_start = run

    # Fix: only close the final interval if at least one usable row was
    # seen.  Previously this append ran unconditionally, producing an entry
    # with run_start=None and _epics2ccdb(None) for an empty result set.
    if run_start is not None:
        ret.append(HwpCcdbEntry(run_start, run_max,
                                {'hwp': _epics2ccdb(prev_hwp)}))

    for x in ret:
        print(str(x))

    return ret
コード例 #11
0
def get_info_all():
    """Tally charge and good-charge per run and write them to out_time.txt.

    Run selection comes from --run (range "A-B"), --list (file of run
    numbers), or, when neither is given, all runs from 5408 up to the
    newest run in the DB.  For each selected Production run the EPICS
    charge and the prompt-analysis "good" charge are computed and summed.
    """

    use_list = False

    # argv
    parser = argparse.ArgumentParser(description="", usage=get_usage())
    parser.add_argument("--run", type=str, help="run range")
    parser.add_argument("--list", type=str, help="use run list file")
    # However this script is to be used by shift crew to get idea about the time to change IHWP status
    # the default status needs to be False. WAC can copy the script to his/her personal folder and change the default
    parser.add_argument("--goodrun",
                        type=bool,
                        help="select only runs marked as Good",
                        default=False)
    args = parser.parse_args()

    # run list
    runs = []

    if args.run is not None:
        run_range = args.run
        brun = run_range.split("-")[0]
        erun = run_range.split("-")[1]
        for x in range(int(brun), int(erun) + 1):
            runs.append(x)

    if args.list is not None:
        use_list = True

    # override run range
    if use_list:
        del runs[:]
        with open(args.list, 'rb') as f:
            # drop blank lines; note entries stay strings here, unlike the
            # int entries produced by the --run branch above
            lines = filter(None, (line.rstrip() for line in f))
            for line in lines:
                runs.append(line)

    # DB connection
    con_str = "mysql://[email protected]:3306/a-rcdb"
    db = rcdb.RCDBProvider(con_str)

    if args.run is None and args.list is None:
        # no given run range/list
        run_query = db.session.query(Run)
        last_run = run_query.order_by(desc(Run.number)).first().number
        runs = [x for x in range(5408, int(last_run) + 1)]
        print "For all production runs ", runs[0], runs[-1]

    # search query
    """
    count_str = "event_count > 10000"
    type_str = "run_type in ['Production']"
    target_str = "'208Pb' in target_type"
    flag_str = "run_flag != 'Bad'"
    query_str =  type_str + " and " + target_str + " and " + flag_str
    """

    # dictionary to fill
    dd = {}

    # Output file
    fout = open('out_time.txt', 'wb')

    nrun = 0
    ngood = 0
    good_sum = 0
    charge_sum = 0

    # Get result
    result = db.select_runs("", runs[0], runs[-1])
    for run in result:

        runno = str(run.number)
        # helicity flip rate in Hz; PREX2 runs flipped at 240 Hz
        helFlipRate = 120.0
        # PREX2
        if run.number >= 3876 and run.number < 5000:
            helFlipRate = 240.0
        dd[runno] = {}

        # from db
        run_type = run.get_condition_value("run_type")
        target_type = run.get_condition_value("target_type")
        run_flag = run.get_condition_value("run_flag")
        arm_flag = run.get_condition_value("arm_flag")

        pass_cut = True

        if run_type is None or run_type not in ['Production']:
            pass_cut = False
        # keep 48Ca runs above 5000 and Pb runs below 5000
        if ((target_type is None or '48Ca' not in target_type)
                and run.number > 5000) and (
                    (target_type is None or 'Pb' not in target_type)
                    and run.number < 5000):
            #if target_type is None or '48Ca' not in target_type:
            if run.get_condition_value("slug") < 3999:
                pass_cut = False

        good_run_flag = False
        # Tight cut
        if run_flag is not None and run_flag != 'Bad':
            good_run_flag = True

        if args.goodrun:
            if not good_run_flag:
                pass_cut = False

        # conservative selection:
        #if run_flag is not None and run_flag == 'Bad':
        #    good_run_flag = False
        #if (run_flag != 'Bad' and run_flag != 'Suspicious'):
        #    pass_cut = False

        charge1 = "0"
        charge2 = "0"

        if pass_cut:
            avg_cur = run.get_condition_value("beam_current")
            length = run.get_condition_value("run_length")
            ihwp = run.get_condition_value("ihwp")
            rhwp = run.get_condition_value("rhwp")

            # read prompt summary
            """
            0: start time
            1: end time
            2: number of events processed
            3: fraction of good events/all
            4: bcm mean
            """
            summary_output = get_summary_output(str(run.number))
            # Start Time, End Time, Number of events processed, Percentage of good events, Current

            # if prompt analysis output exists or not
            if not summary_output:
                # skip the run if there is no prompt summary
                print "=== Prompt summary output does not exist for run: ", runno, ", skip this run for Good charge"
                charge2 = "0"
                prompt_time = "0"
                start_time = " " + run.start_time.__str__()
                end_time = " " + run.start_time.__str__()
            else:
                start_time = summary_output[0]
                end_time = summary_output[1]
                if length is None:
                    # use prompt Nevent instead
                    length = float(summary_output[2]) * 1.0 / helFlipRate
                # good charge from prompt output
                if 'nan' in summary_output[3]:
                    print "=== No good event processed for run :", runno, " , skip this run"
                    prompt_time = "0"
                    charge2 = "0"
                else:
                    prompt_time = float(summary_output[2]) * float(
                        summary_output[3]) * 0.01 * 1.0 / helFlipRate
                    # PREX2 runs count both helicity windows, hence factor 2
                    if run.number >= 3876 and run.number < 5000:
                        charge2 = float(prompt_time) * float(
                            summary_output[4]) * 2
                    else:
                        charge2 = float(prompt_time) * float(summary_output[4])

            if length is None:
                print "=== Run did not end properly...", runno, ", let's skip this"
            else:
                # calculate charge all (from epics)
                charge1 = float(avg_cur) * float(length)

            # If one uses a list, we don't do QA from DB:
            if use_list:
                if runno not in runs:
                    charge1 = "0"
                    charge2 = "0"

            # fill dict
            dd[runno]["avg_cur"] = avg_cur
            dd[runno]["length"] = length
            dd[runno]["charge_all"] = charge1
            dd[runno]["charge_good"] = charge2
            dd[runno]["eff_time"] = prompt_time
            dd[runno]["start_time"] = start_time
            dd[runno]["end_time"] = end_time
            dd[runno]["epoch_time_start"] = time.mktime(
                datetime.datetime.strptime(start_time,
                                           " %Y-%m-%d %H:%M:%S").timetuple())
            dd[runno]["epoch_time"] = time.mktime(
                datetime.datetime.strptime(end_time,
                                           " %Y-%m-%d %H:%M:%S").timetuple())
            # guard against bogus far-future end times in the prompt summary
            if dd[runno]["epoch_time"] > 2000000000.0:
                dd[runno]["epoch_time"] = time.mktime(
                    datetime.datetime.strptime(
                        start_time, " %Y-%m-%d %H:%M:%S").timetuple())

        else:
            #print runno, run_type, target_type, run_flag
            start_time = " " + run.start_time.__str__()
            end_time = " " + run.start_time.__str__()
            dd[runno]["charge_all"] = "0"
            dd[runno]["charge_good"] = "0"
            dd[runno]["start_time"] = run.start_time
            dd[runno]["end_time"] = run.start_time
            #dd[runno]["epoch_time_start"] = run.start_time #0
            #dd[runno]["epoch_time"] = run.start_time #0
            # NOTE(review): string comparison and `is not` identity test on
            # a str literal here are suspect; both branches below are
            # identical anyway, so the condition is currently harmless.
            if runno < "5000" and runno is not "5000":
                #dd[runno]["epoch_time_start"] = run.start_time #0
                #dd[runno]["epoch_time"] = run.start_time #0
                dd[runno]["epoch_time_start"] = time.mktime(
                    datetime.datetime.strptime(
                        start_time, " %Y-%m-%d %H:%M:%S").timetuple())
                dd[runno]["epoch_time"] = time.mktime(
                    datetime.datetime.strptime(
                        start_time, " %Y-%m-%d %H:%M:%S").timetuple())
            else:
                dd[runno]["epoch_time_start"] = time.mktime(
                    datetime.datetime.strptime(
                        start_time, " %Y-%m-%d %H:%M:%S").timetuple())
                dd[runno]["epoch_time"] = time.mktime(
                    datetime.datetime.strptime(
                        start_time, " %Y-%m-%d %H:%M:%S").timetuple())
            #print runno, " failed RCDB"
            #dd[runno]["epoch_time_start"] = time.mktime(datetime.datetime.strptime(run.start_time, " %Y-%m-%d %H:%M:%S").timetuple())
            #dd[runno]["epoch_time"] = time.mktime(datetime.datetime.strptime(run.start_time, " %Y-%m-%d %H:%M:%S").timetuple())

        # Sum over all production runs (with 208Pb target)
        charge_sum += float(charge1)
        nrun += 1

        if fDEBUG:
            print runno, charge_sum, float(charge1) * 1.e-6

        # Count good runs
        if dd[runno]["charge_all"] != "0":
            ngood += 1
            good_sum += float(dd[runno]["charge_good"])

        print >> fout, runno, dd[runno]["end_time"], dd[runno][
            "epoch_time_start"], dd[runno]["epoch_time"], dd[runno][
                "charge_all"], dd[runno]["charge_good"]

    print
    print("Total runs: %d,\t\tTotal charge sum: %.2f C" %
          (nrun, float(charge_sum) * 1.e-6))
    print("Total good runs: %d,\tGood charge sum: %.2f C" %
          (ngood, float(good_sum) * 1.e-6))
コード例 #12
0
ファイル: hdswif.py プロジェクト: JeffersonLab/hd_utilities
def main(argv):
    """Dispatch hdswif subcommands (create/list/run/status/.../add).

    Parses the command line, handles every one-shot subcommand inline, and
    falls through to "add" mode at the bottom, where jobs are registered
    for each production run / evio file matching the -r/-f selectors.
    """
    global VERBOSE

    # Default to run over all runs, files
    RUN            = "all"
    FILE           = "all"
    FORMATTED_RUN  = ""
    FORMATTED_FILE = ""
    USERCONFIGFILE = ""

    # Read in command line args
    parser = OptionParser(usage = str("\n"
                                      + "hdswif.py [option] [workflow]\n"
                                      + "[option] = {create, list, run (n), status, freeze, unfreeze, \n"
                                      + "            add, resubmit, summary, cancel, delete, details}\n"
                                      + "----------------------------------\n"
                                      + "Options for add:\n"
                                      + "-c [config] -r (run) -f (file)\n"
                                      + "options in [ ] are required, options in ( ) are optional for running\n"
                                      + ""
                                      + "----------------------------------\n"
                                      + "Options for resubmit:\n"
                                      + "[problem] (additional resources)\n"
                                      + "[problem] = TIMEOUT, RLIMIT, SYSTEM\n"
                                      + "Additional resources in units of hrs for TIMEOUT, GB for RLIMIT\n"
                                      + "Default is to add 2 hrs for TIMEOUT, 2GB for RLIMIT\n"
                                      + "System error jobs are resubmitted with the same resources\n"
                                      + "----------------------------------\n"
                                      + "Options for details:\n"
                                      + "-r [run] -f [file] \n"
                                      + "----------------------------------\n"
                                      + "options in [ ] are required, options in ( ) are optional for running\n"
                                      + "(use -V 1 for verbose mode)"))
    parser.add_option("-r","--run", dest="run",
                      help="specify run(s) to run over")
    parser.add_option("-f","--file", dest="file",
                      help="specify file(s) to run over")

    parser.add_option("-c","--config", dest="config",
                      help="specify config file")
    parser.add_option("-V","--verbose",dest="verbose",
                      help="verbose output")

    (options, args) = parser.parse_args(argv)

    if(options.run):
        RUN = options.run
    if(options.file):
        FILE = options.file

    if(options.config):
        USERCONFIGFILE = options.config
    if(options.verbose):
        VERBOSE = True

    # If we want to list workflows, list and exit
    if(len(args)==1 and args[0] == "list"):
        list()
        return

    # For all other cases, make sure we have at least two arguments,
    # swif command and workflow
    if(len(args) < 2):
        parser.print_help()
        return

    WORKFLOW = args[1]

    # If we want to create workflow, create it and exit
    if(args[0] == "create"):
        create(WORKFLOW,USERCONFIGFILE)
        return

    # If we want to cancel workflow, cancel and exit
    elif(args[0] == "cancel"):
        cancel(WORKFLOW)
        return

    # If we want to delete workflow, delete it and exit
    elif(args[0] == "delete"):
        delete(WORKFLOW)
        return

    # If we want to run workflow, run it and exit
    elif(args[0] == "run"):
        # optional third arg limits the number of jobs released
        if(len(args) == 2):
            runall(WORKFLOW)
        if(len(args) == 3):
            runnjobs(WORKFLOW, args[2])
        return

    # If we want to freeze workflow, freeze it and exit
    elif(args[0] == "freeze"):
        if(len(args) == 2):
            freeze(WORKFLOW)
        return

    # If we want to check status of workflow, check it and exit
    elif(args[0] == "status"):
        if(len(args) == 2):
            status(WORKFLOW)
        elif(len(args) == 3):
            if(not(args[2] == "xml" or args[2] == "json" or args[2] == "simple")):
                print "hdswif.py status [workflow] [display format]"
                print "display format = {xml, json, simple}"
                return
            fullstatus(WORKFLOW, str(args[2]))
        else:
            print "hdswif.py status [workflow] [display format]"
            print "display format = {xml, json, simple}"
            return
        return

    # If we want to create a summary of the workflow, call summary
    elif(args[0] == "summary"):
        # create xml output dir
        if not os.path.exists('./xml'):
            os.makedirs('./xml')
        filename = './xml/swif_output_' + WORKFLOW + '.xml'
        if VERBOSE == True:
            print 'output file name is ', filename

        # Check if xml output file exists
        recreate = True

        if os.path.isfile(filename):
            print 'File ', filename, ' already exists'

            # interactive prompt: keep asking until y or n is given
            while(1):
                answer = raw_input('Overwrite? (y/n)   ')
                if answer == 'n':
                    print 'Not recreating summary file for [', WORKFLOW, ']'
                    recreate = False
                    break
                elif answer == 'y':
                    recreate = True
                    break

        # Create the xml file to parse
        if recreate == True:
            print 'Creating XML output file........'
            os.system("swif status " + WORKFLOW + " -runs -summary -display xml > " + filename)
            print 'Created summary file ', filename, '..............'

        # Call parse_swif
        parse_swif.main([filename])
        return

    # Resubmit jobs by problem
    elif(args[0] == "resubmit"):
        if(len(args) == 3):
            # Assume args[1] is workflow,
            # args[2] is problem
            # Currently supports RLIMIT, TIMEOUT, SYSTEM
            # Default is to add 2GB of RAM for RLIMIT,
            # 2 hrs for TIMEOUT, and nothing for SYSTEM
            resubmit(args[1],args[2],2)
            exit()
        elif(len(args) == 4):
            if(is_number(args[3]) == True):
                # Assume args[1] is problem
                # Currently supports RLIMIT, TIMEOUT
                resubmit(args[1], args[2], int(args[3]))
                exit()
            else:
                print "hdswif.py resubmit [workflow] [problem] [resource to add]"
                print "[problem] = TIMEOUT, RLIMIT, SYSTEM"
                print "[resource to add] is in units of hrs for TIMEOUT, GB for RLIMIT"
                exit()
        else:
            print "hdswif.py resubmit [workflow] [problem] [resource to add]"
            print "[problem] = TIMEOUT, RLIMIT, SYSTEM"
            print "[resource to add] is in units of hrs for TIMEOUT, GB for RLIMIT"
            exit()

    # Check registerd job details using the run and file number
    elif(args[0] == "details"):

        # If run and file have not been set, warn and exit
        if(RUN == 'all' or FILE == 'all'):
            print 'Usage:'
            print 'hdswif.py details [workflow] -r [run] -f [file]'
            print 'Run and file MUST be specified'
            exit()

        output_job_details.main([WORKFLOW,RUN,FILE])
        exit()

    # We should only have add left at this stage
    else:
        if(args[0] != "add"):
            print "hdswif.py options:"
            print "create, list, run (n), status, add, resubmit, summary, cancel, delete"
            exit()

    #------------------------------------------+
    #       We are in add mode now             |
    #------------------------------------------+
    if VERBOSE == True:
        VERBOSE_INT = 1
    else:
        VERBOSE_INT = 0
    config_dict = read_config.main([USERCONFIGFILE, str(VERBOSE_INT)])

    # Format run and file numbers
    # (zero-padded for path matching; "*" means glob over all)
    if(is_number(RUN) == True):
        FORMATTED_RUN = "{:0>6d}".format(int(RUN))
    elif(RUN == "all"):
        FORMATTED_RUN = "*"
    else:
        FORMATTED_RUN = RUN

    if(is_number(FILE) == True):
        FORMATTED_FILE = "{:0>3d}".format(int(FILE))
    elif(FILE == "all"):
        FORMATTED_FILE = "*"
    else:
        FORMATTED_FILE = FILE

    if(VERBOSE == True):
        print "FORMATTED_RUN = " + FORMATTED_RUN + " FORMATTED_FILE = " + FORMATTED_FILE

    # Get the list of production runs
    runs = []
    db = rcdb.RCDBProvider("mysql://rcdb@hallddb/rcdb")
    if(RUN == "all"):
        runs = db.select_runs("@is_production", 10000, 20000) #YIKES: MAX-RUN HARD-CODED!!!
    else:
        runs += [ db.get_run(int(RUN)) ]

    #------------------------------------------+
    #    Find raw evio files to submit         |
    #------------------------------------------+
    for run in runs:
       FORMATTED_RUN = "{:0>6d}".format(int(run.number))
       print "production run: " + FORMATTED_RUN

       file_list = []
       file_list = find_files(config_dict['RUNPERIOD'], FORMATTED_RUN, FORMATTED_FILE)
       if(VERBOSE == True):
           for file in file_list:
              print file
           print "size of file_list is " + str(len(file_list))

       #------------------------------------------+
       #         Add job to workflow              |
       #------------------------------------------+

       # Loop over files found for given run and file
       for mssfile in file_list:
           add_job(WORKFLOW, config_dict, mssfile)
コード例 #13
0
ファイル: rcdbscan.py プロジェクト: lorenzozana/hdds
#!/usr/bin/env python
#
# rcdbscan.py - does a scan of the GlueX rcdb looking up run conditions
#               related to the beamline, namely which primary collimator
#               was in place and which TPOL converter.

import os
import sys
import rcdb

# Open database connection
connect = os.environ["RCDB_CONNECTION"]
db = rcdb.RCDBProvider(connect)

conds = ["collimator_diameter", "polarimeter_converter"]
values = db.select_values(val_names=conds)
run = -1
col = 0
con = 0
lastrun = 0
for value in values.rows:
    lastcol = col
    lastcon = con
    run = value[0]
    if value[2] == "Retracted":
        con = 0
    elif value[2] == "Be 75um":
        con = 75
    elif value[2] == "Be 750um":
        con = 750
    if value[1] == "5.0mm hole":
コード例 #14
0
def main(argv):
    """Drive GlueX MC production with MCwrapper.

    Parses a config file (argv[0]) plus key=value command-line overrides,
    then either runs MakeMC locally or submits one job per output file to
    the selected batch system (SWIF, QSUB, CONDOR, OSG, SLURM).

    argv[1] is a run number or a range such as 11366-11555 or
    RunPeriod-2017-02 (resolved via RCDB/CCDB); argv[2] is the total
    number of events to generate.
    """
    parser_usage = "gluex_MC.py config_file Run_Number/Range num_events [all other options]\n\n where [all other options] are:\n\n "
    parser_usage += showhelp()
    parser = OptionParser(usage=parser_usage)
    (options, args) = parser.parse_args(argv)

    #check if there are enough arguments
    if (len(argv) < 3):
        parser.print_help()
        return

    #check if the needed arguments are valid
    if len(args[1].split("=")) > 1 or len(args[2].split("=")) > 1:
        parser.print_help()
        return

    #!!!!!!!!!!!!!!!!!!REQUIRED COMMAND LINE ARGUMENTS!!!!!!!!!!!!!!!!!!!!!!!!
    CONFIG_FILE = args[0]
    RUNNUM = args[1]
    EVTS = int(args[2])
    #!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!

    print("*********************************")
    print("Welcome to v2.0 of the MCwrapper")
    print("Thomas Britton 7/24/18")
    print("*********************************")

    #load all argument passed in and set default options
    VERBOSE = False

    TAGSTR = "I_dont_have_one"

    DATA_OUTPUT_BASE_DIR = "UNKNOWN_LOCATION"  #your desired output location

    ENVFILE = "my-environment-file"  #change this to your own environment file

    GENERATOR = "genr8"
    GENCONFIG = "NA"

    eBEAM_ENERGY = "rcdb"
    COHERENT_PEAK = "rcdb"
    MIN_GEN_ENERGY = "3"
    MAX_GEN_ENERGY = "12"
    RADIATOR_THICKNESS = "rcdb"
    BGRATE = "rcdb"  #GHz
    BGTAGONLY = "0"
    RUNNING_DIR = "./"
    ccdbSQLITEPATH = "no_sqlite"
    rcdbSQLITEPATH = "no_sqlite"

    GEANTVER = 4
    BGFOLD = "DEFAULT"
    RANDBGTAG = "none"

    CUSTOM_MAKEMC = "DEFAULT"
    CUSTOM_GCONTROL = "0"
    CUSTOM_PLUGINS = "None"

    BATCHSYS = "NULL"
    QUEUENAME = "DEF"
    #-------SWIF ONLY-------------
    # PROJECT INFO
    PROJECT = "gluex"  # http://scicomp.jlab.org/scicomp/#/projects
    TRACK = "simulation"  # https://scicomp.jlab.org/docs/batch_job_tracks

    # RESOURCES for swif jobs
    NCORES = "8"  # Number of CPU cores
    DISK = "10GB"  # Max Disk usage
    RAM = "20GB"  # Max RAM usage
    TIMELIMIT = "300minutes"  # Max walltime
    OS = "centos7"  # Specify CentOS65 machines

    PROJECT_ID = -1  #internally used when needed
    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    VERSION = "mc"
    CALIBTIME = "notime"
    RECON_CALIBTIME = "notime"
    BASEFILENUM = 0
    PERFILE = 10000
    GENR = 1
    GEANT = 1
    SMEAR = 1
    RECON = 1
    CLEANGENR = 1
    CLEANGEANT = 1
    CLEANSMEAR = 1
    CLEANRECON = 0
    BATCHRUN = 0
    NOSECONDARIES = 0
    SHELL_TO_USE = "csh"
    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
    #loop over config file and set the "parameters"
    f = open(CONFIG_FILE, "r")

    for line in f:
        if len(line) == 0:
            continue
        if line[0] == "#":
            continue

        parts = line.split("#")[0].split("=")
        #print parts
        if len(parts) == 1:
            #print "Warning! No Sets given"
            continue

        if len(parts) > 2 and str(parts[0]).upper() != "VARIATION":
            print("warning! I am going to have a really difficult time with:")
            print(line)
            print(
                "I'm going to just ignore it and hope it isn't a problem....")
            continue

        rm_comments = []
        if len(parts) > 1:
            rm_comments = parts[len(parts) - 1].split("#")

        j = -1
        for i in parts:
            j = j + 1
            i = i.strip()
            parts[j] = i

        if str(parts[0]).upper() == "VERBOSE":
            if rm_comments[0].strip().upper(
            ) == "TRUE" or rm_comments[0].strip() == "1":
                VERBOSE = True
        elif str(parts[0]).upper() == "PROJECT":
            PROJECT = rm_comments[0].strip()
        elif str(parts[0]).upper() == "TRACK":
            TRACK = rm_comments[0].strip()
        elif str(parts[0]).upper() == "NCORES":
            NCORES = rm_comments[0].strip()
        elif str(parts[0]).upper() == "DISK":
            DISK = rm_comments[0].strip()
        elif str(parts[0]).upper() == "RAM":
            RAM = rm_comments[0].strip()
        elif str(parts[0]).upper() == "TIMELIMIT":
            TIMELIMIT = rm_comments[0].strip()
        elif str(parts[0]).upper() == "OS":
            OS = rm_comments[0].strip()
        elif str(parts[0]).upper() == "DATA_OUTPUT_BASE_DIR":
            DATA_OUTPUT_BASE_DIR = rm_comments[0].strip()
        elif str(parts[0]).upper() == "ENVIRONMENT_FILE":
            ENVFILE = rm_comments[0].strip()
        elif str(parts[0]).upper() == "GENERATOR":
            GENERATOR = rm_comments[0].strip()
        elif str(parts[0]).upper() == "GEANT_VERSION":
            GEANTVER = rm_comments[0].strip()
        elif str(parts[0]).upper() == "WORKFLOW_NAME":
            WORKFLOW = rm_comments[0].strip()
            if WORKFLOW.find(';') != -1 or WORKFLOW.find(
                    '&'
            ) != -1:  #THIS CHECK HELPS PROTECT AGAINST A POTENTIAL HACK IN WORKFLOW NAMES
                print("Nice try.....you cannot use ; or & in the name")
                exit(1)
        elif str(parts[0]).upper() == "GENERATOR_CONFIG":
            GENCONFIG = rm_comments[0].strip()
        elif str(parts[0]).upper() == "CUSTOM_MAKEMC":
            CUSTOM_MAKEMC = rm_comments[0].strip()
        elif str(parts[0]).upper() == "CUSTOM_GCONTROL":
            CUSTOM_GCONTROL = rm_comments[0].strip()
        elif str(parts[0]).upper() == "BKG":
            bkg_parts = rm_comments[0].strip().split("+")
            #print bkg_parts
            for part in bkg_parts:
                subparts = part.split(":")
                if len(subparts) > 2:
                    print("Error in BKG Parsing: " + part)
                    return
                if subparts[0].upper() == "TAGONLY":
                    BGTAGONLY = 1
                    if BGFOLD == "DEFAULT":
                        BGFOLD = "TagOnly"
                    if len(subparts) == 2:
                        BGRATE = subparts[1]

                elif subparts[0].upper() == "BEAMPHOTONS":
                    #print subparts
                    BGFOLD = subparts[0]
                    if len(subparts) == 2:
                        BGRATE = subparts[1]
                elif subparts[0].upper() == "RANDOM" or subparts[0].upper(
                ) == "DEFAULT":
                    BGFOLD = subparts[0]
                    if len(subparts) == 2:
                        RANDBGTAG = subparts[1]
                else:
                    BGFOLD = part

            #IF BEAMPHOTONS OR TAGONLY IS IN THE LIST AND A BGRATE IS NOT SPECIFIED AND WE ARE TALKING VARIATION=mc THEN SET IT PROPERLY
            #print BGFOLD
            #print BGTAGONLY
            #print BGRATE
            #return

        elif str(parts[0]).upper() == "EBEAM_ENERGY":
            eBEAM_ENERGY = rm_comments[0].strip()
        elif str(parts[0]).upper() == "COHERENT_PEAK":
            COHERENT_PEAK = rm_comments[0].strip()
        elif str(parts[0]).upper() == "RADIATOR_THICKNESS":
            RADIATOR_THICKNESS = rm_comments[0].strip()
        elif str(parts[0]).upper() == "GEN_MIN_ENERGY":
            MIN_GEN_ENERGY = rm_comments[0].strip()
        elif str(parts[0]).upper() == "GEN_MAX_ENERGY":
            MAX_GEN_ENERGY = rm_comments[0].strip()
        elif str(parts[0]).upper() == "TAG":
            TAGSTR = rm_comments[0].strip()
        elif str(parts[0]).upper() == "CUSTOM_PLUGINS":
            CUSTOM_PLUGINS = rm_comments[0].strip()
        elif str(parts[0]).upper() == "BATCH_SYSTEM":
            batch_sys_parts = rm_comments[0].strip().split(":")
            BATCHSYS = batch_sys_parts[0]
            if len(batch_sys_parts) > 1:
                QUEUENAME = batch_sys_parts[1]
        elif str(parts[0]).upper() == "RUNNING_DIRECTORY":
            RUNNING_DIR = rm_comments[0].strip()
        elif str(parts[0]).upper() == "RECON_CALIBTIME":
            RECON_CALIBTIME = rm_comments[0].strip()
        elif str(parts[0]).upper() == "VARIATION":
            #print parts
            #print rm_comments
            if (len(parts) > 2):
                VERSION = str(
                    parts[1]).split("calibtime")[0].split("#")[0].strip()
                CALIBTIME = str(parts[2]).split("#")[0].strip()
            else:
                VERSION = rm_comments[0].strip()
        elif str(parts[0]).upper() == "CCDBSQLITEPATH":
            ccdbSQLITEPATH = rm_comments[0].strip()
        elif str(parts[0]).upper() == "RCDBSQLITEPATH":
            rcdbSQLITEPATH = rm_comments[0].strip()
        elif str(parts[0]).upper() == "NOSECONDARIES":
            NOSECONDARIES = rm_comments[0].strip()
        else:
            print("unknown config parameter!! " + str(parts[0]))
    #loop over command line arguments

    LOG_DIR = DATA_OUTPUT_BASE_DIR  #set LOG_DIR=DATA_OUTPUT_BASE_DIR

    for argu in args:
        argfound = 0
        flag = argu.split("=")
        #redundat check to jump over the first 4 arguments
        if (len(flag) < 2):
            continue
        else:  #toggle the flags as user defines
            if flag[0] == "variation":
                argfound = 1

                VERSION = flag[1]
                CALIBTIME = "notime"
#                               for part in range(2,len(flag)):
#                                      VERSION+="="+flag[part]
            if flag[0] == "calibtime":
                argfound = 1
                CALIBTIME = flag[1]
            if flag[0] == "per_file":
                argfound = 1
                PERFILE = int(flag[1])
            if flag[0] == "base_file_number":
                argfound = 1
                BASEFILENUM = int(flag[1])
            if flag[0] == "generate":
                argfound = 1
                GENR = int(flag[1])
            if flag[0] == "geant":
                argfound = 1
                GEANT = int(flag[1])
            if flag[0] == "mcsmear":
                argfound = 1
                SMEAR = int(flag[1])
            if flag[0] == "recon":
                argfound = 1
                RECON = int(flag[1])
            if flag[0] == "cleangenerate":
                argfound = 1
                CLEANGENR = int(flag[1])
            if flag[0] == "cleangeant":
                argfound = 1
                CLEANGEANT = int(flag[1])
            if flag[0] == "cleanmcsmear":
                argfound = 1
                CLEANSMEAR = int(flag[1])
            if flag[0] == "cleanrecon":
                argfound = 1
                CLEANRECON = int(flag[1])
            if flag[0] == "batch":
                argfound = 1
                BATCHRUN = int(flag[1])
            if flag[0] == "numthreads":
                argfound = 1
                NCORES = str(flag[1])
            if flag[0] == "logdir":
                argfound = 1
                LOG_DIR = str(flag[1])
            if flag[0] == "projid":
                argfound = 1
                PROJECT_ID = str(flag[1])
            if flag[0] == "shell":
                argfound = 1
                SHELL_TO_USE = str(flag[1])
            if argfound == 0:
                print("WARNING OPTION: " + argu + " NOT FOUND!")

#  if str(GEANTVER)=="3":
#          print "!!!  Warning: Geant 3 detected! NumThreads has been set to 1"
#          print "!!!  This is done to ensure efficient use of resources while running and should provide faster job starts."
#          NCORES="2"
#          print ""

    if DATA_OUTPUT_BASE_DIR == "UNKNOWN_LOCATION":
        print("I doubt that the system will find " + DATA_OUTPUT_BASE_DIR +
              " so I am saving you the embarassment and stopping this")
        return

    name_breakdown = GENCONFIG.split("/")
    CHANNEL = name_breakdown[len(name_breakdown) - 1].split(".")[0]

    #print a line indicating SWIF or Local run
    if BATCHRUN == 0 or BATCHSYS == "NULL":
        print("Locally simulating " + args[2] + " " + CHANNEL + " Events")
    else:
        print("Creating " + WORKFLOW + " to simulate " + args[2] + " " +
              CHANNEL + " Events")
    # CREATE WORKFLOW

    #username = getpass.getuser()
    #print(username)
    #exit

    if (BATCHSYS.upper() == "SWIF" and int(BATCHRUN) != 0):
        status = subprocess.call(["swif", "create", "-workflow", WORKFLOW])

    #calculate files needed to gen
    # FIX: use floor division so these stay ints under Python 3; true
    # division yields a float and range(1, FILES_TO_GEN + 2) below would
    # raise TypeError. (// is identical for ints under Python 2.)
    FILES_TO_GEN = EVTS // PERFILE
    REMAINING_GEN = EVTS % PERFILE

    indir = os.environ.get('MCWRAPPER_CENTRAL')

    script_to_use = "/MakeMC.csh"

    # NOTE(review): assumes "from os import environ" at module scope and
    # that SHELL is set in the environment -- confirm in the full script.
    loginSHELL = environ['SHELL'].split("/")

    if loginSHELL[len(loginSHELL) - 1] == "bash" or (
            BATCHSYS.upper() == "OSG"
            and int(BATCHRUN) != 0) or SHELL_TO_USE == "bash":
        script_to_use = "/MakeMC.sh"
    elif loginSHELL[len(loginSHELL) - 1] == "zsh" or SHELL_TO_USE == "zcsh":
        script_to_use = "/MakeMC.sh"

    indir += script_to_use

    if len(CUSTOM_MAKEMC) != 0 and CUSTOM_MAKEMC != "DEFAULT":
        indir = CUSTOM_MAKEMC

    if (BATCHSYS.upper() == "OSG"
            or BATCHSYS.upper() == "SWIF") and int(BATCHRUN) != 0:
        ccdbSQLITEPATH = "batch_default"
        rcdbSQLITEPATH = "batch_default"

    if str(indir) == "None":
        print("MCWRAPPER_CENTRAL not set")
        return

    outdir = DATA_OUTPUT_BASE_DIR

    #if local run set out directory to cwd
    if outdir[len(outdir) - 1] != "/":
        outdir += "/"

    #for every needed file call the script with the right options

    #need two loops 1) for when RUNNUM is a number and 2) when it contains a "-" as in 11366-11555 or RunPeriod2017-02
    # for 2) use rcdb to get a list of the runs of a runperiod and amount of data.  Normalize number of events. Loop through list calling with the runnumbers from rcdb and their normalized num_events*requested events
    RunType = str(RUNNUM).split("-")

    if len(RunType) != 1:
        event_sum = 0.
        #Make python rcdb calls to form the vector
        db = rcdb.RCDBProvider("mysql://[email protected]/rcdb")

        #dbhost = "hallddb.jlab.org"
        #dbuser = '******'
        #dbpass = ''
        #dbname = 'data_monitoring'

        runlow = 0
        runhigh = 0

        if RunType[0] != "RunPeriod":
            runlow = RunType[0]
            runhigh = RunType[1]
        else:
            cnx = mysql.connector.connect(user='******',
                                          database='ccdb',
                                          host='hallddb.jlab.org')
            cursor = cnx.cursor()
            #ccddb = ccdb.CCDBProvider("mysql://ccdb_user@hallddb/ccdb")

            #runhigh=ccdbcon.session.query("select runMax from runRanges where name = test2")
            #runlow=runRange[0]
            #runhigh=runRange[1]
            runrange_name = ""
            for npart in RunType:
                if npart == "RunPeriod":
                    continue
                else:
                    runrange_name = runrange_name + npart

            cursor.execute(
                "select runMin,runMax from runRanges where name = '" +
                runrange_name + "'")
            runRange = cursor.fetchall()
            runlow = runRange[0][0]
            runhigh = runRange[0][1]
            print(runRange)
            #cursor.close()
            #cnx.close()
            print(str(runlow) + "-->" + str(runhigh))

        table = db.select_runs("@is_production and @status_approved", runlow,
                               runhigh).get_values(['event_count'], True)
        #print table
        #print len(table)
        for runs in table:
            if len(table) <= 1:
                break
            event_sum = event_sum + runs[1]

        print(event_sum)
        # NOTE(review): bare "exit" is a no-op (the function is not called);
        # left as-is since calling it would abort the submission loop below.
        exit
        sum2 = 0.
        for runs in table:  #do for each job
            #print runs[0]
            if len(table) <= 1:
                break
            num_events_this_run = int((
                (float(runs[1]) / float(event_sum)) * EVTS) + .5)
            sum2 = sum2 + int((
                (float(runs[1]) / float(event_sum)) * EVTS) + .5)
            #print num_events_this_run

            if num_events_this_run == 0:
                continue

        #do for each file needed
            # FIX: floor division (see FILES_TO_GEN above).
            FILES_TO_GEN_this_run = num_events_this_run // PERFILE
            REMAINING_GEN_this_run = num_events_this_run % PERFILE

            for FILENUM_this_run in range(1, FILES_TO_GEN_this_run + 2):
                num_this_file = PERFILE

                if FILENUM_this_run == FILES_TO_GEN_this_run + 1:
                    num_this_file = REMAINING_GEN_this_run

                if num_this_file == 0:
                    continue

                COMMAND = str(
                    BATCHRUN) + " " + ENVFILE + " " + GENCONFIG + " " + str(
                        outdir) + " " + str(runs[0]) + " " + str(
                            BASEFILENUM + FILENUM_this_run + -1
                        ) + " " + str(num_this_file) + " " + str(
                            VERSION
                        ) + " " + str(CALIBTIME) + " " + str(GENR) + " " + str(
                            GEANT
                        ) + " " + str(SMEAR) + " " + str(RECON) + " " + str(
                            CLEANGENR) + " " + str(CLEANGEANT) + " " + str(
                                CLEANSMEAR
                            ) + " " + str(CLEANRECON) + " " + str(
                                BATCHSYS
                            ) + " " + str(NCORES).split(':')[-1] + " " + str(
                                GENERATOR) + " " + str(GEANTVER) + " " + str(
                                    BGFOLD
                                ) + " " + str(CUSTOM_GCONTROL) + " " + str(
                                    eBEAM_ENERGY
                                ) + " " + str(COHERENT_PEAK) + " " + str(
                                    MIN_GEN_ENERGY
                                ) + " " + str(MAX_GEN_ENERGY) + " " + str(
                                    TAGSTR
                                ) + " " + str(CUSTOM_PLUGINS) + " " + str(
                                    PERFILE) + " " + str(
                                        RUNNING_DIR
                                    ) + " " + str(ccdbSQLITEPATH) + " " + str(
                                        rcdbSQLITEPATH
                                    ) + " " + str(BGTAGONLY) + " " + str(
                                        RADIATOR_THICKNESS
                                    ) + " " + str(BGRATE) + " " + str(
                                        RANDBGTAG) + " " + str(
                                            RECON_CALIBTIME) + " " + str(
                                                NOSECONDARIES)
                if BATCHRUN == 0 or BATCHSYS == "NULL":
                    #print str(runs[0])+" "+str(BASEFILENUM+FILENUM_this_run+-1)+" "+str(num_this_file)
                    os.system(str(indir) + " " + COMMAND)
                else:
                    if BATCHSYS.upper() == "SWIF":
                        swif_add_job(WORKFLOW, runs[0],
                                     BASEFILENUM + FILENUM_this_run + -1,
                                     str(indir), COMMAND, VERBOSE, PROJECT,
                                     TRACK, NCORES, DISK, RAM, TIMELIMIT, OS,
                                     DATA_OUTPUT_BASE_DIR, PROJECT_ID)
                    elif BATCHSYS.upper() == "QSUB":
                        qsub_add_job(VERBOSE, WORKFLOW, runs[0],
                                     BASEFILENUM + FILENUM_this_run + -1,
                                     indir, COMMAND, NCORES,
                                     DATA_OUTPUT_BASE_DIR, TIMELIMIT,
                                     RUNNING_DIR, RAM, QUEUENAME, LOG_DIR,
                                     PROJECT_ID)
                    elif BATCHSYS.upper() == "CONDOR":
                        condor_add_job(VERBOSE, WORKFLOW, runs[0],
                                       BASEFILENUM + FILENUM_this_run + -1,
                                       indir, COMMAND, NCORES,
                                       DATA_OUTPUT_BASE_DIR, TIMELIMIT,
                                       RUNNING_DIR, PROJECT_ID)
                    elif BATCHSYS.upper() == "OSG":
                        OSG_add_job(VERBOSE, WORKFLOW, runs[0],
                                    BASEFILENUM + FILENUM_this_run + -1, indir,
                                    COMMAND, NCORES, DATA_OUTPUT_BASE_DIR,
                                    TIMELIMIT, RUNNING_DIR, ENVFILE, LOG_DIR,
                                    RANDBGTAG, PROJECT_ID)
                    elif BATCHSYS.upper() == "SLURM":
                        SLURM_add_job(VERBOSE, WORKFLOW, runs[0],
                                      BASEFILENUM + FILENUM_this_run + -1,
                                      indir, COMMAND, NCORES,
                                      DATA_OUTPUT_BASE_DIR, TIMELIMIT,
                                      RUNNING_DIR, ENVFILE, LOG_DIR, RANDBGTAG,
                                      PROJECT_ID)
            #print "----------------"

    else:
        if FILES_TO_GEN >= 500 and (ccdbSQLITEPATH == "no_sqlite"
                                    or rcdbSQLITEPATH == "no_sqlite"):
            print(
                "This job has >500 subjobs and risks ddosing the servers.  Please use sqlite or request again with a larger per file. "
            )
            return
        for FILENUM in range(1, FILES_TO_GEN + 2):
            num = PERFILE
            #last file gets the remainder
            if FILENUM == FILES_TO_GEN + 1:
                num = REMAINING_GEN
            #if ever asked to generate 0 events....just don't
            if num == 0:
                continue

            COMMAND = str(
                BATCHRUN) + " " + ENVFILE + " " + GENCONFIG + " " + str(
                    outdir) + " " + str(RUNNUM) + " " + str(
                        BASEFILENUM + FILENUM + -1
                    ) + " " + str(num) + " " + str(VERSION) + " " + str(
                        CALIBTIME
                    ) + " " + str(GENR) + " " + str(GEANT) + " " + str(
                        SMEAR
                    ) + " " + str(RECON) + " " + str(CLEANGENR) + " " + str(
                        CLEANGEANT) + " " + str(CLEANSMEAR) + " " + str(
                            CLEANRECON) + " " + str(BATCHSYS).upper(
                            ) + " " + str(NCORES).split(':')[-1] + " " + str(
                                GENERATOR) + " " + str(GEANTVER) + " " + str(
                                    BGFOLD
                                ) + " " + str(CUSTOM_GCONTROL) + " " + str(
                                    eBEAM_ENERGY
                                ) + " " + str(COHERENT_PEAK) + " " + str(
                                    MIN_GEN_ENERGY
                                ) + " " + str(MAX_GEN_ENERGY) + " " + str(
                                    TAGSTR
                                ) + " " + str(CUSTOM_PLUGINS) + " " + str(
                                    PERFILE) + " " + str(
                                        RUNNING_DIR
                                    ) + " " + str(ccdbSQLITEPATH) + " " + str(
                                        rcdbSQLITEPATH
                                    ) + " " + str(BGTAGONLY) + " " + str(
                                        RADIATOR_THICKNESS
                                    ) + " " + str(BGRATE) + " " + str(
                                        RANDBGTAG) + " " + str(
                                            RECON_CALIBTIME) + " " + str(
                                                NOSECONDARIES)

            #either call MakeMC.csh or add a job depending on swif flag
            if BATCHRUN == 0 or BATCHSYS == "NULL":
                os.system(str(indir) + " " + COMMAND)
            else:
                if BATCHSYS.upper() == "SWIF":
                    swif_add_job(WORKFLOW, RUNNUM, BASEFILENUM + FILENUM + -1,
                                 str(indir), COMMAND, VERBOSE, PROJECT, TRACK,
                                 NCORES, DISK, RAM, TIMELIMIT, OS,
                                 DATA_OUTPUT_BASE_DIR, PROJECT_ID)
                elif BATCHSYS.upper() == "QSUB":
                    qsub_add_job(VERBOSE, WORKFLOW, RUNNUM,
                                 BASEFILENUM + FILENUM + -1, indir, COMMAND,
                                 NCORES, DATA_OUTPUT_BASE_DIR, TIMELIMIT,
                                 RUNNING_DIR, RAM, QUEUENAME, LOG_DIR,
                                 PROJECT_ID)
                elif BATCHSYS.upper() == "CONDOR":
                    condor_add_job(VERBOSE, WORKFLOW, RUNNUM,
                                   BASEFILENUM + FILENUM + -1, indir, COMMAND,
                                   NCORES, DATA_OUTPUT_BASE_DIR, TIMELIMIT,
                                   RUNNING_DIR, PROJECT_ID)
                elif BATCHSYS.upper() == "OSG":
                    OSG_add_job(VERBOSE, WORKFLOW, RUNNUM,
                                BASEFILENUM + FILENUM + -1, indir, COMMAND,
                                NCORES, DATA_OUTPUT_BASE_DIR, TIMELIMIT,
                                RUNNING_DIR, ENVFILE, LOG_DIR, RANDBGTAG,
                                PROJECT_ID)
                elif BATCHSYS.upper() == "SLURM":
                    SLURM_add_job(VERBOSE, WORKFLOW, RUNNUM,
                                  BASEFILENUM + FILENUM + -1, indir, COMMAND,
                                  NCORES, DATA_OUTPUT_BASE_DIR, TIMELIMIT,
                                  RUNNING_DIR, ENVFILE, LOG_DIR, RANDBGTAG,
                                  PROJECT_ID)

    if BATCHRUN == 1 and BATCHSYS.upper() == "SWIF":
        print("All Jobs created.  Please call \"swif run " + WORKFLOW +
              "\" to run")
    elif BATCHRUN == 2 and BATCHSYS.upper() == "SWIF":
        swifrun = "swif run " + WORKFLOW
        subprocess.call(swifrun.split(" "))

    try:
        # FIX: was "dbcnx.close()" -- dbcnx is never defined, so the bare
        # except silently swallowed a NameError and the CCDB MySQL
        # connection (cnx, opened in the RunPeriod branch) was never closed.
        cnx.close()
    except Exception:
        pass
コード例 #15
0
# Logging verbosity is driven by the -v command-line flag.
if args.v:
    logging.basicConfig(level=logging.INFO,
                        format='%(levelname)-9s: %(message)s')
else:
    logging.basicConfig(level=logging.CRITICAL,
                        format='%(levelname)-9s: %(message)s')

logger = logging.getLogger()

# Bail out early if the cache directory does not exist.
if not os.path.isdir(args.path):
    logger.critical('Invalid path:  ' + args.path)
    sys.exit(999)

# Snapshot of the directories already present in the cache.
cached_dirs = glob.glob(args.path + '/*')

# args.r is the RCDB connection string.
db = rcdb.RCDBProvider(args.r)

# Start above any plausible run number; the loop below walks backwards
# through runs via db.get_prev_run().
run = 1e9

error_runs = []

while True:

    run = db.get_prev_run(run)

    try:
        run_start_time = db.get_condition(run, 'run_start_time').value
        age_hours_start = (datetime.datetime.now() -
                           run_start_time).total_seconds() / 60 / 60
        event_count = db.get_condition(run, 'event_count').value
        evio_files_count = db.get_condition(run, 'evio_files_count').value
コード例 #16
0
    # incorrect option given
    else:
        print "\n./scalerVtime.csh: invalid option", sys.argv[1], \
         "\nTry ./scalerVtime -help' for more information.\n"
        sys.exit()

# no option given
else:
    print "\n./scalerVtime.csh: no option given.\n" \
     "Try './scalerVtime -help' for more information.\n"
    sys.exit()

# Create output file and get rcdb info
output_file = open("RunInfo.txt", "w")
db = rcdb.RCDBProvider("mysql://rcdb@hallddb/rcdb")
runs = db.select_runs("@is_production and @status_approved", begin_run,
                      end_run)

# Check to make sure runs > 0
# (Warn only; the loop below simply writes nothing in that case.)
if len(runs) == 0:
    print "Error: Run list empty."
    print "Run(s) given are either not @is_production or @status_approved."
    print "RunInfo.txt will be an empty file and this analysis will FAIL."

# Write run_number, start_time, and stop_time to output file
# NOTE(review): output_file is never explicitly closed; flushing of
# RunInfo.txt relies on interpreter exit.
for run in runs:
    output_file.write('%s %s %s\n' %
                      (run.number, run.start_time, run.end_time))
print "RunInfo.txt complete ..."
コード例 #17
0
ファイル: test_select_runs.py プロジェクト: rjones30/rcdb
    def setUp(self):
        """Build an in-memory RCDB with runs 1-5 and 9 and fill the
        condition values tabulated at the bottom of this method."""
        self.db = rcdb.RCDBProvider("sqlite://", check_version=False)
        rcdb.provider.destroy_all_create_schema(self.db)
        runs = {}
        # create runs
        for i in range(1, 6):
            runs[i] = self.db.create_run(i)

        runs[9] = self.db.create_run(9)
        self.runs = runs

        self.db.create_condition_type("a", ConditionType.INT_FIELD, "Test condition 'a'")
        self.db.create_condition_type("b", ConditionType.FLOAT_FIELD, "Test condition 'b'")
        self.db.create_condition_type("c", ConditionType.BOOL_FIELD, "Test condition 'v'")
        self.db.create_condition_type("d", ConditionType.STRING_FIELD, "Test condition 'd'")
        self.db.create_condition_type("e", ConditionType.JSON_FIELD, "Test condition 'e'")
        self.db.create_condition_type("f", ConditionType.STRING_FIELD, "Test condition 'f'")

        self.db.add_condition(1, "a", 1)
        self.db.add_condition(2, "a", 2)
        self.db.add_condition(3, "a", 3)
        self.db.add_condition(4, "a", 4)
        self.db.add_condition(9, "a", 9)

        self.db.add_condition(1, "b", 1.01)
        self.db.add_condition(2, "b", 2.54)
        self.db.add_condition(3, "b", 2.55)
        self.db.add_condition(4, "b", 1.64)
        self.db.add_condition(5, "b", 2.32)
        self.db.add_condition(9, "b", 2.02)

        self.db.add_condition(1, "c", False)
        self.db.add_condition(2, "c", True)
        self.db.add_condition(3, "c", True)
        self.db.add_condition(4, "c", True)
        self.db.add_condition(5, "c", False)
        self.db.add_condition(9, "c", True)

        self.db.add_condition(1, "d", "haha")
        self.db.add_condition(4, "d", "hoho")
        self.db.add_condition(5, "d", "bang")
        self.db.add_condition(9, "d", "mew")

        self.db.add_condition(1, "e", '{"a":1}')
        self.db.add_condition(4, "e", "[1,2,3]")
        self.db.add_condition(9, "e", '[3,2,{"b":5}]')

        self.db.add_condition(4, "f", "my only value")

        """
        run |     a     |     b     |     c     |      d     |     e         |     f
        -------------------------------------------------------------------------------------
          1 | 1         | 1.01      | False     | haha       | {"a":1}       | None
          2 | 2         | 2.54      | True      | None       | None          | None
          3 | 3         | 2.55      | True      | None       | None          | None
          4 | 4         | 1.64      | True      | hoho       | [1,2,3]       | my only value
          5 | None      | 2.32      | False     | bang       | None          | None
          9 | 9         | 2.02      | True      | mew        | [3,2,{"b":5}] | None

        """

    # FIX: tearDown was accidentally defined NESTED inside setUp, so
    # unittest never called it and the DB was never disconnected.
    # It is now a proper sibling method of the test case.
    def tearDown(self):
        """Disconnect the in-memory DB after each test."""
        self.db.disconnect()
コード例 #18
0
description = """
Test script that create conditions_view for all existing conditions

"""
if __name__ == "__main__":
    # Get connection string from arguments
    parser = argparse.ArgumentParser(description="This example shows select runs and put them by dates")
    parser.add_argument("connection_string",
                        nargs='?',
                        help="RCDB connection string mysql://rcdb@localhost/rcdb",
                        default="mysql://[email protected]/rcdb")
    args = parser.parse_args()

    # Open DB connection
    db = rcdb.RCDBProvider(args.connection_string)

    condition_types = db.get_condition_types()

    # Build a SELECT that exposes every condition as a named column:
    # one column and one LEFT JOIN of the conditions table per type,
    # each join aliased as "<name>_table".
    query = "SELECT runs.number run" + os.linesep
    query_joins = " FROM runs " + os.linesep
    for ct in condition_types:
        assert isinstance(ct, ConditionType)
        table_name = ct.name + "_table"
        query += "  ,{}.{} {}{}".format(table_name, ct.get_value_field_name(), ct.name, os.linesep)
        query_joins += "  LEFT JOIN conditions {0} " \
                       "  ON {0}.run_number = runs.number AND {0}.condition_type_id = {1}{2}"\
            .format(table_name, ct.id, os.linesep)

        print(ct)
コード例 #19
0
 def setUp(self):
     """Create a fresh in-memory RCDB schema holding a single run."""
     self.db = rcdb.RCDBProvider("sqlite://", check_version=False)
     rcdb.provider.destroy_all_create_schema(self.db)
     # create run
     self.db.create_run(1)
コード例 #20
0
#!/usr/bin/env python

from optparse import OptionParser
import os.path
import os
import sys
import re
import subprocess
import glob

##################### RCDB ENVIRONMENT ####################
os.environ["RCDB_HOME"] = "/group/halld/www/halldweb/html/rcdb_home"
sys.path.append("/group/halld/www/halldweb/html/rcdb_home/python")
import rcdb
db = rcdb.RCDBProvider("mysql://[email protected]/rcdb")
#db = rcdb.RCDBProvider("sqlite:////group/halld/www/halldweb/html/dist/rcdb.sqlite")

##################### Launch pair of runs ####################
def launch_run(MyRunNum, MyInDataDir, MyOutDataDir):
    """Run Ken's cobrem fitter over one run's diamond histogram file.

    Re-points the 'hist_diamond.root' symlink at the run's input ROOT file,
    runs cobremFit.C in batch mode, and moves the fit outputs (pdf + root)
    into MyOutDataDir, tagged with the run number.
    """
    # Input histogram for this run (note the literal '0' inserted before the
    # run number in the file name).
    fileNameDiamond = '%shd_root_0%s.root' % (MyInDataDir, MyRunNum)
    # Alternative input naming kept for reference:
    #fileNameDiamond = '%shdmon_online0%s.root' % (MyInDataDir, MyRunNum)

    # Refresh the symlink the fitter macro reads from.
    for command in (["rm", "hist_diamond.root"],
                    ["ln", "-s", fileNameDiamond, "hist_diamond.root"]):
        subprocess.call(command)

    # Ken's fitter (integrate over entire run)
    subprocess.call(["root.exe", "-l", "-b", "-q", "cobremFit.C+"])

    # Stash the fit products under the output directory.
    subprocess.call(["mkdir", "-p", MyOutDataDir])
    for extension in ("pdf", "root"):
        source = "enhancementFit." + extension
        destination = "%s/enhancementFit_%s.%s" % (MyOutDataDir, MyRunNum, extension)
        subprocess.call(["mv", source, destination])
    
コード例 #21
0
        conditions.update(update_beam_conditions(run, log))

    # Add all the values that we've determined to the RCDB
    for (key, value) in conditions.items():
        log.debug(Lf("Adding cnd '{}'='{}'", key, value))

    db.add_conditions(run, conditions, True)

    log.debug("Committed to DB. End of update_rcdb_conds()")
    return conditions


# entry point: connect to the RCDB MySQL instance and update the conditions
# of a single run given on the command line.
#   sys.argv[1] - database password spliced into the connection string
#   sys.argv[2] - run number to update
# NOTE(review): the connection string below looks redacted ("*****") and
# contains no %s placeholder, so the "%" formatting on it will raise a
# TypeError as written - confirm against the original (unredacted) script.
if __name__ == "__main__":
    log = logging.getLogger(
        'rcdb.update')  # create run configuration standard logger
    log.addHandler(logging.StreamHandler(
        sys.stdout))  # add console output for logger
    log.setLevel(logging.DEBUG
                 )  # print everything. Change to logging.INFO for less output

    # Alternative connection strings kept for reference:
    #db = rcdb.RCDBProvider("sqlite:///"+sys.argv[1])
    #db = rcdb.RCDBProvider("mysql://[email protected]/rcdb")
    db = rcdb.RCDBProvider("mysql://*****:*****@gluondb1/rcdb" % sys.argv[1])
    update_rcdb_conds(db, int(sys.argv[2]), "update")

    # Bulk-update loop kept for reference:
    #query = db.session.query(Run).filter(Run.number > 9999)
    #print(query.all())
    #for run in query.all():
    #    update_rcdb_conds(db, run.number)
コード例 #22
0
from token import NAME

from ply.lex import LexToken

import rcdb
import rcdb.lexer
from rcdb import RCDBProvider
from rcdb.model import ConditionType, Run, Condition
from sqlalchemy import or_, and_
from sqlalchemy.orm import joinedload, aliased
import shlex
import ast

from rcdb.stopwatch import StopWatchTimer

db = rcdb.RCDBProvider("mysql://[email protected]/rcdb")
"""
.session \
.query(Run) \
.options(subqueryload(Run.conditions)) \
.filter(Run.number == run_number) \
.first()
"""

import logging


# custom tree formatter
class TreeFormatter(logging.Formatter):
    formatPrefix = {}  # map loggername, formatPrefix
コード例 #23
0
def main():
    """Compose the daily online-monitoring e-mail body.

    Queries RCDB for DIRC physics runs, finds the first run whose end time
    falls within the last 24 hours, and writes Plot-Browser links for the
    occupancy and high-level macros into the message file picked up by the
    simple_email_list mailer.  Exits silently when there is nothing new.
    """
    db = rcdb.RCDBProvider("mysql://rcdb@hallddb/rcdb")

    # date and time for 24 hours previous
    beginTime = datetime.datetime.now() - datetime.timedelta(days=1)  #days=1
    beginRun = 0
    CurrentPeriod = "RunPeriod-2019-11"

    # get first and last runs for the last 24 hours; the query string is also
    # embedded (URL-quoted) into every Plot-Browser link below
    rcdb_query = r"daq_run=='PHYSICS_DIRC' and event_count > 500000 and solenoid_current > 100 and collimator_diameter != 'Blocking'"
    rcdb_query_url = urllib.parse.quote(rcdb_query)
    runs = db.select_runs(rcdb_query, 70000, 80000)

    # No matching runs at all: nothing to report (also protects runs[-1] below)
    if not runs:
        sys.exit()

    for run in runs:
        print(run.number)
        print(run.end_time)
        print(beginTime)
        # BUGFIX: guard the run's end time, which may be NULL in RCDB for a
        # run that did not end properly.  The old code tested 'beginTime',
        # which is assigned just above and can never be None, so a NULL
        # end_time crashed the datetime comparison below with a TypeError.
        if run.end_time is None:
            continue
        if run.end_time > beginTime:
            beginRun = run.number
            break
    endRun = runs[-1].number

    # check for new runs
    if beginRun == 0:
        sys.exit()

    # Prepare text for email (text list: l0)
    l0 = [
        'https://halldweb.jlab.org/wiki/index.php/Online_Monitoring_Data_Validation\n\n'
    ]
    l0.append("Plot browser links for yesterday's runs (since %s)\n" %
              beginTime.strftime("%Y-%m-%d %H:%M:%S"))
    l0.append('[Runs included]  %d-%d\n' % (beginRun, endRun))
    l0.append('[RCDB query]     %s\n' % rcdb_query)
    l0.append("\n========\n")
    l0.append("Occupancy Macros:\n")

    # set list of histograms titles and names in this list
    # (each entry: [plot name in the browser, human-readable label])
    hist_names = []
    hist_names.append(["CDC_occupancy", "CDC"])
    hist_names.append(["FDC_occupancy", "FDC"])
    hist_names.append(["FCAL_occupancy", "FCAL"])
    hist_names.append(["BCAL_occupancy", "BCAL"])
    hist_names.append(["PS_occupancy", "PS"])
    hist_names.append(["RF_TPOL_occupancy", "RF & TPOL"])
    hist_names.append(["ST_occupancy", "ST"])
    hist_names.append(["TAGGER_occupancy", "TAGGER"])
    hist_names.append(["TOF_occupancy", "TOF"])
    hist_names.append(["DigiHits_occupancy", "Hit Multiplicity"])
    hist_names.append(["DIRC_occupancy", "DIRC South"])
    hist_names.append(["DIRC_North_occupancy", "DIRC North"])
    hist_names.append(["DIRC_hit", "DIRC Hits"])
    hist_names.append(["DIRC_digihit", "DIRC DigiHits"])
    hist_names.append(["CCAL_occupancy", "CCAL occupancy"])
    # hist_names.append(["ccal_cluster_et", "CCAL Cluster et"])
    # hist_names.append(["ccal_cluster_space", "CCAL Cluster Space"])
    # hist_names.append(["ccal_comp2", "CCAL Comp 2"])
    # hist_names.append(["ccal_comp", "CCAL Comp"])
    # hist_names.append(["ccal_dig_pedestal", "CCAL Digi Pedestal"])
    # hist_names.append(["ccal_dig_pulse", "CCAL Digi Pulse"])
    # hist_names.append(["ccal_hit_energy", "CCAL Hit Energy"])

    for hist in hist_names:
        l0.append(
            "%s: https://halldweb.jlab.org/data_monitoring/Plot_Browser.html?minRunNum=%d&maxRunNum=%d&RunPeriod=%s&Version=rawdata_ver00&Plot=%s&rcdb_query=%s\n\n"
            % (hist[1], beginRun, endRun, CurrentPeriod, hist[0],
               rcdb_query_url))

    l0.append("\n========\n")
    l0.append("\n\n High Level Online Macros:\n")

    # set list of high_level titles and names in this list
    # (each entry: [human-readable label, plot name in the browser])
    high_level_online_names = []
    high_level_online_names.append(["Beam", "HistMacro_Beam"])
    high_level_online_names.append(["Kinematics", "HistMacro_Kinematics"])
    high_level_online_names.append(
        ["NumHighLevelObjects", "HistMacro_NumHighLevelObjects"])
    high_level_online_names.append(["PID", "HistMacro_PID"])
    high_level_online_names.append(["Trigger", "HistMacro_Trigger"])
    high_level_online_names.append(["Vertex", "HistMacro_Vertex"])

    for hist in high_level_online_names:
        l0.append(
            "%s: https://halldweb.jlab.org/data_monitoring/Plot_Browser.html?minRunNum=%d&maxRunNum=%d&RunPeriod=%s&Version=rawdata_ver00&Plot=%s&rcdb_query=%s\n\n"
            % (hist[0], beginRun, endRun, CurrentPeriod, hist[1],
               rcdb_query_url))

    # Older plotBrowser.py link format, kept for reference:
    # high_level_names = []
    # high_level_names.append(["Beam", "Beam"])
    # high_level_names.append(["Vertex", "Vertex"])
    # high_level_names.append(["Trigger", "Trigger"])
    # high_level_names.append(["NumHighLevelObjects", "# HL Obj"])
    # high_level_names.append(["PID", "PID"])
    # high_level_names.append(["Kinematics", "Kinematics"])
    # for hist in high_level_names:
    #   l0.append("%s: https://halldweb.jlab.org/cgi-bin/data_monitoring/monitoring/plotBrowser.py?run1=%d&run2=%d&plot=HistMacro_%s&ver=rawdata_ver00\n\n" % (hist[1], beginRun, endRun, hist[0]))

    # Hand the finished message to the mailer.
    with open(
            "/home/gluex/simple_email_list/lists/monitoring_update/message.txt",
            "w") as f:
        f.writelines(l0)
コード例 #24
0
ファイル: epics_helper.py プロジェクト: sanghwapark/pvdb-1
        conditions.update(mya_get_run_conds(run, log))

    db.add_conditions(run, conditions, True)
    log.debug("Commited to DB. End of update_db_conds()")

    return conditions


# Entry point (Python 2 script): update the conditions of one run in pvdb.
#   sys.argv[1] - run number to update
if __name__ == "__main__":
    # check if it would have caget available
    # (EPICS caget is only expected on the adaq/aonl/ops hosts)
    host = socket.gethostname()
    if not ("adaq" in host or "aonl" in host or "ops" in host):
        print "You may  not have caget available. Check first"
        sys.exit()

    log = logging.getLogger(
        'pvdb.update')  # create run configuration standard logger
    log.addHandler(logging.StreamHandler(
        sys.stdout))  # add console output for logger
    log.setLevel(logging.DEBUG
                 )  # print everything. Change to logging.INFO for less output

    # Connection string: the RCDB_CONNECTION env var wins, otherwise fall
    # back to the default local pvdb database.
    con_str = os.environ["RCDB_CONNECTION"] \
        if "RCDB_CONNECTION" in os.environ.keys() \
        else "mysql://pvdb@localhost/pvdb"

    db = rcdb.RCDBProvider(con_str)

    # argv = run number
    update_db_conds(db, int(sys.argv[1]), "update")
コード例 #25
0
    def GetChargeall(self,
                     beginRunID=5939,
                     EndRunID=5943,
                     targName="D-208Pb10-D"):
        '''
        Get the accumulated charge over an inclusive run-ID range.

        For every run in [beginRunID, EndRunID] that passes the production
        cuts (run_type == 'Production' and target matching targName), the
        total charge is computed from EPICS averages (beam_current *
        run_length) and a "good" charge from the prompt-analysis summary.
        Per-run results are collected in a local dict; the method prints a
        summary and returns only the total charge.

        :param beginRunID: first run number of the range (inclusive)
        :param EndRunID: last run number of the range (inclusive)
        :param targName: target name a production run must contain
        :return: total charge of all accepted runs, in Coulombs
        '''

        runs = []
        brun = beginRunID
        erun = EndRunID

        for x in range(int(brun), int(erun) + 1):
            runs.append(x)

        # DB connection
        con_str = "mysql://[email protected]:3306/a-rcdb"
        db = rcdb.RCDBProvider(con_str)

        # dictionary to fill: run number (as string) -> per-run results
        dd = {}

        nrun = 0
        ngood = 0
        good_sum = 0
        charge_sum = 0

        # get result
        result = db.select_runs("", runs[0], runs[-1])
        for run in result:
            runno = str(run.number)
            # Helicity flip rate in Hz; used to convert prompt event counts
            # into time.  PREX2 runs (3876-4999) flipped at 240 Hz.
            helFlipRate = 120.0
            # PREX2
            if run.number >= 3876 and run.number < 5000:
                helFlipRate = 240.0
            dd[runno] = {}

            # from db
            run_type = run.get_condition_value("run_type")
            target_type = run.get_condition_value("target_type")
            run_flag = run.get_condition_value("run_flag")
            arm_flag = run.get_condition_value("arm_flag")

            pass_cut = True

            if run_type is None or run_type not in ['Production']:
                pass_cut = False

            # Wrong target only disqualifies the run for slugs < 3999
            # NOTE(review): assumes "slug" is always set and numeric - confirm
            if target_type is None or targName not in target_type:
                if run.get_condition_value("slug") < 3999:
                    print("Non-production target run")
                    print(run.get_condition_value("target_type"))
                    pass_cut = False
            good_run_flag = False
            # Tight cut (good_run_flag is computed but not used below)
            if run_flag is not None and run_flag != 'Bad':
                good_run_flag = True

            print(run)
            print(run.start_time)

            # Charges default to the string "0" and stay that way for runs
            # that fail the cuts or did not end properly.
            charge1 = "0"
            charge2 = "0"

            if pass_cut:
                avg_cur = run.get_condition_value("beam_current")
                length = run.get_condition_value("run_length")
                ihwp = run.get_condition_value("ihwp")
                rhwp = run.get_condition_value("rhwp")

                # read prompt summary
                """
                0: start time
                1: end time
                2: number of events processed
                3: fraction of good events/all
                4: bcm mean
                """
                summary_output = self.get_summary_output(str(run.number))
                # if prompt analysis output exists or not
                if not summary_output:
                    # skip the run if there is no prompt summary
                    print("=== Prompt summary output does not exist for run: ",
                          runno, ", skip this run for Good charge")
                    charge2 = "0"
                    prompt_time = "0"
                    start_time = run.start_time
                else:
                    start_time = summary_output[0]
                    if length is None:
                        # use prompt Nevent instead
                        length = float(summary_output[2]) * 1.0 / helFlipRate
                    # good charge from prompt output
                    if 'nan' in summary_output[3]:
                        print("=== No good event processed for run :", runno,
                              " , skip this run")
                        prompt_time = "0"
                        charge2 = "0"
                    else:
                        # effective time = events * good-fraction / flip rate
                        prompt_time = float(summary_output[2]) * float(
                            summary_output[3]) * 0.01 * 1.0 / helFlipRate
                        # PREX2 charge is doubled (240 Hz flip rate window)
                        if run.number >= 3876 and run.number < 5000:
                            charge2 = float(prompt_time) * float(
                                summary_output[4]) * 2
                        else:
                            charge2 = float(prompt_time) * float(
                                summary_output[4])
                if length is None:
                    print("=== Run did not end properly...", runno,
                          ", let's skip this")
                else:
                    # calculate charge all (from epics)
                    charge1 = float(avg_cur) * float(length)
                    # fill dict
                dd[runno]["avg_cur"] = avg_cur
                dd[runno]["length"] = length
                dd[runno]["charge_all"] = charge1
                dd[runno]["charge_good"] = charge2
                dd[runno]["eff_time"] = prompt_time
                dd[runno]["start_time"] = start_time
            else:
                # print runno, run_type, target_type, run_flag
                dd[runno]["charge_all"] = "0"
                dd[runno]["charge_good"] = "0"
                dd[runno]["start_time"] = run.start_time

            # Sum over all production runs (with 208Pb target)
            charge_sum += float(charge1)
            nrun += 1
            # Count good runs
            if dd[runno]["charge_all"] != "0":
                ngood += 1
                good_sum += float(dd[runno]["charge_good"])
        # charge_sum is in uC at this point; scale to Coulombs for output
        print("Total runs: %d,\t\tTotal charge sum: %.2f C" %
              (nrun, float(charge_sum) * 1.e-6))
        return float(charge_sum) * 1.e-6
コード例 #26
0
ファイル: get_started.py プロジェクト: JeffersonLab/pvdb
import os
import rcdb
import parity_rcdb

if __name__=="__main__":
    # Resolve the connection string: the RCDB_CONNECTION environment variable
    # wins, otherwise fall back to the default local pvdb database.
    if "RCDB_CONNECTION" in os.environ.keys():
        con_str = os.environ["RCDB_CONNECTION"]
    else:
        con_str = "mysql://pvdb@localhost/pvdb"

    # Connect and create default tables.
    # CAUTION: this deletes any existing schema and creates a fresh one.
    db = rcdb.RCDBProvider(con_str, check_version=False)
    rcdb.provider.destroy_all_create_schema(db)

    print("create default condition types")
    rcdb.create_condition_types(db)

    print("add parity condition types")
    parity_rcdb.create_condition_types(db)

    # Schema-version bookkeeping, kept for reference:
    # v = SchemaVersion()
    # v.version = rcdb.SQL_SCHEMA_VERSION
    # v.comment = "CREATED BY get_started script"
    # db.session.add(v)
    # db.session.commit()
コード例 #27
0
def main(argv):
    """Submit analysis jobs for runs MINRUN..MAXRUN (Python 2 script).

    Usage: launch.py job_configfile minrun maxrun [-f file_num] [-v True]

    Reads the job config file, optionally filters runs through an RCDB
    query, locates the input files for each surviving run and adds one job
    per file to the configured workflow via add_job().
    """
    global VERBOSE  # so can modify here

    # PARSER
    parser_usage = "launch.py job_configfile minrun maxrun\n\n"
    parser_usage += "optional: -f file_num: file_num must be 3 digits, with leading 0's if necessary)\n"
    parser_usage += "          but, it can be a search string for glob (e.g. first 5 files: -f '00[0-4]' (MUST include quotes!))\n\n"
    parser_usage += "optional: -v True: verbose output\n\n"
    parser = OptionParser(usage=parser_usage)

    # PARSER OPTIONS
    parser.add_option("-f",
                      "--file",
                      dest="file",
                      help="specify file(s) to run over")
    parser.add_option("-v", "--verbose", dest="verbose", help="verbose output")

    # GET ARGUMENTS
    (options, args) = parser.parse_args(argv)
    if (len(args) < 3):
        parser.print_help()
        return

    # SET INPUT VARIABLES
    JOB_CONFIG_FILE = args[0]
    MINRUN = int(args[1])
    MAXRUN = int(args[2])
    VERBOSE = True if (options.verbose) else False
    INPUT_FILE_NUM = options.file if (
        options.file
    ) else "*"  #must be three digits, with leading 0's if necessary

    # READ CONFIG
    config_dict = read_config(JOB_CONFIG_FILE)
    validate_config(config_dict)

    # SET CONTROL VARIABLES
    WORKFLOW = config_dict["WORKFLOW"]
    RCDB_QUERY = config_dict["RCDB_QUERY"] if ("RCDB_QUERY"
                                               in config_dict) else ""
    INDATA_TOPDIR = config_dict["INDATA_TOPDIR"] if ("INDATA_TOPDIR"
                                                     in config_dict) else ""

    # GET THE LIST OF GOOD RUNS (empty query means "accept every run")
    db = rcdb.RCDBProvider("mysql://rcdb@hallddb/rcdb")
    good_runs = []
    if (VERBOSE == True):
        print "RCDB_QUERY = " + RCDB_QUERY
    if (RCDB_QUERY != ""):
        good_runs = db.select_runs(RCDB_QUERY, MINRUN, MAXRUN)
    if (VERBOSE == True):
        print str(len(good_runs)) + " good runs in range: " + str(
            MINRUN) + " - " + str(MAXRUN)

    # FIND & ADD JOBS
    for RUN in range(MINRUN, MAXRUN + 1):

        # See if is good run
        rcdb_run_info = db.get_run(int(RUN))
        if (RCDB_QUERY != "") and (rcdb_run_info not in good_runs):
            continue

        # Format run number
        FORMATTED_RUN = "%06d" % RUN

        # Find files for run number: First try separate folder for each run
        INDATA_DIR = INDATA_TOPDIR
        file_list = find_files(INDATA_DIR, FORMATTED_RUN, INPUT_FILE_NUM)
        if (len(file_list) == 0):  # No files. Now just try the input dir
            INDATA_DIR = INDATA_TOPDIR + "/"
            file_list = find_files(INDATA_DIR, FORMATTED_RUN, INPUT_FILE_NUM)

        if (VERBOSE == True):
            print str(len(file_list)) + " files found for run " + str(RUN)

        # Add jobs to workflow
        for FILEPATH in file_list:
            add_job(WORKFLOW, FILEPATH, config_dict)
コード例 #28
0
def main():
    """Set the 'status' condition for one run or a run range in RCDB.

    Python 2 command-line tool.  Status codes: 1 = approved,
    2 = approved_long, 0 = rejected, -1 = unchecked.  Requires either
    -r run_number or -R min,max plus exactly one status flag (if several
    status flags are given, the last one checked below wins silently).
    """
    # Define command line options
    parser = OptionParser(usage="update_rcdb_runstatus.py")
    parser.add_option("-p",
                      "--password",
                      dest="password",
                      help="Password to connect to RCDB")
    parser.add_option("-r",
                      "--run_number",
                      dest="run_number",
                      help="Run number to update")
    parser.add_option("-R",
                      "--run_range",
                      dest="run_range",
                      help="Run range to update with format (min,max)")
    parser.add_option("--approve",
                      dest="status_approve",
                      action="store_true",
                      help="Set status to 'approved' (1)")
    parser.add_option("--approve_long",
                      dest="status_approve_long",
                      action="store_true",
                      help="Set status to 'approved_long' (2)")
    parser.add_option("--reject",
                      dest="status_reject",
                      action="store_true",
                      help="Set status to 'rejected' (0)")
    parser.add_option("--unchecked",
                      dest="status_unchecked",
                      action="store_true",
                      help="Set status to 'unchecked' (-1)")

    # parse command lines
    (options, args) = parser.parse_args(sys.argv)

    # handle options
    min_run = None
    max_run = None
    status = None

    # A single run number sets min == max
    if options.run_number:
        try:
            min_run = int(options.run_number)
            max_run = int(options.run_number)
        except ValueError:
            print "Invalid run number = " + options.run_number
            sys.exit(0)

    if options.run_range:
        try:
            (minval, maxval) = options.run_range.split(",")
            min_run = int(minval)
            max_run = int(maxval)
        except:
            print "Invalid run range = " + options.run_range
            sys.exit(0)

    # set statuses (later flags override earlier ones if several are given)
    if options.status_approve:
        status = 1
    if options.status_approve_long:
        status = 2
    if options.status_reject:
        status = 0
    if options.status_unchecked:
        status = -1

    if status is None:
        print "Need to specify status to set!"
        parser.print_help()
        sys.exit(0)

    if max_run is None or min_run is None:
        print "Need to specify runs to set!"
        parser.print_help()
        sys.exit(0)

    # add information to the DB
    # NOTE(review): connection string looks redacted ("*****") and has no %s
    # placeholder - the "%" formatting will fail as written; confirm against
    # the original script.
    db = rcdb.RCDBProvider("mysql://*****:*****@gluondb1/rcdb" % options.password)
    if max_run == min_run:
        # single run: one auto-committed condition write
        db.add_condition(max_run, "status", status, True, auto_commit=True)
    else:
        # run range: batch the writes and commit once at the end
        #for run in xrange(min_run,max_run+1):
        query = db.session.query(Run).filter(
            Run.number.between(min_run, max_run))
        for run in query.all():
            db.add_condition(run.number,
                             "status",
                             status,
                             True,
                             auto_commit=False)
        db.session.commit()
コード例 #29
0
def main():
    """Process newly-arrived offline monitoring data (Python 2 script).

    Scans the input area for runs that have ROOT files not yet processed,
    registers missing runs in the monitoring DB (pulling start time and
    event count from RCDB), then processes each run either serially or in
    a multiprocessing pool, and finally archives logs/IDXA files/figures.
    """
    # Define command line options
    parser = OptionParser(
        usage="process_new_offline_data.py input_directory output_directory")
    parser.add_option("-p",
                      "--disable_plots",
                      dest="disable_plotting",
                      action="store_true",
                      help="Don't make PNG files for web display")
    parser.add_option(
        "-d",
        "--disable_summary",
        dest="disable_db_summary",
        action="store_true",
        help="Don't calculate summary information and store it in the DB")
    parser.add_option(
        "-s",
        "--disable_hadd",
        dest="disable_hadd",
        action="store_true",
        help="Don't sum ouptut histograms into one combined file.")
    parser.add_option("-f",
                      "--force",
                      dest="force",
                      action="store_true",
                      help="Ignore list of already processed runs")
    parser.add_option("-R",
                      "--run_number",
                      dest="run_number",
                      help="Process only this particular run number")
    parser.add_option("-V",
                      "--version_number",
                      dest="version_number",
                      help="Save summary results with this DB version ID")
    parser.add_option(
        "-v",
        "--version",
        dest="version_string",
        help=
        "Save summary results with a particular data version, specified using the string \"RunPeriod,Revision\", e.g., \"RunPeriod-2014-10,5\""
    )
    parser.add_option("-b",
                      "--min_run",
                      dest="min_run",
                      help="Minimum run number to process")
    parser.add_option("-e",
                      "--max_run",
                      dest="max_run",
                      help="Maximum run number to process")
    parser.add_option("-L",
                      "--logfile",
                      dest="logfile",
                      help="Base file name to save logs to")
    parser.add_option("-t",
                      "--nthreads",
                      dest="nthreads",
                      help="Number of threads to use")
    parser.add_option("-A",
                      "--parallel",
                      dest="parallel",
                      action="store_true",
                      help="Enable parallel processing.")
    parser.add_option("-S",
                      "--save_rest",
                      dest="save_rest",
                      action="store_true",
                      help="Save REST files to conventional location.")
    parser.add_option(
        "-M",
        "--merge-incrementally",
        dest="merge_increment",
        action="store_true",
        help="Merge ROOT files incrementally and delete old ones.")
    parser.add_option("-E",
                      "--no-end-of-job-processing",
                      dest="noendofjob_processing",
                      action="store_true",
                      help="Disable end of run processing.")
    parser.add_option("--merge-trees",
                      dest="root_trees_to_merge",
                      help="Merge these ROOT trees.")
    parser.add_option("--merge-skims",
                      dest="evio_skims_to_merge",
                      help="Merge these EVIO skims.")
    parser.add_option("--merge-hddms",
                      dest="hddm_files_to_merge",
                      help="Merge these HDDM files.")
    parser.add_option("-T",
                      "--merged-root-output-dir",
                      dest="root_output_dir",
                      help="Directory to save merged ROOT files")
    parser.add_option(
        "-B",
        "--batchfarm-tempdir",
        dest="batch_tempdir",
        action="store_true",
        help=
        "Merge ROOT files in the local directory, as required by current batch farm configuration"
    )

    (options, args) = parser.parse_args(sys.argv)

    if (len(args) < 3):
        parser.print_help()
        sys.exit(0)

    # initialize configuration
    config = ProcessMonDataConfig()
    db = datamon_db()
    config.ProcessCommandline(args, options, db)

    # try to connect to RCDB
    # NOTE(review): on failure rcdb_conn stays None but is dereferenced
    # below (rcdb_conn.get_run) - confirm the intended behavior.
    rcdb_conn = None
    try:
        rcdb_conn = rcdb.RCDBProvider("mysql://rcdb@hallddb/rcdb")
    except:
        e = sys.exc_info()[0]
        print "Could not connect to RCDB: " + str(e)

    # Set up directories and any other prep work that needs to be done
    config.BuildEnvironment()

    # Check which runs have been already processed
    rundirs_on_disk = config.BuildListOfProcessedRunsOnDisk()

    # For each run, check to see if there are any ROOT files we haven't processed yet
    # If that is true, then we need to process the run - N.B. most of our outputs
    # depend on the full results from a run
    runs_to_process = []
    for rundir in rundirs_on_disk:
        # skip directories whose name is not a run number
        try:
            runnum = int(rundir)
        except:
            continue

        # handle any options about which runs to process
        # specifying a particular run to process beats specifying a range
        if config.RUN_NUMBER is not None:
            if runnum != config.RUN_NUMBER:
                continue
        else:
            if runnum < config.MIN_RUN or runnum > config.MAX_RUN:
                continue

        if config.VERBOSE > 0:
            logging.info("checking run " + str(runnum))

        ## add blank run to DB if it doesn't exist
        if (db.GetRunID(runnum) < 0):
            db.CreateRun(runnum)
            # skip run if it's not in RCDB for some weird reason
            if rcdb_conn.get_run(runnum) is None:
                continue
            # add run start time, needed for monitoring web pages
            rcdb_run = rcdb_conn.get_run(runnum)
            run_properties = {}
            run_properties['start_time'] = rcdb_run.start_time
            run_properties['num_events'] = rcdb_run.get_condition_value(
                'event_count')
            db.UpdateRunInfo(runnum, run_properties)

        ## make sure we have a directory to store some meta-information
        rootfiles_already_processed = []  # let's not do this anymore
        # NOTE(review): both branches below compute the same log_dir -
        # presumably the "mc" case once differed; confirm.
        if config.REVISION == "mc":
            log_dir = join(config.OUTPUT_DIRECTORY, "log", rundir)
        else:
            log_dir = join(config.OUTPUT_DIRECTORY, "log", rundir)
        if isfile(join(log_dir, "processed_files.dat")):
            rootfiles_already_processed = pickle.load(
                open(join(log_dir, "processed_files.dat"), "r"))

        #if config.REVISION == "mc":
        #    misc_dir = join(config.INPUT_DIRECTORY,"misc","%06d"%(int(rundir)))
        #else:
        #    misc_dir = join(config.INPUT_DIRECTORY,config.REVISION,"misc",rundir)
        #rootfiles_already_processed = []
        #if not os.path.exists(misc_dir):
        #    os.system("mkdir -p " + misc_dir)
        #if not os.path.isdir(misc_dir):
        #    logging.error("file %s exists and is not a directory, skipping this run ..."%misc_dir)
        #    continue
        #else:
        #    # check to see if we've already processed some of the ROOT files
        #    if isfile(join(misc_dir,"processed_files.dat")):
        #        rootfiles_already_processed = pickle.load( open(join(misc_dir,"processed_files.dat"),"r") )
        #    #except Exception, e:
        #    #    logging.error("Couldn't load list of processed files: %s"%str(e))

        ## figure out which files for this run are currently on disk
        rootfile_map = config.BuildROOTFileList(rundir)
        if not config.FORCE_PROCESSING and len(rootfile_map) == 0:
            continue

        ## figure out which files are new from the last run
        rootfiles_to_process = [
            f for f in sorted(rootfile_map.keys())
            if f not in rootfiles_already_processed
        ]

        #print "ROOTFILES_ALREADY_PROCESSED = " + str(rootfiles_already_processed)
        #print "ROOTFILE_MAP = " + str(rootfile_map)
        #print "ROOTFILES_TO_PROCESS = " + str(rootfiles_to_process)

        ## if there's new information, or if the user wants us to anyway,
        ## add the run to the list of the ones we should process
        if config.FORCE_PROCESSING or len(rootfiles_to_process) > 0:
            runs_to_process.append((runnum, config, rootfile_map))

    ## loop DONE

    ## Start processing all the runs!
    if config.VERBOSE > 0:
        logging.info("%d runs to process..." % (len(runs_to_process)))
    if len(runs_to_process) == 0:
        sys.exit(0)

    if options.parallel is None:
        # process serially
        for run_args in runs_to_process:
            ProcessOfflineData(run_args)
    else:
        # process in parallel
        p = multiprocessing.Pool(config.NTHREAD)
        p.map(ProcessOfflineData, runs_to_process)

    # save tarballs of log files and PNGs
    if config.EOR_PROCESSING and len(runs_to_process) > 0:
        # save log files
        logdir = join(config.INPUT_SMALLFILE_DIRECTORY, config.REVISION, "log")
        if isdir(logdir):
            os.system("tar cf %s/%s/log.tar %s" %
                      (config.INPUT_DIRECTORY, config.REVISION,
                       logdir))  # overwrite any existing file
        os.system("jcache put %s/%s/log.tar" %
                  (config.INPUT_DIRECTORY, config.REVISION))  # save to tape
        # save IDXA files (for EventStore)
        idxadir = join(config.INPUT_SMALLFILE_DIRECTORY, config.REVISION,
                       "IDXA")
        if isdir(idxadir):
            os.system("tar cf %s/%s/IDXA.tar %s" %
                      (config.INPUT_DIRECTORY, config.REVISION,
                       idxadir))  # overwrite any existing file
        os.system("jcache put %s/%s/IDXA.tar" %
                  (config.INPUT_DIRECTORY, config.REVISION))  # save to tape
        # save web figures
        os.system("tar cf %s/%s/web_figures.tar %s/Run*" %
                  (config.INPUT_DIRECTORY, config.REVISION,
                   config.OUTPUT_DIRECTORY))  # overwrite any existing file
コード例 #30
0
ファイル: create_empty_db.py プロジェクト: rjones30/rcdb
if __name__ == "__main__":
    # Command-line interface for a destructive utility: it refuses to run
    # unless the user explicitly passes --i-am-sure.
    parser = argparse.ArgumentParser(
        description='The utility to create RCDB empty database. '
                    'Erases (!) the existing database')
    parser.add_argument("-c", "--connection", help="The connection string for mysql database")
    parser.add_argument('--i-am-sure', action='store_true', help="Place it to run the script")
    parser.add_argument('--add-def-con', action='store_true', default=False,
                        help="Add default conditions like run start time, event_count, event_rate")
    args = parser.parse_args()

    if not args.i_am_sure:
        # Warn and bail out without touching the database.
        for warning_line in ("This script CLEARS ALL DATA if the database exists",
                             "To execute this script please add the flag: ",
                             '  --i-am-sure',
                             "General usage:"):
            print(warning_line)
        parser.print_help()
        parser.print_usage()
        exit(1)

    # Wipe and recreate the schema.
    print("creating database:")
    db = rcdb.RCDBProvider(args.connection, check_version=False)
    rcdb.provider.destroy_all_create_schema(db)
    print("database created")

    # Optionally pre-populate the standard condition types.
    if args.add_def_con:
        rcdb.create_condition_types(db)
        print("default conditions filled")