Example #1
import json

def poll_request():
    inp, out = options()

    with open(inp, 'r') as istream:
        payload = json.loads(istream.read())

    # Emulate polling for the PID by waiting a random amount of time after
    # making the request. In a real workflow you would instead poll DAS with
    # the PID in question to see whether it had finished.
    host = payload['workflow']['das_server']
    query = payload['workflow']['query']
    print "poll_request, host=%s, query=%s" % (host, query)
    timestamp = payload['workflow']['timestamp']
    data = get_data(host, query)
    payload['workflow']['das_data'] = data

    with open(out, 'w') as ostream:
        ostream.write(json.dumps(payload))
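
The function above only emulates the polling step. For reference, a minimal sketch of a real polling loop, where check_status(host, pid) is a hypothetical helper standing in for the actual DAS status call:

import time

def wait_for_pid(host, pid, interval=5, timeout=300):
    # Poll the DAS server until the request identified by pid has
    # finished, sleeping between attempts. check_status() is a
    # hypothetical stand-in for the real DAS status call.
    deadline = time.time() + timeout
    while time.time() < deadline:
        if check_status(host, pid):
            return True
        time.sleep(interval)
    raise RuntimeError("DAS request %s did not finish within %s seconds" % (pid, timeout))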
Example #2
import copy
import json
import time

def spawn_workflow():
    inp, out = options()

    with open(inp, "r") as istream:
        payload = json.loads(istream.read())

    host = payload["workflow"].get("DASServer", "https://cmsweb.cern.ch")
    query = payload["workflow"].get("DASSeedQuery", "/A*/*/*")
    ntests = int(payload["workflow"].get("DASNtests", 5))
    lkeys = payload["workflow"].get(
        "DASLookupKeys", ["dataset", "block", "file", "summary", "file,lumi", "file,run,lumi"]
    )
    print "host=%s, seed=%s, ntests=%s, lkeys=%s" % (host, query, ntests, lkeys)

    # Fetch the list of datasets from the DAS server
    res = get_data(host, query)
    if res["status"] != "ok":
        msg = "Unable to fetch list of datasets from DAS server, "
        msg += "status=%s" % res["status"]
        raise Exception(msg)

    # Parse the returned data and extract the list of datasets
    datasets = [r["dataset"][0]["name"] for r in res["data"]]

    # Generate workflows with DAS queries
    payloads = []
    for query in gen_queries(datasets[:ntests], lkeys):
        pld = copy.deepcopy(payload)
        pld["workflow"]["query"] = query
        pld["workflow"]["das_server"] = host
        pld["workflow"]["timestamp"] = int(time.time())
        print(query)
        payloads.append(pld)

    with open(out, "w") as ostream:
        ostream.write(json.dumps(payloads))
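
Both examples rely on helpers (options(), get_data(), gen_queries()) defined elsewhere in the module. As a sketch of what gen_queries() might look like, assuming the usual DAS query form "<lookup> dataset=<name>" (e.g. "file,run,lumi dataset=/A/B/C"):

def gen_queries(datasets, lkeys):
    # Yield one DAS query per (dataset, lookup key) pair, e.g.
    # "block dataset=/A/B/C". The query grammar here is an assumption,
    # not taken from the original module.
    for dataset in datasets:
        for lkey in lkeys:
            yield "%s dataset=%s" % (lkey, dataset)

With the defaults above (5 seed datasets and 6 lookup keys), spawn_workflow() would then write 30 payloads to the output file.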