Code example #1
0
File: dbs3GetBlocks.py — Project: giffels/DBS
## Step script: enumerate all blocks in DBS3 below the 'initial' dataset
## root and append one cloned workflow payload per block for the next stage.
api = create_api('DbsApi', config=config)

payload_handler = PayloadHandler()

payload_handler.load_payload(options.input)

# Timing statistics are reported through a named pipe declared in the
# workflow section of the payload.
named_pipe = payload_handler.payload['workflow']['NamedPipe']

stat_client = StatsPipeClient(named_pipe)

initial = payload_handler.payload['workflow']['dataset']
# print() form works in both Python 2 and 3 for a single argument;
# matches the print() style used by the sibling listFiles example.
print("Dataset name: %s" % (initial))

timing = {'stats': {'api': 'listBlocks', 'query': str(initial)}}

## next step (list all blocks in DBS3 below the 'initial' root)
# BUG FIX: the original omitted 'as timer', so the later timer.* call
# raised NameError — the context manager result must be bound.
with TimingStat(timing, stat_client) as timer:
    blocks = api.listBlocks(dataset=initial)

# NOTE(review): the original called timer.stat_to_timer(); the sibling
# listFiles example calls stat_to_server() at this point, so stat_to_timer
# looks like a typo — confirm against the TimingStat API.
timer.stat_to_server()

print("Found %s blocks" % (len(blocks)))

# Fan out: one payload clone per block, tagged with its block_name.
for block in blocks:
    p = payload_handler.clone_payload()
    p['workflow']['block_name'] = block['block_name']
    payload_handler.append_payload(p)

payload_handler.save_payload(options.output)
Code example #2
0
## Final step: enumerate every file in DBS3 below the 'initial' dataset
## root and derive one per-file workflow payload from each result.
api = create_api('DbsApi', config=config)

handler = PayloadHandler()

handler.load_payload(options.input)

# The stats client reports timing measurements through a named pipe
# carried in the workflow section of the payload.
pipe_path = handler.payload['workflow']['NamedPipe']

stats = StatsPipeClient(pipe_path)

initial = handler.payload['workflow']['dataset']

timing = {'stats': {'api': 'listFiles', 'query': initial}}

## last step (list all files in DBS3 below the 'initial' root)
with TimingStat(timing, stats) as timer:
    files = api.listFiles(dataset=initial, detail=True)

# Fold in the server-side timing metadata exposed by the DBS API client.
extra_stats = {
    'server_request_timing': float(api.request_processing_time) / 1000000.0,
    'server_request_timestamp': (api.request_time),
    'request_content_length': api.content_length,
}
timer.update_stats(extra_stats)

timer.stat_to_server()

print("Found %s files" % (len(files)))

# One cloned payload per file; the dataset key is replaced by the
# file's logical_file_name for the downstream stage.
for this_file, interval in zip(files, increase_interval(0.0, 0.1)):
    p = handler.clone_payload()
    p['workflow']['logical_file_name'] = this_file['logical_file_name']
    del p['workflow']['dataset']
    #p['workflow']['Intervals']['getFileParents'] += interval
Code example #3
0
# Migration step: submit a migration request for the 'initial' dataset to
# the configured DBS reader instance, timing both the overall migration
# bookkeeping and the submit call itself.
# NOTE(review): this chunk is truncated — the final update_stats({...})
# dict literal continues past the visible end of the file.
payload_handler = PayloadHandler()

payload_handler.load_payload(options.input)

# Timing statistics are reported through a named pipe declared in the
# workflow section of the payload.
named_pipe = payload_handler.payload['workflow']['NamedPipe']

stat_client = StatsPipeClient(named_pipe)

initial = payload_handler.payload['workflow']['dataset']

migration_timing = {'migration_stats': {'data': str(initial)}}

# Target DBS reader; overridable via the environment for non-default
# (e.g. testbed) deployments.
migration_url = os.environ.get(
    "DBS_READER_URL", "https://cmsweb.cern.ch:8443/dbs/int/global/DBSReader/")

# Outer timer covers the whole migration bookkeeping; the inner timer
# isolates the submitMigration request itself.
with TimingStat(migration_timing, stat_client,
                stats_name="migration_stats") as migration_timer:
    request_timing = {'stats': {'api': 'submit', 'query': str(initial)}}

    migration_input = dict(migration_url=migration_url,
                           migration_input=initial)
    print("Putting migration_request: %s" % (migration_input))

    with TimingStat(request_timing, stat_client) as request_timer:
        migration_task = api.submitMigration(migration_input)

    # Server-reported values; presumably microseconds, converted to
    # seconds below — TODO confirm against requestTimingInfo docs.
    request_processing_time, request_time = api.requestTimingInfo
    request_timer.update_stats({
        'server_request_timing':
        float(request_processing_time) / 1000000.0,
        'server_request_timestamp':
        float(request_time) / 1000000.0,