# LifeCycle test script: times a DBS3 reader query (listDatasets) rooted at the
# payload's 'InitialRequest' string, reporting timing stats over a named pipe.
# NOTE(review): this line was whitespace-mangled (whole script collapsed onto one
# line); reformatted here without changing any code token.
from LifeCycleTests.LifeCycleTools.Timing import TimingStat
from LifeCycleTests.LifeCycleTools.OptParser import get_command_line_options
from LifeCycleTests.LifeCycleTools.StatsClient import StatsPipeClient
import os
import sys
from random import shuffle  # NOTE(review): 'shuffle' is unused in the visible chunk — likely used further down; confirm
options = get_command_line_options(__name__, sys.argv)
# Reader endpoint; DBS_READER_URL env var overrides the default int/global instance.
config = {'url':os.environ.get("DBS_READER_URL", "https://cmsweb.cern.ch:8443/dbs/int/global/DBSReader/")}
# NOTE(review): 'create_api' and 'PayloadHandler' are not imported in the visible
# chunk — presumably provided by a LifeCycleTools factory/handler module; confirm.
api = create_api('DbsApi', config=config)
payload_handler = PayloadHandler()
payload_handler.load_payload(options.input)
# Stats are streamed to the harness through a named pipe taken from the payload.
named_pipe = payload_handler.payload['workflow']['NamedPipe']
stat_client = StatsPipeClient(named_pipe)
initial = payload_handler.payload['workflow']['InitialRequest']
print("Initial request string: %s" % (initial))
## first step (list all datasets in DBS3 below the 'initial' root)
timing = {'stats':{'api':'listDatasets', 'query':str(initial)}}
# NOTE(review): the 'with' body is cut off at the end of this chunk — the timed
# listDatasets call presumably follows; do not treat this block as complete.
with TimingStat(timing, stat_client) as timer:
# LifeCycle test script: times listPrimaryDSTypes for the payload's dataset
# against a DBS3 reader, reporting stats over a named pipe.
# NOTE(review): line was whitespace-mangled (script collapsed onto one line);
# reformatted without changing code tokens. Indentation of the 'with' body and
# the statement after it is reconstructed — confirm against the original file.
import os
import sys
import tempfile  # NOTE(review): unused in the visible chunk — likely used further down; confirm
# NOTE(review): 'get_command_line_options', 'create_api', 'PayloadHandler',
# 'StatsPipeClient' and 'TimingStat' are not imported in this chunk — the top
# of the file is probably cut off (compare the near-identical sibling script).
options = get_command_line_options(__name__, sys.argv)
# Reader endpoint; DBS_READER_URL env var overrides the default int/global instance.
config = { 'url': os.environ.get("DBS_READER_URL", "https://cmsweb.cern.ch/dbs/int/global/DBSReader/") }
api = create_api('DbsApi', config=config)
payload_handler = PayloadHandler()
payload_handler.load_payload(options.input)
# Stats are streamed to the harness through a named pipe taken from the payload.
named_pipe = payload_handler.payload['workflow']['NamedPipe']
stat_client = StatsPipeClient(named_pipe)
initial = payload_handler.payload['workflow']['dataset']
## list primary data type
timing = {'stats': {'query': initial, 'api': 'listPrimaryDSTypes'}}
with TimingStat(timing, stat_client) as timer:
    ds_type = api.listPrimaryDSTypes(dataset=initial)[0]
# Server-side timing as reported by the last request; placement outside the
# 'with' is assumed from the collapsed whitespace — TODO confirm.
request_processing_time, request_time = api.requestTimingInfo
# LifeCycle test script: times listPrimaryDSTypes for the payload's dataset
# against a DBS3 reader, reporting stats over a named pipe.
# NOTE(review): line was whitespace-mangled (script collapsed onto one line);
# reformatted without changing code tokens. Indentation of the 'with' body and
# the statement after it is reconstructed — confirm against the original file.
from LifeCycleTests.LifeCycleTools.PayloadHandler import PayloadHandler, increase_interval
from LifeCycleTests.LifeCycleTools.Timing import TimingStat
from LifeCycleTests.LifeCycleTools.OptParser import get_command_line_options
from LifeCycleTests.LifeCycleTools.StatsClient import StatsPipeClient
import os
import sys
import tempfile  # NOTE(review): 'tempfile' and 'increase_interval' are unused in the visible chunk — likely used further down; confirm
options = get_command_line_options(__name__, sys.argv)
# Reader endpoint; DBS_READER_URL env var overrides the default int/global instance.
config = {'url':os.environ.get("DBS_READER_URL", "https://cmsweb.cern.ch/dbs/int/global/DBSReader/")}
# NOTE(review): 'create_api' is not imported anywhere in this chunk even though
# the other project imports are present — presumably from a LifeCycleTools
# API-factory module; confirm and add the import.
api = create_api('DbsApi', config=config)
payload_handler = PayloadHandler()
payload_handler.load_payload(options.input)
# Stats are streamed to the harness through a named pipe taken from the payload.
named_pipe = payload_handler.payload['workflow']['NamedPipe']
stat_client = StatsPipeClient(named_pipe)
initial = payload_handler.payload['workflow']['dataset']
## list primary data type
timing = {'stats':{'query' : initial, 'api' : 'listPrimaryDSTypes'}}
with TimingStat(timing, stat_client) as timer:
    ds_type = api.listPrimaryDSTypes(dataset=initial)[0]
# Server-side timing as reported by the last request; placement outside the
# 'with' is assumed from the collapsed whitespace — TODO confirm.
request_processing_time, request_time = api.requestTimingInfo
# LifeCycle test script head: injects block dumps into a DBS3 writer instance,
# optionally collecting statistics when run as a stress test.
# NOTE(review): line was whitespace-mangled (script collapsed onto one line);
# reformatted without changing code tokens. The script continues past this chunk.
import os
import sys
import time  # NOTE(review): unused in the visible chunk — likely used further down; confirm
# NOTE(review): 'get_command_line_options', 'create_api', 'PayloadHandler' and
# 'StatsPipeClient' are not imported in this chunk — the project imports are
# probably cut off above; confirm.
options = get_command_line_options(__name__, sys.argv)
# Writer endpoint; DBS_WRITER_URL env var overrides the default int/global instance.
config = { 'url': os.environ.get("DBS_WRITER_URL", "https://cmsweb.cern.ch:8443/dbs/int/global/DBSWriter/") }
api = create_api('DbsApi', config=config)
payload_handler = PayloadHandler()
payload_handler.load_payload(options.input)
# check if running a stress or integration test (Means collect statistics or not)
# EAFP: a stress-test payload carries a 'NamedPipe' entry; integration runs do
# not, in which case stats collection is disabled (stat_client = None).
try:
    named_pipe = payload_handler.payload['workflow']['NamedPipe']
except KeyError:
    stat_client = None
else:
    stat_client = StatsPipeClient(named_pipe)
# Block dump to inject and how often to repeat the injection.
block_dump = payload_handler.payload['workflow']['DBS']
injection_repetition_rate = payload_handler.payload['workflow'][
    'InjectionRepetitionRate']