async def jsroot_legacy(request, notOlderThan):
    """Returns a JSON representation of a ROOT histogram for provided run:lumi/dataset/path combination"""
    run, lumi = parse_run_lumi(request.match_info['run'])
    full_path = request.match_info['path']

    # This is caused by a double slash in the url.
    # Use startswith() instead of indexing so an empty path can't raise IndexError.
    if full_path.startswith('/'):
        full_path = full_path[1:]

    # Separate dataset and a path within the root file
    parts = full_path.split('/')
    dataset = '/' + '/'.join(parts[0:3])
    path = '/'.join(parts[3:])

    me_description = MEDescription(dataset, path, run, lumi)
    options = RenderingOptions(json=True)

    data, error = await service.get_rendered_json([me_description], options, notOlderThan=notOlderThan)

    # Renderer signals failure modes via sentinel byte strings.
    if data == b'crashed':
        return web.HTTPInternalServerError()
    elif data == b'error':
        return web.HTTPBadRequest()

    return web.json_response(data, status=200 if error == 0 else 500)
async def samples_legacy(request, notOlderThan):
    """Returns a list of matching run/dataset pairs based on provided regex search."""
    run, lumi = parse_run_lumi(request.rel_url.query.get('run'))
    dataset = request.rel_url.query.get('match')

    samples = await service.get_samples(run, dataset, lumi, notOlderThan=notOlderThan)

    items = []
    for sample in samples:
        # The legacy API folds a non-zero lumi into the run field as "run:lumi".
        if sample.lumi == 0:
            run_repr = str(sample.run)
        else:
            run_repr = '%s:%s' % (sample.run, sample.lumi)
        items.append({'run': run_repr, 'dataset': sample.dataset})

    return web.json_response({'samples': [{'type': 'offline_data', 'items': items}]})
async def render_overlay_v1(request, notOlderThan):
    """Returns a PNG image for provided run:lumi/dataset/path combination"""
    options = RenderingOptions.from_dict(request.rel_url.query)

    me_descriptions = []
    for obj in request.rel_url.query.getall('obj', []):
        # Each obj looks like "/<run[:lumi]>/<dataset (3 segments)>/<path...>" — verify against callers.
        segments = obj.split('/')
        run, lumi = parse_run_lumi(segments[1])
        dataset = '/' + '/'.join(segments[2:5])
        me_path = '/'.join(segments[5:])
        me_descriptions.append(MEDescription(dataset, me_path, run, lumi))

    data, error = await service.get_rendered_image(me_descriptions, options, notOlderThan=notOlderThan)

    # Renderer signals failure modes via sentinel byte strings.
    if data == b'crashed':
        return web.HTTPInternalServerError()
    if data == b'error':
        return web.HTTPBadRequest()

    return web.Response(body=data, content_type='image/png', status=200 if error == 0 else 500)
async def render_v1(request, notOlderThan):
    """Returns a PNG image for provided run:lumi/dataset/path combination"""
    run, lumi = parse_run_lumi(request.match_info['run'])
    full_path = request.match_info['path']
    options = RenderingOptions.from_dict(request.rel_url.query)

    # Separate dataset and a path within the root file
    segments = full_path.split('/')
    dataset = '/' + '/'.join(segments[:3])
    me_path = '/'.join(segments[3:])
    me_description = MEDescription(dataset, me_path, run, lumi)

    data, error = await service.get_rendered_image([me_description], options, notOlderThan=notOlderThan)

    # Renderer signals failure modes via sentinel byte strings.
    if data == b'crashed':
        return web.HTTPInternalServerError()
    if data == b'error':
        return web.HTTPBadRequest()

    return web.Response(body=data, content_type='image/png', status=200 if error == 0 else 500)
async def archive_v1(request, notOlderThan):
    """Returns a directory listing for provided run:lumi/dataset/path combination."""
    # NOTE(review): a second `archive_v1` is defined later in this file and shadows
    # this one at import time — confirm which definition is the intended handler.
    run, lumi = parse_run_lumi(request.match_info['run'])
    full_path = request.match_info['path']
    search = request.rel_url.query.get('search')

    # Separate dataset and a path within the root file
    segments = full_path.split('/')
    dataset = '/' + '/'.join(segments[:3])
    path = '/'.join(segments[3:])

    data = await service.get_archive(run, dataset, path, search, lumi, notOlderThan=notOlderThan)

    if not data:
        return web.HTTPNotFound()

    entries = []
    for name, me_count in data.dirs:
        entries.append({'subdir': name, 'me_count': me_count})
    for name, path, layout, qteststatuses in data.objs:
        entries.append({
            'name': name,
            'path': path,
            'layout': layout,
            'qtstatuses': [x for x in qteststatuses],
        })

    return web.json_response({'data': entries})
async def archive_v1(request, notOlderThan):
    """Returns a directory listing for provided run:lumi/dataset/path combination."""
    run, lumi = parse_run_lumi(request.match_info['run'])
    full_path = request.match_info['path']
    search = request.rel_url.query.get('search')

    # Separate dataset and a path within the root file
    parts = full_path.split('/')
    dataset = '/' + '/'.join(parts[0:3])
    path = '/'.join(parts[3:])

    data = await service.get_archive(run, dataset, path, search, lumi, notOlderThan=notOlderThan)

    if not data:
        return web.HTTPNotFound()

    result = {'data': []}
    result['data'].extend({'subdir': name, 'me_count': me_count} for name, me_count in data.dirs)

    for name, path, layout, qteststatuses in data.objs:
        # Idiom fix: compare against None with `is not` (PEP 8), not `!=`.
        obj = {
            'name': name,
            'path': path,
            # Expose only the layout's name (or None) rather than the object itself.
            'layout': layout.name if layout is not None else None,
        }
        if layout is not None:
            # Drop unset draw options so the client sees only explicit overrides.
            obj['draw'] = {k: v for k, v in zip(layout.draw._fields, layout.draw) if v is not None}
            obj['overlays'] = layout.overlays
            obj['description'] = layout.description
        obj['qtstatuses'] = [x for x in qteststatuses]
        result['data'].append(obj)

    return web.json_response(result)
async def jsroot_overlay(request, notOlderThan):
    """Returns a list of JSON representations of ROOT histograms for provided run:lumi/dataset/path combinations"""
    me_descriptions = []
    for obj in request.rel_url.query.getall('obj', []):
        # Each obj looks like "/<run[:lumi]>/<dataset (3 segments)>/<path...>" — verify against callers.
        segments = obj.split('/')
        run, lumi = parse_run_lumi(segments[1])
        me_descriptions.append(
            MEDescription('/' + '/'.join(segments[2:5]), '/'.join(segments[5:]), run, lumi))

    options = RenderingOptions(json=True)
    data, error = await service.get_rendered_json(me_descriptions, options, notOlderThan=notOlderThan)

    # Renderer signals failure modes via sentinel byte strings.
    if data == b'crashed':
        return web.HTTPInternalServerError()
    if data == b'error':
        return web.HTTPBadRequest()

    return web.json_response(data, status=200 if error == 0 else 500)