def search(self, database, string):
    '''Search tags, payloads and global tags whose fields match the given
    substring (case-insensitive), returning at most `limit` rows per table.
    '''
    string = '%%%s%%' % string.lower()

    tags = self.connections[database].fetch('''
        select * from (
            select name, time_type, object_type, synchronization, insertion_time, description
            from tag
            where lower(name) like :s
                or lower(object_type) like :s
                or lower(description) like :s
        )
        where rownum <= :s
    ''', (string, string, string, limit))

    payloads = self.connections[database].fetch('''
        select * from (
            select hash, object_type, version, insertion_time
            from payload
            where lower(hash) like :s
                or lower(object_type) like :s
        )
        where rownum <= :s
    ''', (string, string, limit))

    gts = self.connections[database].fetch('''
        select * from (
            select name, release, insertion_time, description
            from global_tag
            where lower(name) like :s
                or lower(release) like :s
                or lower(description) like :s
        )
        where rownum <= :s
    ''', (string, string, string, limit))

    service.setResponseJSON()
    return json.dumps({
        'tags': {
            'headers': ['Tag', 'Time Type', 'Object Type', 'Synchronization', 'Insertion Time', 'Description'],
            'data': tags,
        },
        'payloads': {
            'headers': ['Payload', 'Object Type', 'Version', 'Insertion Time'],
            'data': payloads,
        },
        'gts': {
            'headers': ['Global Tag', 'Release', 'Insertion Time', 'Description'],
            'data': gts,
        },
    }, default=lambda obj: obj.strftime('%Y-%m-%d %H:%M:%S,%f') if isinstance(obj, datetime.datetime) else None)
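# A minimal, standalone sketch (not part of the service code) of the
# json.dumps(default=...) pattern used in search() above: datetime values
# returned by the database are not JSON serializable on their own, so the
# default callable converts them to formatted strings. The function name and
# the row data below are made up purely for illustration.
def _example_serialize_rows_with_datetimes():
    import datetime
    import json

    rows = [('SomeTag_v1', datetime.datetime(2012, 5, 14, 12, 0, 0, 123456))]
    return json.dumps(
        {'data': rows},
        default=lambda obj: obj.strftime('%Y-%m-%d %H:%M:%S,%f')
        if isinstance(obj, datetime.datetime) else None)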
def hardwarearchitecture_list(self, *args, **kwargs):
    """Returns the list of currently active hardware architectures
    (Mac OS architectures are excluded).

    No arguments required. Response example:

        {
            "body": {
                "hardware_architectures": [
                    "slc5_amd64_gcc434",
                    "slc5_amd64_gcc451",
                    "slc5_ia32_gcc434",
                    "slc5_amd64_gcc462"
                ]
            },
            "header": {
                "date_updated": "2012-05-14T00:00:00"
            }
        }
    """
    ha = record_lib.hardware_architecture_provider()
    ha_updated = Settings.HARDWARE_ARCHITECTURES_UPDATED
    rez = {
        'header': {
            'date_updated': ha_updated.strftime('%Y-%m-%dT%H:%M:%S'),
        },
        'body': {
            'hardware_architectures': ha,
        },
    }
    return service.setResponseJSON(rez)
def softwarerelease_list(self, include_prereleases=True, from_major=None,
                         till_major=None, *args, **kwargs):
    include_prereleases = bool(include_prereleases)
    if from_major is not None:
        from_major = int(from_major)
    if till_major is not None:
        till_major = int(till_major)

    srm = record_lib.SoftwareReleaseManager()
    software_releases_list = srm.list_software_releases(
        include_prereleases, from_major, till_major)
    flat_software_releases_list = self._flat_software_releases_list(
        software_releases_list)

    rez = {
        'header': {
            'filter_arguments': {
                'include_prereleases': include_prereleases,
                'from_major': from_major,
                'till_major': till_major,
            },
        },
        'body': {
            'software_releases': flat_software_releases_list,
        },
    }
    return service.setResponseJSON(rez)
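# Note on bool(include_prereleases) above: when the value arrives as an HTTP
# query parameter it is a string, and any non-empty string (including 'false'
# and '0') is truthy, so the filter cannot be switched off through the query
# string. A sketch of a stricter parser follows; _parse_bool_param is a
# hypothetical helper name, not part of the existing code.
def _parse_bool_param(value, default=True):
    """Interpret common query-string spellings of a boolean."""
    if value is None:
        return default
    if isinstance(value, bool):
        return value
    text = str(value).strip().lower()
    if text in ('1', 'true', 'yes', 'on'):
        return True
    if text in ('', '0', 'false', 'no', 'off'):
        return False
    raise ValueError('Cannot interpret %r as a boolean' % (value, ))
# With such a helper, softwarerelease_list() and softwarerelease_dict() could
# use include_prereleases = _parse_bool_param(include_prereleases) so that
# ?include_prereleases=false is honoured.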
def dumpDatabase(self):
    '''Dump contents of the database for testing.
    '''
    logging.debug('server::dumpDatabase()')
    return service.setResponseJSON(dropBox.dumpDatabase())
def softwarerelease_dict(self, include_prereleases=True, from_major=None,
                         till_major=None):
    include_prereleases = bool(include_prereleases)
    if from_major is not None:
        from_major = int(from_major)
    if till_major is not None:
        till_major = int(till_major)

    srm = record_lib.SoftwareReleaseManager()
    software_releases_by_architecture = srm.list_software_releases(
        include_prereleases, from_major, till_major,
        group_by_hardware_architecture=True)

    flat_software_releases_by_architecture = []
    for hardware_architecture_name, software_releases_list in software_releases_by_architecture.items():
        flat_software_releases_list = self._flat_software_releases_list(
            software_releases_list)
        flat_software_releases_by_architecture.append({
            'hardware_architecture_name': hardware_architecture_name,
            'software_releases': flat_software_releases_list,
        })

    rez = {
        'header': {
            'filter_arguments': {
                'include_prereleases': include_prereleases,
                'from_major': from_major,
                'till_major': till_major,
            },
        },
        'body': {
            'hardware_architectures': flat_software_releases_by_architecture,
        },
    }
    return service.setResponseJSON(rez)
def softwarerelease_majorversion_list(self):
    rez = {
        'header': {},
        'body': {
            'software_release_major_versions':
                record_lib.SoftwareReleaseManager().major_version_list(),
        },
    }
    return service.setResponseJSON(rez)
def list_(self, database, type_, item):
    '''List either the IOVs of a tag or the record/label/tag mappings of a
    global tag.
    '''
    service.setResponseJSON()

    if type_ == 'tags':
        time_type = self.connections[database].fetch('''
            select time_type
            from tag
            where name = :s
        ''', (item, ))[0][0]
        return json.dumps({
            'headers': ['Since', 'Insertion Time', 'Payload'],
            'data': _render_sinces(time_type, self.connections[database].fetch('''
                select * from (
                    select since, insertion_time, payload_hash
                    from iov
                    where tag_name = :s
                    order by since desc, insertion_time desc
                )
                where rownum <= :s
            ''', (item, limit))),
        }, default=lambda obj: obj.strftime('%Y-%m-%d %H:%M:%S,%f') if isinstance(obj, datetime.datetime) else None)

    if type_ == 'gts':
        return json.dumps({
            'headers': ['Record', 'Label', 'Tag'],
            'data': self.connections[database].fetch('''
                select record, label, tag_name
                from global_tag_map
                where global_tag_name = :s
            ''', (item, )),
        })

    raise Exception('Wrong type requested for listing.')
def index(self, **kwargs):
    if 'help' in kwargs:
        return self.help()
    if 'up' in kwargs:
        return self.up()
    try:
        return service.setResponseJSON(self.getLumi(**kwargs))
    except lumi.BusyLumiError:
        raise cherrypy.HTTPError(
            503, 'getLumi is busy processing other requests. Please try again later.')
def record_container_map(self, hardware_architecture_name, software_release_name,
                         *args, **kwargs):
    cache_key = hardware_architecture_name + '#record#container#map#' + software_release_name
    cache_rez = ServerCache.cache_get(cache_key, max_age=31536000)  # max age ~1 year

    if cache_rez is None:
        # Cache miss: compute the record -> container mapping and cache it.
        record_container_map = record_lib.ContainerRecordProvider().provide(
            hardware_architecture_name, software_release_name)
        record_container_list = [
            {'record_name': record, 'container': container}
            for record, container in record_container_map.items()
        ]
        cached_at = datetime.datetime.now()
        for_cache = {
            'record_container_list': record_container_list,
            'cached_at': cached_at,
            'hardware_architecture_name': hardware_architecture_name,
            'software_release_name': software_release_name,
        }
        ServerCache.cache_put(cache_key, for_cache)
    else:
        record_container_list = cache_rez['record_container_list']
        cached_at = cache_rez['cached_at']
        if cache_rez.get('hardware_architecture_name') != hardware_architecture_name or \
           cache_rez.get('software_release_name') != software_release_name:
            # The cached entry belongs to a different (architecture, release)
            # pair, i.e. a cache-key collision: recompute and overwrite it.
            record_container_map = record_lib.ContainerRecordProvider().provide(
                hardware_architecture_name, software_release_name)
            record_container_list = [
                {'record_name': record, 'container': container}
                for record, container in record_container_map.items()
            ]
            cached_at = datetime.datetime.now()
            for_cache = {
                'record_container_list': record_container_list,
                'cached_at': cached_at,
                'hardware_architecture_name': hardware_architecture_name,
                'software_release_name': software_release_name,
            }
            ServerCache.cache_put(cache_key, for_cache)

    rez = {
        'header': {
            'cached_at': cached_at.strftime('%Y-%m-%dT%H:%M:%S'),
            'hardware_architecture_name': hardware_architecture_name,
            'software_release_name': software_release_name,
        },
        'body': {
            'record_container_map': record_container_list,
        },
    }
    return service.setResponseJSON(rez)
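# The compute-and-cache block in record_container_map() appears twice (cache
# miss and cache-key collision). A sketch of factoring it into a single helper;
# _build_and_cache_record_container_list is a hypothetical name, and the sketch
# assumes the same record_lib and ServerCache interfaces used above.
def _build_and_cache_record_container_list(self, cache_key,
                                            hardware_architecture_name,
                                            software_release_name):
    # Recompute the record -> container mapping and refresh the cache entry.
    record_container_map = record_lib.ContainerRecordProvider().provide(
        hardware_architecture_name, software_release_name)
    record_container_list = [
        {'record_name': record, 'container': container}
        for record, container in record_container_map.items()
    ]
    cached_at = datetime.datetime.now()
    ServerCache.cache_put(cache_key, {
        'record_container_list': record_container_list,
        'cached_at': cached_at,
        'hardware_architecture_name': hardware_architecture_name,
        'software_release_name': software_release_name,
    })
    return record_container_list, cached_at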
def getFileList(self, backend):
    '''Returns a JSON list of files yet to be pulled from online
    for the matching backend. Called from online.

    The name of each file is the SHA1 checksum of the file itself.
    '''
    logging.debug('-' * 80)
    logging.debug('server::getFileList(%s)', backend)
    return service.setResponseJSON(dropBox.getFileList(backend))
def api(self):
    '''Returns the API of the class in JSON.
    '''
    return service.setResponseJSON(jsonApi, encode=False)
def up(self):
    return service.setResponseJSON([])
def env(self, service):
    '''Prints the environment of a service's processes.
    '''
    return setResponseJSON(keeper.getEnvironment(service))