def get_hvinfo(cls):
    """Fetch hypervisor information from the nova API.

    Only admin users are authorized for this endpoint, so admin credentials
    are used; if the first request fails (likely an expired token), re-login
    as admin and retry once.

    :return: the ``send_request`` result (asserted not to be the error code 1).
    """
    def _request():
        # Build and send the hypervisor-info request with the current
        # admin token / project id.
        path = url_hv_info.format(project_id=get_admin_project_id())
        head = {
            "Content-Type": "application/json",
            "X-Auth-Token": get_admin_token(),
        }
        return send_request("GET", IP_nova, PORT_nova, path, "", head)

    ret = _request()
    if ret == 1:
        # Token likely expired: re-authenticate as admin and retry once.
        admin_login()
        ret = _request()
    assert ret != 1, "send_request error"
    return ret
def update(self, uid, write_key,
           new=None, removed=(),
           deletion_policy=None, compression_policy=None,
           timeout=2.5):
    """
    Updates attributes of an existing record on a server.

    `new` defaults to an empty dict; a fresh dict is created per call
    (the previous mutable ``{}`` default was shared across invocations,
    so a callee mutating it would have leaked state between calls).

    `timeout` defaults to 2.5s.

    All other arguments are the same as in
    ``media_storage.interfaces.ControlConstruct.update``.
    """
    if new is None:
        new = {}
    request = common.assemble_request(self._server.get_host() + common.SERVER_UPDATE, {
        'uid': uid,
        'keys': {
            'write': write_key,
        },
        'policy': {
            'delete': deletion_policy,
            'compress': compression_policy,
        },
        'meta': {
            'new': new,
            'removed': removed,
        },
    })
    common.send_request(request, timeout=timeout)
def unlink(self, uid, write_key, timeout=2.5):
    """
    Unlinks the identified data on the server.

    `timeout` defaults to 2.5s.

    All other arguments are the same as in
    ``media_storage.interfaces.ControlConstruct.unlink``.
    """
    endpoint = self._server.get_host() + common.SERVER_UNLINK
    payload = {
        'uid': uid,
        'keys': {'write': write_key},
    }
    common.send_request(common.assemble_request(endpoint, payload), timeout=timeout)
def get_jobs_by_queue(state, queue):
    """Return the jobs matching `state` and `queue`, or None if none exist.

    Logs and re-raises any exception encountered while fetching or parsing
    the response.
    """
    logger.debug('Executing get_jobs_by_queue with params : {0} & {1}'.format(
        state, queue))
    try:
        req_url = get_jobs_by_all(state, '', queue)
        logger.debug('Job url is %s' % req_url)
        response = capi.send_request(req_url)
        if response != "" and response.ok:
            data = get_job_details(response)
            if data is None or not data:
                logger.debug(
                    'Looks like no job is running for state: {0}, queue: {1}'.
                    format(state, queue))
                return None
            else:
                return capi.get_json_data(data)
        else:
            logger.error(
                'Something went wrong, unable to fetch jobs for queue : {0}'.
                format(queue))
    except Exception as e:
        # str(e) works on both Python 2 and 3; e.message was removed in Py3.
        logger.error('Exception from get_jobs_by_queue: {0}'.format(e))
        tb = traceback.format_exc()
        logger.error('error trace: %s ' % tb)
        # Bare `raise` preserves the original traceback.
        raise
def get(self, uid, read_key, output_file=None, decompress_on_server=False, timeout=5.0):
    """
    Retrieves the requested data from the local proxy, returning its MIME
    and the decompressed content as an open filehandle in a tuple.

    `output_file` is the path of the file to which the response is written;
    an anonymous tempfile is used by default. If supplied, the caller is
    responsble for cleaning it up.

    `decompress_on_server` is ignored; the caching proxy is responsible for
    that.

    `timeout` defaults to 5.0s.

    All other arguments are the same as in
    ``media_storage.interfaces.RetrievalConstruct.get``.
    """
    body = {
        'uid': uid,
        'keys': {'read': read_key},
        'proxy': {'server': self._server.to_dictionary()},
    }
    request = common.assemble_request(self._proxy + common.SERVER_GET, body)
    (properties, response) = common.send_request(request, timeout=timeout)

    if output_file:
        output = open(output_file, 'wb')
    else:
        output = tempfile.SpooledTemporaryFile(_TEMPFILE_SIZE)
    output.write(response)
    output.seek(0)
    return (properties.get(common.PROPERTY_CONTENT_TYPE), output)
def show_service_version(catalog_host, service_id, prod_version, sdl_version, headers):
    """GET a specific SDL version of a service's product version from the catalog."""
    path = '/'.join(['v1/services', service_id,
                     'product_versions', prod_version,
                     'sdl_versions', sdl_version])
    req_url = '%s/%s' % ("http://" + catalog_host, path)
    return common.send_request(req_url, method='GET',
                               headers={'Authorization': headers})
def ping(self, timeout=1.0):
    """
    Indicates whether the server is online or not, raising an exception in
    case of failure.

    `timeout` is the number of seconds to allow for pinging to complete,
    defaulting to 1.0s.
    """
    target = self._server.get_host() + common.SERVER_PING
    (_properties, body) = common.send_request(
        common.assemble_request(target, {}), timeout=timeout)
    return json.loads(body)
def list_families(self, timeout=2.5):
    """
    Enumerates all families currently defined on the server, returning a
    sorted list of strings.

    `timeout` is the number of seconds to allow for retrieval to complete,
    defaulting to 2.5s.
    """
    target = self._server.get_host() + common.SERVER_LIST_FAMILIES
    (_properties, body) = common.send_request(
        common.assemble_request(target, {}), timeout=timeout)
    return json.loads(body)['families']
def query(self, query, timeout=5.0):
    """
    Returns a list of matching records, up to the server's limit.

    `timeout` defaults to 5.0s.

    All other arguments are the same as in
    ``media_storage.interfaces.ControlConstruct.query``.
    """
    target = self._server.get_host() + common.SERVER_QUERY
    (_properties, body) = common.send_request(
        common.assemble_request(target, query.to_dict()), timeout=timeout)
    return json.loads(body)['records']
def put(self, data, mime, family=None,
        comp=compression.COMPRESS_NONE, compress_on_server=False,
        deletion_policy=None, compression_policy=None,
        meta=None,
        uid=None, keys=None,
        timeout=10.0):
    """
    Stores data on a server, returning a dictionary containing the keys
    'uid', which points to the UID of the stored data, and 'keys', which is
    another dictionary containing 'read' and 'write', the keys needed to
    perform either type of action on the stored data.

    `data` is a string or file-like object containing the payload to be
    stored and `mime` is the MIME-type of the data.

    `timeout` defaults to 10.0s, but should be adjusted depending on your
    needs.

    All other arguments are the same as in
    ``media_storage.interfaces.ControlConstruct.put``.
    """
    description = {
        'uid': uid,
        'keys': keys,
        'physical': {
            'family': family,
            'format': {
                'mime': mime,
                'comp': comp,
            },
        },
        'policy': {
            'delete': deletion_policy,
            'compress': compression_policy,
        },
        'meta': meta,
    }

    headers = {}
    if comp:
        if not compress_on_server:
            try:
                # isinstance (rather than `type(data) in types.StringTypes`)
                # also accepts str/unicode subclasses.
                if isinstance(data, types.StringTypes):
                    # The compressors expect file-like objects.
                    data = StringIO.StringIO(data)
                data = compression.get_compressor(comp)(data)
            except ValueError:
                # No local compressor for this format; ask the server to
                # compress instead.
                headers[common.HEADER_COMPRESS_ON_SERVER] = common.HEADER_COMPRESS_ON_SERVER_TRUE
        else:
            headers[common.HEADER_COMPRESS_ON_SERVER] = common.HEADER_COMPRESS_ON_SERVER_TRUE

    request = common.assemble_request(self._server.get_host() + common.SERVER_PUT,
                                      description, headers=headers, data=data)
    (properties, response) = common.send_request(request, timeout=timeout)
    return json.loads(response)
def get_keypairs(cls):
    """Retrieve the key pairs of the current project from the nova API.

    :return: the ``send_request`` result (asserted not to be the error code 1).
    """
    head = {
        "Content-Type": "application/json",
        "X-Auth-Token": get_token(),
    }
    target = url_keypairs.format(project_id=get_proid())
    resp = send_request("GET", IP_nova, PORT_nova, target, "", head)
    assert resp != 1, "send_request error"
    return resp
def get_job_counts(user_param):
    """Return aggregate job counts for the given state name.

    Returns None when the stats URL cannot be resolved or the request
    fails; the error is logged rather than propagated.
    """
    try:
        state = user_param.lower()
        logger.debug('User param is : %s' % state)
        # Look the URL up once instead of calling get_appstats_urls twice.
        req_url = get_appstats_urls(state)
        if req_url is not None:
            logger.debug('Job url is %s' % req_url)
            response = capi.send_request(req_url)
            if response != "" and response.ok:
                return get_state_counts(response)
        else:
            logger.error('Something went wrong, app_stats url is empty.')
            raise Exception('Empty app_stats url')
    except Exception as e:
        # str(e) works on both Python 2 and 3; e.message was removed in Py3.
        logger.error('Exception from get_job_counts: {0}'.format(e))
def put(self, data, mime, family=None,
        comp=compression.COMPRESS_NONE, compress_on_server=False,
        deletion_policy=None, compression_policy=None,
        meta=None,
        uid=None, keys=None,
        timeout=3.0):
    """
    Stores data in the proxy's buffers, immediately returning a dictionary
    containing the keys 'uid', which points to the eventual UID of the
    stored data, and 'keys', which is another dictionary containing 'read'
    and 'write', the keys needed to perform either type of action on the
    stored data.

    It is important to note that the data is NOT actually stored when this
    pointer is returned, but rather that the pointer will be valid at some
    point in the future (typically very soon, but not within a predictable
    timeframe).

    `data` is a string containing the local filesystem path of the data to
    store and `mime` is the MIME-type of the data.

    `timeout` defaults to 3.0s, but should be adjusted depending on your
    server's performance.

    All other arguments are the same as in
    ``media_storage.interfaces.ControlConstruct.put``.
    """
    physical = {
        'family': family,
        'format': {'mime': mime, 'comp': comp},
    }
    policy = {
        'delete': deletion_policy,
        'compress': compression_policy,
    }
    proxy = {
        'server': self._server.to_dictionary(),
        'data': data,
    }
    description = {
        'uid': uid,
        'keys': keys,
        'physical': physical,
        'policy': policy,
        'meta': meta,
        'proxy': proxy,
    }

    request = common.assemble_request(self._proxy + common.SERVER_PUT, description)
    (_properties, body) = common.send_request(request, timeout=timeout)
    return json.loads(body)
def status(self, timeout=2.5):
    """
    Yields a dictionary of load data from the server::

        'process': {
            'cpu': {'percent': 0.1,},
            'memory': {'percent': 1.2, 'rss': 8220392,},
            'threads': 4,
        },
        'system': {
            'load': {'t1': 0.2, 't5': 0.5, 't15': 0.1,},
        }

    `timeout` is the number of seconds to allow for retrieval to complete,
    defaulting to 2.5s.
    """
    target = self._server.get_host() + common.SERVER_STATUS
    (_properties, body) = common.send_request(
        common.assemble_request(target, {}), timeout=timeout)
    return json.loads(body)
def get_domain_id(cls, query_dict=None):
    """Query keystone for domain information.

    :param query_dict: optional mapping that is URL-encoded and appended
        to the request path as a query string.
    :return: the ``send_request`` result.
    """
    path = url_domain_info
    if query_dict:
        path = "%s?%s" % (path, urllib.urlencode(query_dict))
    headers = {
        "Content-Type": "application/json",
        "X-Auth-Token": get_admin_token(),
    }
    return send_request("GET", IP_keystone, PORT_keystone, path, "", headers)
def get_jobs_by_queue(state, queue):
    """Return the jobs matching `state` and `queue`, or None if none exist.

    Logs and re-raises any exception encountered while fetching or parsing
    the response.
    """
    logger.debug('Executing get_jobs_by_queue with params : {0} & {1}'.format(state, queue))
    try:
        req_url = get_jobs_by_all(state, '', queue)
        logger.debug('Job url is %s' % req_url)
        response = capi.send_request(req_url)
        if response != "" and response.ok:
            data = get_job_details(response)
            if data is None or not data:
                logger.debug('Looks like no job is running for state: {0}, queue: {1}'.format(state, queue))
                return None
            else:
                return capi.get_json_data(data)
        else:
            logger.error('Something went wrong, unable to fetch jobs for queue : {0}'.format(queue))
    except Exception as e:
        # str(e) works on both Python 2 and 3; e.message was removed in Py3.
        logger.error('Exception from get_jobs_by_queue: {0}'.format(e))
        tb = traceback.format_exc()
        logger.error('error trace: %s ' % tb)
        # Bare `raise` preserves the original traceback.
        raise
def describe(self, uid, read_key, timeout=2.5):
    """
    Retrieves the requested record from the local proxy as a dictionary.

    `timeout` defaults to 2.5s.

    All other arguments are the same as in
    ``media_storage.interfaces.ControlConstruct.describe``.
    """
    body = {
        'uid': uid,
        'keys': {'read': read_key},
        'proxy': {'server': self._server.to_dictionary()},
    }
    (_properties, response) = common.send_request(
        common.assemble_request(self._proxy + common.SERVER_DESCRIBE, body),
        timeout=timeout)
    return json.loads(response)
def describe(self, uid, read_key, timeout=2.5):
    """
    Retrieves the requested record from the server as a dictionary.

    Raises ``common.NotPresentError`` when the record's physical data does
    not exist on the server.

    `timeout` defaults to 2.5s.

    All other arguments are the same as in
    ``media_storage.interfaces.ControlConstruct.describe``.
    """
    body = {
        'uid': uid,
        'keys': {'read': read_key},
    }
    target = self._server.get_host() + common.SERVER_DESCRIBE
    (_properties, raw) = common.send_request(
        common.assemble_request(target, body), timeout=timeout)
    record = json.loads(raw)
    if not record['physical']['exists']:
        raise common.NotPresentError(record)
    return record
def get(self, uid, read_key, output_file=None, decompress_on_server=False, timeout=5.0):
    """
    Retrieves the requested data from the server, returning its MIME and the
    decompressed content as a file-like object (optionally that supplied as
    `output_file`) in a tuple; the file-like object has a ``length``
    parameter that contains its length in bytes.

    `output_file` is an optional file-like object to which data should be
    written (a spooled tempfile is used by default).

    `timeout` defaults to 5.0s.

    All other arguments are the same as in
    ``media_storage.interfaces.ControlConstruct.get``.
    """
    headers = {}
    if not decompress_on_server:
        # Tell the server what the client supports
        headers[common.HEADER_SUPPORTED_COMPRESSION] = common.HEADER_SUPPORTED_COMPRESSION_DELIMITER.join(compression.SUPPORTED_FORMATS)
    request = common.assemble_request(self._server.get_host() + common.SERVER_GET, {
        'uid': uid,
        'keys': {
            'read': read_key,
        },
    }, headers=headers)
    # Stream directly into the destination (or a spooled tempfile) rather
    # than buffering the whole response in memory.
    if not output_file:
        output = tempfile.SpooledTemporaryFile(_TEMPFILE_SIZE)
    else:
        output = output_file
    properties = common.send_request(request, output=output, timeout=timeout)
    length = properties.get(common.PROPERTY_CONTENT_LENGTH)
    if properties.get(common.PROPERTY_APPLIED_COMPRESSION):
        # The payload arrived compressed; decompress locally. The
        # decompressor returns a NEW tempfile, so when the caller supplied
        # `output_file` the decompressed bytes must be copied back into it.
        output = compression.get_decompressor(properties.get(common.PROPERTY_APPLIED_COMPRESSION))(output)
        if output_file:
            # The decompression process returns a tempfile; rewind and
            # truncate the caller's file before copying the data back.
            output_file.seek(0)
            output_file.truncate()
            length = common.transfer_data(output, output_file)
            output_file.seek(0)
            output = output_file
    # Expose the byte count on the returned file-like object.
    output.length = length
    return (properties.get(common.PROPERTY_CONTENT_TYPE), output)
def configure_instance(catalog_host, instance_id, service_id, description,
                       product_version, sdl_version, vendor, headers, **kwargs):
    """PUT an updated configuration for an existing catalog instance."""
    post_body = {
        "service_id": service_id,
        "description": description,
        "product_version": product_version,
        "sdl_version": sdl_version,
        "vendor": vendor,
        "parameters": kwargs.get('parameters') or [],
    }
    req_url = '%s/%s' % ("http://" + catalog_host, "v1/instances" + "/" + instance_id)
    request_headers = {'Content-Type': 'application/json',
                       'Authorization': headers}
    return common.send_request(req_url, method='PUT',
                               body=json.dumps(post_body),
                               headers=request_headers)
def create_instance(catalog_host, instance_id, service_id, labels,
                    product_version, sdl_version, description, headers, **kwargs):
    """POST a new catalog instance; optional `parameters` kwarg is forwarded."""
    request_data = {
        "instance_id": instance_id,
        "description": description,
        "service_id": service_id,
        "product_version": product_version,
        "sdl_version": sdl_version,
        "labels": labels,
    }
    # Only attach parameters when the caller actually supplied them.
    if kwargs.get('parameters'):
        request_data['parameters'] = kwargs.get('parameters')
    req_url = '%s/%s' % ("http://" + catalog_host, "v1/instances")
    request_headers = {'Content-Type': 'application/json',
                       'Authorization': headers}
    return common.send_request(req_url, method='POST',
                               body=json.dumps(request_data),
                               headers=request_headers)
def download_test(self, test_name, test_hash):
    """Download a test file by hash into the tests directory, unless cached.

    Raises Exception when the HTTP download does not return status 200.
    """
    test_path = config.PATH_TESTS + test_hash

    # Already present locally — nothing to do.
    if path.exists(test_path):
        return

    url = self.tests_url + test_name
    self.logger.info(
        "[Submission {}] Downloading file {} with hash {} from URL: {}".format(
            self.id, test_name, test_hash, url))

    response = send_request("GET", url)
    if response.status_code != 200:
        self.logger.error(
            "[Submission {}] Could not download test {} with hash {} using URL: {}".format(
                self.id, test_name, test_hash, url))
        raise Exception("Could not download test file!")

    # Stream the payload to disk in 1MB chunks to bound memory use.
    with open(test_path, "wb") as out:
        for chunk in response.iter_content(config.FILE_DOWNLOAD_CHUNK_SIZE):
            out.write(chunk)
def list_categories(catalog_host):
    """GET all categories from the catalog service.

    Bug fix: the original took no arguments yet read the local name
    `catalog_host` before assignment, so every call raised
    UnboundLocalError. The parameter now matches the signature of the
    other catalog helpers (list_instances, show_service, ...).
    """
    url = "v1/categories"
    catalog_host = "http://" + catalog_host
    req_url = '%s/%s' % (catalog_host, url)
    return common.send_request(req_url, method='GET')
def delete_instance(catalog_host, instance_id, headers):
    """DELETE the identified instance from the catalog."""
    req_url = '%s/%s' % ("http://" + catalog_host,
                         "v1/instances" + "/" + instance_id)
    return common.send_request(req_url, method='DELETE',
                               headers={'Authorization': headers})
def list_instances(catalog_host, headers):
    """GET all instances from the catalog."""
    req_url = '%s/%s' % ("http://" + catalog_host, "v1/instances")
    return common.send_request(req_url, method='GET',
                               headers={'Authorization': headers})
def show_service(catalog_host, service_id, headers):
    """GET a single service's details from the catalog."""
    req_url = '%s/%s' % ("http://" + catalog_host,
                         "v1/services" + "/" + service_id)
    return common.send_request(req_url, method='GET',
                               headers={'Authorization': headers})