def metric_detail(request, name):
    """
    Get, update or delete a metric workload from the registry.

    :param request: The http request.
    :param name: The metric workload name (redis key suffix).
    :return: JSONResponse with the metric data (GET) or a status message.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB', status=500)

    redis_key = "metric:" + str(name)

    if request.method == 'GET':
        # An unknown name yields an empty dict, still returned with 200.
        return JSONResponse(r.hgetall(redis_key), status=200)

    if request.method == 'PUT':
        if not r.exists(redis_key):
            return JSONResponse('Metric with name: ' + str(name) + ' not exists.', status=404)
        payload = JSONParser().parse(request)
        r.hmset(redis_key, payload)
        return JSONResponse('The metadata of the metric workload with name: ' + str(name) + ' has been updated', status=201)

    if request.method == 'DELETE':
        r.delete(redis_key)
        return JSONResponse('Metric workload has been deleted', status=204)

    return JSONResponse('Method ' + str(request.method) + ' not allowed.', status=405)
def add_dynamic_filter(request):
    """
    List all the dynamic filters registered (GET) or add a filter with its
    default parameters in the registry (POST).

    :param request: The http request.
    :return: JSONResponse with the filter list or a status message.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB', status=500)

    if request.method == 'GET':
        registered = []
        for redis_key in r.keys("dsl_filter:*"):
            entry = r.hgetall(redis_key)
            # The filter name lives in the key, not in the hash itself.
            entry["name"] = redis_key.split(":")[1]
            registered.append(entry)
        return JSONResponse(registered, status=200)

    if request.method == 'POST':
        payload = JSONParser().parse(request)
        filter_name = payload.pop("name", None)
        if not filter_name:
            return JSONResponse('Filter must have a name', status=400)
        r.hmset('dsl_filter:' + str(filter_name), payload)
        return JSONResponse('Filter has been added to the registy', status=201)

    return JSONResponse('Method ' + str(request.method) + ' not allowed.', status=405)
def get(self, request, controller_id):
    """
    Download the source file of a registered global controller.

    Looks up the controller's file name in redis, resolves it under
    GLOBAL_CONTROLLERS_DIR and streams it back as an attachment.
    Returns 404 if the controller id or its file is missing.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB',
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    if r.exists('controller:' + str(controller_id)):
        global_controller_path = os.path.join(
            settings.GLOBAL_CONTROLLERS_DIR,
            str(r.hget('controller:' + str(controller_id), 'controller_name')))
        if os.path.exists(global_controller_path):
            global_controller_name = os.path.basename(global_controller_path)
            global_controller_size = os.stat(global_controller_path).st_size

            # Generate response
            # NOTE(review): the file is opened in text mode and the handle is
            # left for the streaming machinery to exhaust/close — 'rb' would be
            # safer if controllers can contain non-text bytes; confirm.
            response = StreamingHttpResponse(
                FileWrapper(open(global_controller_path),
                            global_controller_size),
                content_type=mimetypes.guess_type(global_controller_path)[0])
            response['Content-Length'] = global_controller_size
            response[
                'Content-Disposition'] = "attachment; filename=%s" % global_controller_name

            return response
        else:
            return HttpResponse(status=status.HTTP_404_NOT_FOUND)
    else:
        return HttpResponse(status=status.HTTP_404_NOT_FOUND)
def node_restart(request, node_id):
    """
    Restart the Swift services of a storage node over SSH (PUT only).

    Reads the node's IP and SSH credentials from redis and runs
    `swift-init main restart` on the remote host via sshpass.

    :raises FileSynchronizationException: if the remote restart command
        exits with a non-zero status.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB',
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    logger.debug('Node id: ' + str(node_id))

    if request.method == 'PUT':
        node = r.hgetall('node:' + str(node_id))
        logger.debug('Node data: ' + str(node))

        data = {
            'node_ip': node['ip'],
            'ssh_username': node['ssh_username'],
            'ssh_password': node['ssh_password']
        }
        # NOTE(review): the command string is interpolated from redis-stored
        # values and executed through a shell (os.system) — safe only as long
        # as node records are written exclusively by trusted admins; a
        # subprocess list call would avoid shell interpolation entirely.
        restart_command = 'sshpass -p {ssh_password} ssh {ssh_username}@{node_ip} sudo swift-init main restart'.format(**data)
        logger.debug('Command: ' + str(restart_command))

        ret = os.system(restart_command)
        if ret != 0:
            logger.error('An error occurred restarting Swift nodes')
            raise FileSynchronizationException("An error occurred restarting Swift nodes")

        logger.debug('Node ' + str(node_id) + ' was restarted!')
        return JSONResponse('The node was restarted successfully.',
                            status=status.HTTP_200_OK)

    logger.error('Method ' + str(request.method) + ' not allowed.')
    return JSONResponse('Method ' + str(request.method) + ' not allowed.',
                        status=status.HTTP_405_METHOD_NOT_ALLOWED)
def run():
    """
    When the controller is started (or restarted) all the actors are stopped,
    so we need to ensure the correct values in redis: disable every workload
    metric and global controller, drop metric actor keys, and mark all
    dynamic policies as not alive.
    """
    # Add source directories to sys path
    sys.path.insert(0, settings.GLOBAL_CONTROLLERS_DIR)

    r = get_redis_connection()

    # Workload metric definitions.
    # Store the string 'False' (not the bool False): redis only holds strings,
    # readers in this file compare against 'True'/'False', and redis-py >= 3.0
    # rejects raw bool values with a DataError.
    for key in r.keys('workload_metric:*'):
        r.hset(key, 'enabled', 'False')

    # Workload metric Actors: drop them entirely; they are re-created on start.
    for key in r.keys('metric:*'):
        r.delete(key)

    # Dynamic policies: mark as not running.
    for key in r.keys('policy:*'):
        r.hset(key, 'alive', 'False')

    # Global controllers: mark as disabled.
    for key in r.keys('controller:*'):
        r.hset(key, 'enabled', 'False')
def load_policies():
    """
    Re-spawn the dynamic policy rule actors that were marked as alive in
    redis before the controller was (re)started.

    For each stored policy whose 'alive' flag is the string 'True', the DSL
    description is re-parsed and one actor per action is spawned (a transient
    or a persistent rule class, depending on the action flags).
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB', status=500)
    dynamic_policies = r.keys("policy:*")

    if dynamic_policies:
        logger.info("Starting dynamic rules stored in redis")
        host = create_local_host()
        for policy in dynamic_policies:
            policy_data = r.hgetall(policy)
            if policy_data['alive'] == 'True':
                _, rule_parsed = dsl_parser.parse(policy_data['policy_description'])
                target = rule_parsed.target[0][1]  # Tenant ID or tenant+container
                for action_info in rule_parsed.action_list:
                    if action_info.transient:
                        logger.info("Transient rule: " + policy_data['policy_description'])
                        # Actor is registered under the policy redis key so it can
                        # be stopped later from dynamic_policy_detail().
                        rule_actors[policy] = host.spawn_id(
                            str(policy), settings.RULE_TRANSIENT_MODULE,
                            settings.RULE_TRANSIENT_CLASS,
                            [rule_parsed, action_info, target, host])
                        rule_actors[policy].start_rule()
                    else:
                        logger.info("Rule: " + policy_data['policy_description'])
                        rule_actors[policy] = host.spawn_id(
                            str(policy), settings.RULE_MODULE, settings.RULE_CLASS,
                            [rule_parsed, action_info, target, host])
                        rule_actors[policy].start_rule()
def storage_node_detail(request, snode_id):
    """
    Get, update or delete a storage node from the registry.

    :param request: The http request.
    :param snode_id: Storage node identifier (redis key suffix).
    :return: JSONResponse with node data (GET) or a status message.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB', status=500)

    redis_key = "SN:" + str(snode_id)

    if request.method == 'GET':
        # Unknown ids yield an empty dict, still returned with 200.
        return JSONResponse(r.hgetall(redis_key), status=200)

    if request.method == 'PUT':
        if not r.exists(redis_key):
            return JSONResponse('Storage node with name: ' + str(snode_id) + ' not exists.', status=404)
        payload = JSONParser().parse(request)
        r.hmset(redis_key, payload)
        return JSONResponse('The metadata of the storage node with name: ' + str(snode_id) + ' has been updated', status=201)

    if request.method == 'DELETE':
        r.delete(redis_key)
        return JSONResponse('Storage node has been deleted', status=204)

    return JSONResponse('Method ' + str(request.method) + ' not allowed.', status=405)
def list_storage_node(request):
    """
    Add a storage node or list all the storage nodes saved in the registry.

    :param request: The http request.
    :return: JSONResponse with the sorted node list (GET) or a status message.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB', status=500)

    if request.method == "GET":
        nodes = []
        for redis_key in r.keys("SN:*"):
            entry = r.hgetall(redis_key)
            # The numeric id lives in the key, not in the hash itself.
            entry["id"] = redis_key.split(":")[1]
            nodes.append(entry)
        return JSONResponse(sorted(nodes, key=itemgetter('name')), status=200)

    if request.method == "POST":
        # Allocate a fresh id from the shared counter before storing.
        new_id = r.incr("storage_nodes:id")
        r.hmset('SN:' + str(new_id), JSONParser().parse(request))
        return JSONResponse('Storage node has been added to the registry', status=201)

    return JSONResponse('Method ' + str(request.method) + ' not allowed.', status=405)
def get(self, request, metric_module_id):
    """
    Download the source file of a registered workload metric module.

    Looks up the module's file name in redis, resolves it under
    WORKLOAD_METRICS_DIR and streams it back as an attachment.
    Returns 404 if the metric id or its file is missing.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB',
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    if r.exists('workload_metric:' + str(metric_module_id)):
        workload_metric_path = os.path.join(
            settings.WORKLOAD_METRICS_DIR,
            str(r.hget('workload_metric:' + str(metric_module_id), 'metric_name')))
        if os.path.exists(workload_metric_path):
            workload_metric_name = os.path.basename(workload_metric_path)
            workload_metric_size = os.stat(workload_metric_path).st_size

            # Generate response
            # NOTE(review): file opened in text mode, handle closed by the
            # streaming machinery when exhausted — 'rb' would be safer for
            # arbitrary module contents; confirm.
            response = StreamingHttpResponse(
                FileWrapper(open(workload_metric_path), workload_metric_size),
                content_type=mimetypes.guess_type(workload_metric_path)[0])
            response['Content-Length'] = workload_metric_size
            response[
                'Content-Disposition'] = "attachment; filename=%s" % workload_metric_name

            return response
        else:
            return HttpResponse(status=status.HTTP_404_NOT_FOUND)
    else:
        return HttpResponse(status=status.HTTP_404_NOT_FOUND)
def add_tenants_group(request):
    """
    Add a tenant group or list all the tenants groups saved in the registry.

    :param request: The http request.
    :return: JSONResponse mapping group id -> member list (GET) or a message.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB',
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    if request.method == 'GET':
        groups = {}
        for redis_key in r.keys("G:*"):
            # Each group is a redis list keyed G:<id>.
            groups[redis_key.split(":")[1]] = r.lrange(redis_key, 0, -1)
        return JSONResponse(groups, status=status.HTTP_200_OK)

    if request.method == 'POST':
        members = JSONParser().parse(request)
        if not members:
            return JSONResponse('Tenant group cannot be empty',
                                status=status.HTTP_400_BAD_REQUEST)
        group_id = r.incr("gtenant:id")
        r.rpush('G:' + str(group_id), *members)
        return JSONResponse('Tenant group has been added to the registry',
                            status=status.HTTP_201_CREATED)

    return JSONResponse('Method ' + str(request.method) + ' not allowed.',
                        status=status.HTTP_405_METHOD_NOT_ALLOWED)
def dynamic_policy_detail(request, policy_id):
    """
    Delete a dynamic policy.

    Stops and unregisters the associated rule actor (best effort), removes
    the policy hash from redis, and resets the id counter when no policies
    remain.

    :param request: The http request.
    :param policy_id: The policy identifier.
    :return: JSONResponse with a status message.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB', status=500)

    if request.method == 'DELETE':
        create_local_host()
        try:
            rule_actors[int(policy_id)].stop_actor()
            del rule_actors[int(policy_id)]
        except Exception as e:
            # Best effort: the actor may already be gone (e.g. after a
            # controller restart). Removed the Python-2-only `print e`
            # statement; the logger call already records the failure.
            logger.error(str(e))

        r.delete('policy:' + policy_id)

        # If this was the last policy, reset the id counter.
        policies_ids = r.keys('policy:*')
        if len(policies_ids) == 0:
            r.set('policies:id', 0)

        return JSONResponse('Policy has been deleted', status=204)

    return JSONResponse('Method ' + str(request.method) + ' not allowed.', status=405)
def node_list(request):
    """
    GET: List all nodes ordered by name.

    SSH credentials are stripped from every entry before it is returned.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB',
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    if request.method == 'GET':
        nodes = []
        for redis_key in r.keys("node:*"):
            entry = r.hgetall(redis_key)
            # username & password are not returned in the list
            entry.pop("ssh_username", None)
            entry.pop("ssh_password", None)
            # 'devices' is stored as a JSON string in redis.
            entry['devices'] = json.loads(entry['devices'])
            nodes.append(entry)
        return JSONResponse(sorted(nodes, key=itemgetter('name')),
                            status=status.HTTP_200_OK)

    return JSONResponse('Method ' + str(request.method) + ' not allowed.',
                        status=status.HTTP_405_METHOD_NOT_ALLOWED)
def add_metric(request):
    """
    Get all registered workload metrics (GET) or add a new metric workload
    in the registry (POST).

    :param request: The http request.
    :type request: HttpRequest
    :return: A JSON list with all registered metrics (GET) or a
        success/error message depending on the result of the function.
    :rtype: JSONResponse
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB', status=500)

    if request.method == 'GET':
        registered = []
        for redis_key in r.keys("metric:*"):
            entry = r.hgetall(redis_key)
            # The metric name lives in the key, not in the hash itself.
            entry["name"] = redis_key.split(":")[1]
            registered.append(entry)
        return JSONResponse(registered, status=200)

    if request.method == 'POST':
        payload = JSONParser().parse(request)
        metric_name = payload.pop("name", None)
        if not metric_name:
            return JSONResponse('Metric must have a name', status=400)
        r.hmset('metric:' + str(metric_name), payload)
        return JSONResponse('Metric has been added in the registry', status=201)

    return JSONResponse('Method ' + str(request.method) + ' not allowed.', status=405)
def dependency_detail(request, dependency_id):
    """
    Retrieve, update or delete a Dependency.

    :param request: The http request.
    :param dependency_id: Dependency identifier (redis key suffix).
    :return: JSONResponse with the dependency data (GET) or a message.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB', status=500)

    redis_key = "dependency:" + str(dependency_id)

    if request.method == 'GET':
        return JSONResponse(r.hgetall(redis_key), status=200)

    if request.method == 'PUT':
        payload = JSONParser().parse(request)
        try:
            r.hmset(redis_key, payload)
        except DataError:
            return JSONResponse("Error updating data", status=400)
        return JSONResponse("Data updated", status=201)

    if request.method == 'DELETE':
        r.delete(redis_key)
        return JSONResponse('Dependency has been deleted', status=204)

    return JSONResponse('Method ' + str(request.method) + ' not allowed.', status=405)
def sort_detail(request, sort_id):
    """
    Retrieve, update or delete a Proxy Sorting.

    :param request: The http request.
    :param sort_id: Proxy sorting identifier (redis key suffix).
    :return: JSONResponse with the sorting data (GET) or a message.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB',
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    redis_key = "proxy_sorting:" + str(sort_id)

    if request.method == 'GET':
        return JSONResponse(r.hgetall(redis_key), status=status.HTTP_200_OK)

    if request.method == 'PUT':
        try:
            payload = JSONParser().parse(request)
            r.hmset(redis_key, payload)
        except ParseError:
            return JSONResponse("Invalid format or empty request",
                                status=status.HTTP_400_BAD_REQUEST)
        except redis.exceptions.DataError:
            return JSONResponse("Error updating data",
                                status=status.HTTP_400_BAD_REQUEST)
        return JSONResponse("Data updated", status=status.HTTP_201_CREATED)

    if request.method == 'DELETE':
        r.delete(redis_key)
        return JSONResponse('Proxy sorting has been deleted',
                            status=status.HTTP_204_NO_CONTENT)

    return JSONResponse('Method ' + str(request.method) + ' not allowed.',
                        status=status.HTTP_405_METHOD_NOT_ALLOWED)
def get(self, request, storlet_id, format=None):
    """
    Download the source/binary file of a registered filter (storlet).

    The file path is stored directly in the filter hash in redis.
    Returns 404 if the filter id or its file is missing.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB',
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    if r.exists('filter:' + str(storlet_id)):
        filter_path = r.hget('filter:' + str(storlet_id), 'path')
        if os.path.exists(filter_path):
            filter_name = os.path.basename(filter_path)
            filter_size = os.stat(filter_path).st_size

            # Generate response
            # NOTE(review): .jar filters are binary but the file is opened in
            # text mode; on POSIX this is harmless, but 'rb' would be the
            # portable choice — confirm.
            response = StreamingHttpResponse(
                FileWrapper(open(filter_path), filter_size),
                content_type=mimetypes.guess_type(filter_path)[0])
            response['Content-Length'] = filter_size
            response[
                'Content-Disposition'] = "attachment; filename=%s" % filter_name

            return response
        else:
            return HttpResponse(status=status.HTTP_404_NOT_FOUND)
    else:
        return HttpResponse(status=status.HTTP_404_NOT_FOUND)
def dependency_list(request):
    """
    List all dependencies, or create a Dependency.

    :param request: The http request.
    :return: JSONResponse with the dependency list (GET), the stored record
        (POST), or an error message.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB', status=500)

    if request.method == 'GET':
        deps = [r.hgetall(redis_key) for redis_key in r.keys("dependency:*")]
        return JSONResponse(deps, status=200)

    if request.method == 'POST':
        payload = JSONParser().parse(request)
        # Allocate a fresh id from the shared counter before storing.
        new_id = r.incr("dependencies:id")
        try:
            payload["id"] = new_id
            r.hmset('dependency:' + str(new_id), payload)
        except DataError:
            return JSONResponse("Error to save the filter", status=400)
        return JSONResponse(payload, status=201)

    return JSONResponse('Method ' + str(request.method) + ' not allowed.', status=405)
def storlet_list(request):
    """
    List all storlets, or create a new storlet.

    GET returns all registered filters sorted by numeric id. POST validates
    the required keys for the declared filter_type ('storlet'/'native' vs
    'global'), stores the record, and — for enabled global filters — mirrors
    it into the 'global_filters' hash as JSON.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB',
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    if request.method == 'GET':
        keys = r.keys("filter:*")
        storlets = []
        for key in keys:
            storlet = r.hgetall(key)
            storlets.append(storlet)
        # Ids are stored as strings; sort numerically.
        sorted_list = sorted(storlets, key=lambda x: int(itemgetter('id')(x)))
        return JSONResponse(sorted_list, status=status.HTTP_200_OK)

    if request.method == 'POST':
        try:
            data = JSONParser().parse(request)
        except ParseError:
            return JSONResponse("Invalid format or empty request",
                                status=status.HTTP_400_BAD_REQUEST)

        # Reject requests missing filter_type or the mandatory keys for that
        # type (FILTER_KEYS / GLOBAL_FILTER_KEYS minus the id and the last
        # entry, which are not client-supplied).
        if (('filter_type' not in data) or
                ((data['filter_type'] == 'storlet' or data['filter_type'] == 'native') and
                 not check_keys(data.keys(), FILTER_KEYS[2:-1])) or
                ((data['filter_type'] == 'global') and
                 not check_keys(data.keys(), GLOBAL_FILTER_KEYS[2:-1]))):
            return JSONResponse("Invalid parameters in request",
                                status=status.HTTP_400_BAD_REQUEST)

        storlet_id = r.incr("filters:id")
        try:
            data['id'] = storlet_id
            r.hmset('filter:' + str(storlet_id), data)

            if data['filter_type'] == 'global':
                # 'enabled' may arrive as a real bool or a string flag.
                if data['enabled'] is True or data['enabled'] == 'True' or data['enabled'] == 'true':
                    to_json_bools(data, 'has_reverse', 'is_pre_get', 'is_post_get',
                                  'is_pre_put', 'is_post_put', 'enabled')
                    r.hset("global_filters", str(storlet_id), json.dumps(data))

            return JSONResponse(data, status=status.HTTP_201_CREATED)
        except DataError:
            return JSONResponse("Error to save the object",
                                status=status.HTTP_400_BAD_REQUEST)

    return JSONResponse('Method ' + str(request.method) + ' not allowed.',
                        status=status.HTTP_405_METHOD_NOT_ALLOWED)
def put(self, request, dependency_id, format=None):
    """
    Upload (or replace) the file of an existing dependency and record its
    stored path in redis.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Problems to connect with the DB', status=500)

    redis_key = "dependency:" + str(dependency_id)
    if not r.exists(redis_key):
        return JSONResponse('Dependency does not exist', status=404)

    uploaded = request.FILES['file']
    stored_path = save_file(uploaded, settings.DEPENDENCY_DIR)
    r.hset(redis_key, "path", str(stored_path))
    return JSONResponse('Dependency has been updated', status=201)
def tenants_group_detail(request, gtenant_id):
    """
    Get, update or delete a tenants group from the registry.

    :param request: The http request.
    :param gtenant_id: Tenant group identifier (redis key suffix).
    :return: JSONResponse with the member list (GET) or a status message.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB',
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    key = 'G:' + str(gtenant_id)
    not_found_msg = 'The tenant group with id: ' + str(gtenant_id) + ' does not exist.'

    if request.method == 'GET':
        if not r.exists(key):
            return JSONResponse(not_found_msg, status=status.HTTP_404_NOT_FOUND)
        return JSONResponse(r.lrange(key, 0, -1), status=status.HTTP_200_OK)

    if request.method == 'PUT':
        if not r.exists(key):
            return JSONResponse(not_found_msg, status=status.HTTP_404_NOT_FOUND)
        members = JSONParser().parse(request)
        if not members:
            return JSONResponse('Tenant group cannot be empty',
                                status=status.HTTP_400_BAD_REQUEST)
        # the following commands are buffered in a single atomic request
        # (to replace current contents)
        pipe = r.pipeline()
        if pipe.delete(key).rpush(key, *members).execute():
            return JSONResponse('The members of the tenants group with id: ' + str(gtenant_id) + ' has been updated',
                                status=status.HTTP_201_CREATED)
        return JSONResponse('Error storing the tenant group in the DB',
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    if request.method == 'DELETE':
        if not r.exists(key):
            return JSONResponse(not_found_msg, status=status.HTTP_404_NOT_FOUND)
        r.delete(key)
        return JSONResponse('Tenants group has been deleted',
                            status=status.HTTP_204_NO_CONTENT)

    return JSONResponse('Method ' + str(request.method) + ' not allowed.',
                        status=status.HTTP_405_METHOD_NOT_ALLOWED)
def put(self, request, storlet_id, format=None):
    """
    Upload (or replace) the implementation file of an existing filter.

    Validates the file extension against the filter type (.jar for storlets,
    .py for native/global), saves it under the matching directory, records
    path/size/etag metadata in redis, and rsyncs native/global filter
    directories to all nodes.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB',
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    filter_name = "filter:" + str(storlet_id)
    if r.exists(filter_name):
        file_obj = request.FILES['file']

        # The uploaded file's extension must match the registered filter type.
        filter_type = r.hget(filter_name, 'filter_type')
        if (filter_type == 'storlet' and not file_obj.name.endswith('.jar')) or \
                (filter_type == 'native' and not file_obj.name.endswith('.py')) or \
                (filter_type == 'global' and not file_obj.name.endswith('.py')):
            return JSONResponse('Uploaded file is incompatible with filter type',
                                status=status.HTTP_400_BAD_REQUEST)

        if filter_type == 'storlet':
            filter_dir = settings.STORLET_FILTERS_DIR
        elif filter_type == 'native':
            filter_dir = settings.NATIVE_FILTERS_DIR
        else:  # global
            filter_dir = settings.GLOBAL_NATIVE_FILTERS_DIR

        make_sure_path_exists(filter_dir)
        path = save_file(file_obj, filter_dir)
        md5_etag = md5(path)

        try:
            r.hset(filter_name, "filter_name", os.path.basename(path))
            r.hset(filter_name, "path", str(path))
            r.hset(filter_name, "content_length", str(request.META["CONTENT_LENGTH"]))
            r.hset(filter_name, "etag", str(md5_etag))
        except RedisError:
            return JSONResponse('Problems connecting with DB',
                                status=status.HTTP_500_INTERNAL_SERVER_ERROR)

        if filter_type == 'native' or filter_type == 'global':
            # synchronize metrics directory with all nodes
            try:
                rsync_dir_with_nodes(filter_dir)
            except FileSynchronizationException as e:
                return JSONResponse(e.message,
                                    status=status.HTTP_500_INTERNAL_SERVER_ERROR)

        return JSONResponse('Filter has been updated',
                            status=status.HTTP_201_CREATED)
    return JSONResponse('Filter does not exist',
                        status=status.HTTP_404_NOT_FOUND)
def dependency_deploy(request, dependency_id, account):
    """
    Deploy a registered dependency into a tenant's Swift account (PUT).

    Uploads the dependency file to the 'dependency' container of the target
    account and records the deployment in redis.

    :param request: The http request.
    :param dependency_id: The dependency identifier in the registry.
    :param account: The target Swift account (without the AUTH_ prefix).
    :return: JSONResponse with a status message.
    """
    token = get_token_connection(request)

    if request.method == 'PUT':
        try:
            r = get_redis_connection()
        except RedisError:
            return JSONResponse('Problems to connect with the DB', status=500)

        dependency = r.hgetall("dependency:" + str(dependency_id))
        if not dependency:
            return JSONResponse('Dependency does not exist', status=404)

        metadata = {'X-Object-Meta-Storlet-Dependency-Version': str(dependency["version"])}
        if "path" not in dependency.keys():
            return JSONResponse('Dependency path does not exist', status=404)

        # Bug fix: open the file *before* the try so that an open() failure
        # cannot reach the finally block and raise a NameError on
        # `dependency_file`, masking the real error.
        dependency_file = open(dependency["path"], 'r')
        content_length = None
        response = dict()
        try:
            url = settings.SWIFT_URL + settings.SWIFT_API_VERSION + "/AUTH_" + str(account)
            swift_client.put_object(url, token, 'dependency', dependency["name"],
                                    dependency_file, content_length, None, None,
                                    "application/octet-stream", metadata, None,
                                    None, None, response)
        except ClientException:
            # The swift client fills `response` even on failure.
            return JSONResponse(response.get("reason"), status=response.get('status'))
        finally:
            dependency_file.close()

        status = response.get('status')
        if status == 201:
            if r.exists("AUTH_" + str(account) + ":dependency:" + str(dependency['name'])):
                return JSONResponse("Already deployed", status=200)
            if r.lpush("AUTH_" + str(account) + ":dependencies", str(dependency['name'])):
                return JSONResponse("Deployed", status=201)

        return JSONResponse("error", status=400)

    return JSONResponse('Method ' + str(request.method) + ' not allowed.', status=405)
def node_detail(request, node_id):
    """
    GET: Retrieve node details (without the SSH password).
    PUT: Update node metadata.
    DELETE: Remove the node key; a live node's metric middleware will
    recreate it.

    :param request: The http request.
    :param node_id: The node identifier (redis key suffix).
    :return: JSONResponse with the node data or a status message.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB',
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    key = "node:" + node_id

    if request.method == 'GET':
        if r.exists(key):
            node = r.hgetall(key)
            node.pop("ssh_password", None)  # password is not returned
            # 'devices' is stored as a JSON string in redis.
            node['devices'] = json.loads(node['devices'])
            return JSONResponse(node, status=status.HTTP_200_OK)
        else:
            return JSONResponse('Node not found.', status=status.HTTP_404_NOT_FOUND)

    if request.method == 'PUT':
        if r.exists(key):
            data = JSONParser().parse(request)
            try:
                r.hmset(key, data)
                return JSONResponse("Data updated", status=status.HTTP_201_CREATED)
            except RedisError:
                return JSONResponse("Error updating data", status=status.HTTP_400_BAD_REQUEST)
        else:
            return JSONResponse('Node not found.', status=status.HTTP_404_NOT_FOUND)

    if request.method == 'DELETE':
        # Deletes the key. If the node is alive, the metric middleware will
        # recreate this key again. (The delete count was previously bound to
        # an unused local; dropped.)
        if r.exists(key):
            r.delete(key)
            return JSONResponse('Node has been deleted', status=status.HTTP_204_NO_CONTENT)
        else:
            return JSONResponse('Node not found.', status=status.HTTP_404_NOT_FOUND)

    return JSONResponse('Method ' + str(request.method) + ' not allowed.',
                        status=status.HTTP_405_METHOD_NOT_ALLOWED)
def global_controller_detail(request, controller_id):
    """
    Retrieve, update or delete a global controller.

    PUT also starts or stops the controller actor according to the resulting
    'enabled' flag. DELETE resets the id counter when the last controller is
    removed.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB',
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    if request.method == 'GET':
        controller = r.hgetall('controller:' + str(controller_id))
        to_json_bools(controller, 'enabled')
        return JSONResponse(controller, status=status.HTTP_200_OK)

    elif request.method == 'PUT':
        data = JSONParser().parse(request)
        try:
            r.hmset('controller:' + str(controller_id), data)

            # Re-read the merged record so the enabled check reflects both the
            # stored state and the update.
            controller_data = r.hgetall('controller:' + str(controller_id))
            to_json_bools(controller_data, 'enabled')
            if controller_data['enabled']:
                # Actor id is the module file name without extension.
                actor_id = controller_data['controller_name'].split('.')[0]
                start_global_controller(str(controller_id), actor_id,
                                        controller_data['class_name'],
                                        controller_data['type'],
                                        controller_data['dsl_filter'])
            else:
                stop_global_controller(str(controller_id))

            return JSONResponse("Data updated", status=status.HTTP_201_CREATED)
        except DataError:
            return JSONResponse("Error updating data",
                                status=status.HTTP_400_BAD_REQUEST)

    elif request.method == 'DELETE':
        r.delete("controller:" + str(controller_id))

        # If this is the last controller, the counter is reset
        keys = r.keys('controller:*')
        if not keys:
            r.delete('controllers:id')

        return JSONResponse('Controller has been deleted',
                            status=status.HTTP_204_NO_CONTENT)

    return JSONResponse('Method ' + str(request.method) + ' not allowed.',
                        status=status.HTTP_405_METHOD_NOT_ALLOWED)
def object_type_detail(request, object_type_name):
    """
    GET: List extensions allowed about an object type word registered.
    PUT: Update the object type word registered (replaces the whole list).
    DELETE: Delete the object type word registered.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB',
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    key = "object_type:" + object_type_name

    if request.method == 'GET':
        if r.exists(key):
            types_list = r.lrange(key, 0, -1)
            object_type = {"name": object_type_name, "types_list": types_list}
            return JSONResponse(object_type, status=status.HTTP_200_OK)
        return JSONResponse("Object type not found",
                            status=status.HTTP_404_NOT_FOUND)

    if request.method == "PUT":
        if not r.exists(key):
            return JSONResponse('The object type with name: ' + object_type_name + ' does not exist.',
                                status=status.HTTP_404_NOT_FOUND)
        data = JSONParser().parse(request)
        if not data:
            return JSONResponse('Object type must have a types_list defining the valid object types',
                                status=status.HTTP_400_BAD_REQUEST)

        # the following commands are buffered in a single atomic request
        # (to replace current contents)
        pipe = r.pipeline()
        if pipe.delete(key).rpush(key, *data).execute():
            return JSONResponse('The object type ' + str(object_type_name) + ' has been updated',
                                status=status.HTTP_201_CREATED)
        return JSONResponse('Error storing the object type in the DB',
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    if request.method == "DELETE":
        if r.exists(key):
            # NOTE(review): the delete count (an int), not the deleted list,
            # is returned in the body here — confirm this is intentional.
            object_type = r.delete(key)
            return JSONResponse(object_type, status=status.HTTP_200_OK)
        return JSONResponse("Object type not found",
                            status=status.HTTP_404_NOT_FOUND)

    return JSONResponse('Method ' + str(request.method) + ' not allowed.',
                        status=status.HTTP_405_METHOD_NOT_ALLOWED)
def dependency_list_deployed(request, account):
    """
    List the dependencies deployed into a given Swift account (GET).

    :param request: The http request.
    :param account: The target account (without the AUTH_ prefix).
    :return: JSONResponse with the deployed dependency names or a message.
    """
    if request.method == 'GET':
        try:
            r = get_redis_connection()
        except RedisError:
            return JSONResponse('Problems to connect with the DB', status=500)
        deployed = r.lrange("AUTH_" + str(account) + ":dependencies", 0, -1)
        if not deployed:
            return JSONResponse('Any Storlet deployed', status=404)
        return JSONResponse(deployed, status=200)

    return JSONResponse('Method ' + str(request.method) + ' not allowed.', status=405)
def post(self, request):
    """
    Register a new workload metric module.

    Parses the JSON metadata from the 'metadata' POST parameter, stores the
    uploaded file under WORKLOAD_METRICS_DIR, rsyncs the directory to all
    nodes, persists the record in redis, and starts the metric actor when
    the module is enabled.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB', status=500)

    # json data is in metadata parameter for this request
    data = json.loads(request.POST['metadata'])
    if not data:
        return JSONResponse("Invalid format or empty request",
                            status=status.HTTP_400_BAD_REQUEST)

    workload_metric_id = r.incr("workload_metrics:id")
    try:
        data['id'] = workload_metric_id

        file_obj = request.FILES['file']
        make_sure_path_exists(settings.WORKLOAD_METRICS_DIR)
        path = save_file(file_obj, settings.WORKLOAD_METRICS_DIR)
        data['metric_name'] = os.path.basename(path)

        # synchronize metrics directory with all nodes
        try:
            rsync_dir_with_nodes(settings.WORKLOAD_METRICS_DIR)
        except FileSynchronizationException as e:
            return JSONResponse(e.message,
                                status=status.HTTP_500_INTERNAL_SERVER_ERROR)

        r.hmset('workload_metric:' + str(workload_metric_id), data)

        # NOTE(review): 'enabled' may arrive as the string 'True'/'False';
        # both strings are truthy here — confirm callers send a real boolean.
        if data['enabled']:
            actor_id = data['metric_name'].split('.')[0]
            start_metric(workload_metric_id, actor_id)

        return JSONResponse(data, status=status.HTTP_201_CREATED)
    except DataError:
        return JSONResponse("Error to save the object",
                            status=status.HTTP_400_BAD_REQUEST)
    except Exception as e:
        # Removed the Python-2-only `print e` statement; the logger call
        # already records the failure.
        logger.error(str(e))
        return JSONResponse("Error uploading file",
                            status=status.HTTP_400_BAD_REQUEST)
def gtenants_tenant_detail(request, gtenant_id, tenant_id):
    """
    Delete a member from a tenants group.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB',
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    if request.method == 'DELETE':
        # NOTE(review): argument order matches redis-py 2.x
        # lrem(name, value, num); redis-py >= 3.0 changed the signature to
        # lrem(name, count, value) — confirm the pinned client version.
        r.lrem("G:" + str(gtenant_id), str(tenant_id), 1)
        return JSONResponse('Tenant ' + str(tenant_id) + ' has been deleted from group with the id: ' + str(gtenant_id),
                            status=status.HTTP_204_NO_CONTENT)
    return JSONResponse('Method ' + str(request.method) + ' not allowed.',
                        status=status.HTTP_405_METHOD_NOT_ALLOWED)
def object_type_items_detail(request, object_type_name, item_name):
    """
    Delete an extension from an object type definition.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB', status=500)
    if request.method == 'DELETE':
        # NOTE(review): argument order matches redis-py 2.x
        # lrem(name, value, num); redis-py >= 3.0 changed the signature to
        # lrem(name, count, value) — confirm the pinned client version.
        r.lrem("object_type:" + str(object_type_name), str(item_name), 1)
        return JSONResponse('Extension ' + str(item_name) + ' has been deleted from object type ' + str(object_type_name),
                            status=204)
    return JSONResponse('Method ' + str(request.method) + ' not allowed.', status=405)
def load_metrics():
    """
    Re-start every workload metric actor whose 'enabled' flag is the
    string 'True' in redis (run on controller start/restart).
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB', status=500)

    metric_keys = r.keys("workload_metric:*")
    if metric_keys:
        logger.info("Starting workload metrics")

    for metric_key in metric_keys:
        metric = r.hgetall(metric_key)
        if metric['enabled'] == 'True':
            # Actor id is the module file name without extension.
            start_metric(int(metric['id']), metric['metric_name'].split('.')[0])