def post(self, request):
    """Register a new controller.

    Expects a multipart request with a JSON 'metadata' field and a 'file'
    field. Allocates a new controller id in Redis, saves the uploaded file
    under settings.CONTROLLERS_DIR, and stores the metadata hash.

    Returns the stored metadata (including the new id) with 201 on success.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB', status=500)

    # JSON data is in the 'metadata' parameter for this request
    data = json.loads(request.POST['metadata'])
    if not data:
        return JSONResponse("Invalid format or empty request", status=status.HTTP_400_BAD_REQUEST)

    # Atomically allocate the next controller id.
    controller_id = r.incr("controllers:id")
    try:
        data['id'] = controller_id
        file_obj = request.FILES['file']
        make_sure_path_exists(settings.CONTROLLERS_DIR)
        path = save_file(file_obj, settings.CONTROLLERS_DIR)
        data['controller_name'] = os.path.basename(path)
        r.hmset('controller:' + str(controller_id), data)
        return JSONResponse(data, status=status.HTTP_201_CREATED)
    except DataError:
        return JSONResponse("Error to save the object", status=status.HTTP_400_BAD_REQUEST)
    except ValueError:
        # Typo fixed in the message: "stoping" -> "stopping".
        return JSONResponse("Error starting/stopping controller", status=status.HTTP_400_BAD_REQUEST)
    except Exception as e:
        # Log the unexpected failure instead of discarding it, matching the
        # workload-metric POST handler's error handling elsewhere in this file.
        logger.error(str(e))
        return JSONResponse("Error uploading file", status=status.HTTP_400_BAD_REQUEST)
def put(self, request, metric_module_id):
    """Update the file of an existing workload metric module.

    Saves the uploaded 'file' under settings.WORKLOAD_METRICS_DIR, rsyncs
    the directory to all nodes, and records the new metric file name in the
    'workload_metric:<id>' Redis hash.
    """
    data = {}
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB', status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    try:
        file_obj = request.FILES['file']
        make_sure_path_exists(settings.WORKLOAD_METRICS_DIR)
        path = save_file(file_obj, settings.WORKLOAD_METRICS_DIR)
        data['metric_name'] = os.path.basename(path)

        # Synchronize metrics directory with all nodes; abort the update if
        # the sync fails so nodes never run a metric Redis doesn't know about.
        try:
            rsync_dir_with_nodes(settings.WORKLOAD_METRICS_DIR, settings.WORKLOAD_METRICS_DIR)
        except FileSynchronizationException as e:
            return JSONResponse(e.message, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

        r.hmset('workload_metric:' + str(metric_module_id), data)
        return JSONResponse("Data updated", status=status.HTTP_201_CREATED)
    except DataError:
        return JSONResponse("Error updating data", status=status.HTTP_400_BAD_REQUEST)
    except ValueError:
        return JSONResponse("Error starting controller", status=status.HTTP_400_BAD_REQUEST)
def put(self, request, dependency_id, format=None):
    """Replace the file of an existing dependency.

    Saves the uploaded 'file' under settings.DEPENDENCY_DIR and updates the
    'path' field of the 'dependency:<id>' Redis hash. 404 if the dependency
    does not exist.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Problems to connect with the DB', status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    # Guard: only update dependencies that are already registered.
    if r.exists("dependency:" + str(dependency_id)):
        file_obj = request.FILES['file']
        path = save_file(file_obj, settings.DEPENDENCY_DIR)
        r.hset("dependency:" + str(dependency_id), "path", str(path))
        return JSONResponse('Dependency has been updated', status=status.HTTP_201_CREATED)
    return JSONResponse('Dependency does not exist', status=status.HTTP_404_NOT_FOUND)
def put(self, request, controller_id):
    """Replace the file of an existing controller.

    Stores the uploaded 'file' under settings.CONTROLLERS_DIR and records
    its basename in the 'controller:<id>' Redis hash.
    """
    try:
        redis_conn = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB', status=500)
    try:
        uploaded = request.FILES['file']
        make_sure_path_exists(settings.CONTROLLERS_DIR)
        saved_path = save_file(uploaded, settings.CONTROLLERS_DIR)
        updated_fields = {'controller_name': os.path.basename(saved_path)}
        redis_conn.hmset('controller:' + str(controller_id), updated_fields)
    except DataError:
        return JSONResponse("Error updating data", status=status.HTTP_400_BAD_REQUEST)
    except ValueError:
        return JSONResponse("Error starting controller", status=status.HTTP_400_BAD_REQUEST)
    return JSONResponse("Data updated", status=status.HTTP_201_CREATED)
def post(self, request):
    """Register a new workload metric module.

    Expects a multipart request with a JSON 'metadata' field and a 'file'
    field. Allocates a new metric id, saves the uploaded file under
    settings.WORKLOAD_METRICS_DIR, rsyncs the directory to all nodes, and
    stores the metadata in the 'workload_metric:<id>' Redis hash.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB', status=500)

    # JSON data is in the 'metadata' parameter for this request
    data = json.loads(request.POST['metadata'])
    if not data:
        return JSONResponse("Invalid format or empty request", status=status.HTTP_400_BAD_REQUEST)

    # Atomically allocate the next workload-metric id.
    workload_metric_id = r.incr("workload_metrics:id")
    try:
        data['id'] = workload_metric_id
        file_obj = request.FILES['file']
        make_sure_path_exists(settings.WORKLOAD_METRICS_DIR)
        path = save_file(file_obj, settings.WORKLOAD_METRICS_DIR)
        data['metric_name'] = os.path.basename(path)

        # Synchronize metrics directory with all nodes; abort on failure so
        # Redis is never told about a metric the nodes don't have.
        try:
            rsync_dir_with_nodes(settings.WORKLOAD_METRICS_DIR, settings.WORKLOAD_METRICS_DIR)
        except FileSynchronizationException as e:
            return JSONResponse(e.message, status=status.HTTP_500_INTERNAL_SERVER_ERROR)

        r.hmset('workload_metric:' + str(workload_metric_id), data)
        return JSONResponse(data, status=status.HTTP_201_CREATED)
    except DataError:
        return JSONResponse("Error to save the object", status=status.HTTP_400_BAD_REQUEST)
    except Exception as e:
        # Removed the stray debug `print e`; the logger call below already
        # records the failure.
        logger.error(str(e))
        return JSONResponse("Error uploading file", status=status.HTTP_400_BAD_REQUEST)
def post(self, request):
    """Register a new controller.

    Expects a multipart request with a JSON 'metadata' field and a 'file'
    field. Allocates a new controller id in Redis, saves the uploaded file
    under settings.CONTROLLERS_DIR, and stores the metadata hash.

    Returns the stored metadata (including the new id) with 201 on success.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB', status=500)

    # JSON data is in the 'metadata' parameter for this request
    data = json.loads(request.POST['metadata'])
    if not data:
        return JSONResponse("Invalid format or empty request", status=status.HTTP_400_BAD_REQUEST)

    # Atomically allocate the next controller id.
    controller_id = r.incr("controllers:id")
    try:
        data['id'] = controller_id
        file_obj = request.FILES['file']
        make_sure_path_exists(settings.CONTROLLERS_DIR)
        path = save_file(file_obj, settings.CONTROLLERS_DIR)
        data['controller_name'] = os.path.basename(path)
        r.hmset('controller:' + str(controller_id), data)
        return JSONResponse(data, status=status.HTTP_201_CREATED)
    except DataError:
        return JSONResponse("Error to save the object", status=status.HTTP_400_BAD_REQUEST)
    except ValueError:
        # Typo fixed in the message: "stoping" -> "stopping".
        return JSONResponse("Error starting/stopping controller", status=status.HTTP_400_BAD_REQUEST)
    except Exception as e:
        # Log the unexpected failure instead of discarding it, matching the
        # workload-metric POST handler's error handling in this file.
        logger.error(str(e))
        return JSONResponse("Error uploading file", status=status.HTTP_400_BAD_REQUEST)
def put(self, request, filter_id, format=None):
    """Replace the file of an existing filter and propagate the change.

    Validates the uploaded file's extension against the filter's type
    ('storlet' accepts .jar/.py, 'native' accepts .py only), saves it in the
    type-specific directory, updates the filter's Redis hash (name, path,
    content_length, etag), then rewrites every deployed pipeline entry that
    references this filter. For native filters the filter directory is
    rsynced to all nodes afterwards.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB', status=status.HTTP_500_INTERNAL_SERVER_ERROR)
    filter_name = "filter:" + str(filter_id)
    if r.exists(filter_name):
        file_obj = request.FILES['file']
        filter_type = r.hget(filter_name, 'filter_type')
        # Reject uploads whose extension does not match the filter type.
        if (filter_type == 'storlet' and not (file_obj.name.endswith('.jar') or file_obj.name.endswith('.py'))) or \
           (filter_type == 'native' and not file_obj.name.endswith('.py')):
            return JSONResponse('Uploaded file is incompatible with filter type', status=status.HTTP_400_BAD_REQUEST)
        # NOTE(review): filter_dir is only assigned for 'storlet'/'native';
        # any other stored filter_type would raise NameError below — confirm
        # those are the only possible values.
        if filter_type == 'storlet':
            filter_dir = settings.STORLET_FILTERS_DIR
        elif filter_type == 'native':
            filter_dir = settings.NATIVE_FILTERS_DIR
        make_sure_path_exists(filter_dir)
        path = save_file(file_obj, filter_dir)
        md5_etag = md5(path)
        try:
            filter_basename = os.path.basename(path)
            content_length = os.stat(path).st_size
            etag = str(md5_etag)
            path = str(path)
            r.hset(filter_name, "filter_name", filter_basename)
            r.hset(filter_name, "path", path)
            r.hset(filter_name, "content_length", content_length)
            r.hset(filter_name, "etag", etag)
        except RedisError:
            return JSONResponse('Problems connecting with DB', status=status.HTTP_500_INTERNAL_SERVER_ERROR)
        # Update info in already deployed filters
        filter_data = r.hgetall(filter_name)
        main = filter_data['main']
        token = get_token_connection(request)
        pipelines = r.keys('pipeline:*')
        for pipeline in pipelines:
            target = pipeline.replace('pipeline:', '')
            filters_data = r.hgetall(pipeline)
            for policy_id in filters_data:
                parameters = {}
                parameters["policy_id"] = policy_id
                # SECURITY/HACK: eval() of a Redis-stored string with JSON
                # booleans rewritten to quoted Python names; anyone who can
                # write these keys can execute code here — consider
                # json.loads instead.
                cfilter = eval(filters_data[policy_id].replace('true', '"True"').replace('false', '"False"'))
                if cfilter['dsl_name'] == filter_id:
                    # Refresh the deployed entry with the new file's metadata
                    # and re-apply it to the target.
                    cfilter['filter_name'] = filter_basename
                    cfilter['content_length'] = content_length
                    cfilter['etag'] = etag
                    cfilter['path'] = path
                    cfilter['main'] = main
                    set_filter(r, target, cfilter, parameters, token)
        if filter_type == 'native':
            # synchronize metrics directory with all nodes
            try:
                rsync_dir_with_nodes(filter_dir, filter_dir)
            except FileSynchronizationException as e:
                return JSONResponse(e.message, status=status.HTTP_500_INTERNAL_SERVER_ERROR)
        return JSONResponse('Filter has been updated', status=status.HTTP_201_CREATED)
    return JSONResponse('Filter does not exist', status=status.HTTP_404_NOT_FOUND)