def create(self, request, *args, **kwargs):
    """Create a Post from the request body.

    ``tags`` and ``links`` are popped out of the payload and handed to the
    serializer through its context.  Returns 201 on success, 400 with the
    validation errors otherwise.
    """
    data = JSONParser().parse(request)
    # NOTE(review): author is hard-coded to a placeholder account instead of
    # request.user -- looks like temporary scaffolding; confirm before release.
    # (A disabled subreddit-membership check was also removed here as dead code.)
    tempuser = User.objects.get(username="******")
    data['author'] = tempuser.id
    tags = data.pop('tags', None)
    links = data.pop('links', None)
    serializer = PostSerializer(data=data,
                                context={"tags": tags, "links": links})
    if serializer.is_valid():
        serializer.save()
        return Response(data={'detail': 'Post Created'},
                        status=status.HTTP_201_CREATED)
    return Response(data={'detail': 'Invalid Post',
                          'errors': serializer.errors},
                    status=status.HTTP_400_BAD_REQUEST)
def database_to_client_sync(self, request):
    """Return the ReagentDelta change log this client has not yet applied.

    The client posts its autostainer serial number and the timestamp of its
    previous reagent sync; every ReagentDelta newer than that timestamp that
    was not executed by this autostainer is serialized back.  Unknown serial
    numbers get an AutoStainerStation row created on the fly.
    """
    payload = JSONParser().parse(request)
    logger.debug(payload)
    last_sync = payload.pop('last_sync_reagent', None)
    serial = payload.pop('autostainer_sn', None)
    stainer, _ = AutoStainerStation.objects\
        .get_or_create(autostainer_sn=serial)
    if not last_sync:
        # Never synced before -- reject until a policy exists.
        # TODO: decide what to do if we've never synced before
        logger.warning('"last_sync_reagent" was None')
        return Response(status=status.HTTP_400_BAD_REQUEST)
    since = datetime.strptime(last_sync, '%Y-%m-%dT%H:%M:%S%z')
    pending = ReagentDelta.objects\
        .filter(date__gt=since)\
        .exclude(executor=stainer)
    serializer = ReagentDeltaSerializer(pending, many=True)
    # Record when this autostainer last pulled reagent changes.
    stainer.latest_sync_time_Reagent = now()
    stainer.save()
    logger.info(serializer.data)
    return Response(serializer.data, status=status.HTTP_200_OK)
def api_order_index(request):
    """Create (POST) or update (PUT) an order, then return the order-index
    payload (all orders, all customers, last-year graph data)."""
    if request.method == 'POST':
        data = JSONParser().parse(request).get('data')
        prods = data['Prod']
        data.pop('Prod')
        serializer = ApiOrderIndexPostSerializer(data=data)
        if serializer.is_valid():
            order = serializer.save()
            # Line items are attached separately from the order itself.
            order_add_item(prods, order)
        else:
            # BUG FIX: ``error_messages`` is the serializer's static message
            # catalogue; the actual validation failures live in ``errors``.
            print(serializer.errors)
    elif request.method == 'PUT':
        data = JSONParser().parse(request).get('data')
        order = Order.objects.get(order_id=data.get('order_id'))
        serializer = ApiOrderIndexPostSerializer(order, data=data)
        if serializer.is_valid():
            serializer.save()
    data = {
        'Order': Order.objects.all(),
        'Cust': Customer.objects.all(),
        'Graph': get_last_year_orders()
    }
    serializer = ApiOrderIndexGetSerializer(data)
    return JsonResponse(serializer.data)
def partial_update(self, request, *args, **kwargs):
    """Partially update an existing Post; tags/links travel via context."""
    pk = kwargs['pk']
    if not does_post_exist(pk):
        return Response(data={'detail': 'Post does not exist'},
                        status=status.HTTP_404_NOT_FOUND)
    payload = JSONParser().parse(request)
    existing = Post.objects.get(pk=pk)
    context = {
        "tags": payload.pop('tags', None),
        "links": payload.pop('links', None),
        "partial": True,
    }
    serializer = PostSerializer(existing, data=payload, partial=True,
                                context=context)
    if not serializer.is_valid():
        return Response(data={'detail': 'Invalid Details',
                              'errors': serializer.errors},
                        status=status.HTTP_400_BAD_REQUEST)
    serializer.save()
    return Response(data={'detail': 'Post Updated'},
                    status=status.HTTP_201_CREATED)
def post(self, request):
    """Create a question"""
    # Students are not allowed to author questions.
    if request.user.user_group == 'Student':
        return Response(status=status.HTTP_403_FORBIDDEN)
    # Payload arrives wrapped in a one-element JSON list; take the first item.
    post_data = JSONParser().parse(request)[0]
    if "id" in post_data:
        post_data.pop('id')
    if "parents_node" in post_data:
        parents_id = post_data.pop('parents_node')
    else:
        return Response({"errors": "No parents_id"}, status=404)
    if not parents_id:
        return Response({"errors": "No parents_id"}, status=404)
    parents = []
    for i in parents_id:
        try:
            node = KnowledgeNode.objects.get(id=i)
        except KnowledgeNode.DoesNotExist:
            raise Http404
        parents.append(node)
        # NOTE(review): ``bank`` is overwritten each iteration, so only the
        # last parent's question_bank is used below -- this assumes all
        # parents share one bank; confirm against the data model.
        bank = node.question_bank
    # A QuestionGroup tracks the question's version history.
    q_group = QuestionGroup.objects.create(
        current_version=timezone.now(),
        belong_bank=bank,
    )
    q_group.save()
    q_group.parents_node.set(parents)
    q_group.save()
    post_data['question_change_time'] = q_group.current_version
    post_data['history_version_id'] = q_group.id
    question = self.create_question_from_data(post_data)
    if question.is_valid():
        new_q = question.save()
        response = question.data
        response['id'] = new_q.id
        # Refresh the bank's denormalized counters/timestamps.
        bank.question_count = len(bank.questiongroup_set.all())
        bank.lastUpdate = q_group.current_version
        bank.save()
        # Translate the stored integer type code back to its string name.
        response['question_type'] = INT2TYPE[(str)(
            response['question_type'])]
        response['parents_node'] = parents_id
        response['root_id'] = q_group.belong_bank.root_id
        return Response(response, status=201)
    # Validation failed: roll back the group created above.
    q_group.delete()
    return Response(question.errors, status=400)
def test_LoRaSerializer():
    # Smoke-test: parse a sample TTN (The Things Network) uplink message and
    # feed the flat fields into LoRaGatewaySensorSerializer.
    # NOTE(review): the local name ``json`` shadows the stdlib module inside
    # this function.
    json = b"""
    {
       "app_id":"dayton-engineering-and-geology",
       "dev_id":"180291",
       "hardware_serial":"000DB5390864367B",
       "port":2,
       "counter":4555,
       "payload_raw":"0oCH/////w==",
       "payload_fields":{
          "b":4.2,
          "sm1":255,
          "sm2":255,
          "sm3":255,
          "sm4":255,
          "t1":28,
          "t2":35
       },
       "metadata":{
          "time":"2019-09-29T17:17:03.147714091Z",
          "frequency":904.9,
          "modulation":"LORA",
          "data_rate":"SF10BW125",
          "coding_rate":"4/5",
          "gateways":[
             {
                "gtw_id":"rg1xx294cb6",
                "gtw_trusted":true,
                "timestamp":10479492,
                "time":"",
                "channel":5,
                "rssi":-58,
                "snr":9.25,
                "rf_chain":1,
                "latitude":39.741287,
                "longitude":-84.18488
             }
          ]
       },
       "downlink_url":"https://integrations.thethingsnetwork.org/ttn-us-west/api/v2/down/dayton-engineering-and-geology/webhook_test?key=ttn-account-v2.kY1MRQUoGICp7C9CAEvhEdGklPVWW-ztIiU0aVRLxno"
    }
    """
    stream = io.BytesIO(json)
    data = JSONParser().parse(stream)
    print(data)
    # Split out the nested structures the flat serializer does not accept.
    metadata_data = data.pop('metadata')
    gateway_data = metadata_data.pop('gateways')
    payload_fields_data = data.pop('payload_fields')
    serializer = LoRaGatewaySensorSerializer(data=data)
    print('VALID' if serializer.is_valid() else 'NOT VALID')
    print(serializer.validated_data)
'''
def create_project(request, id):
    """
    Create a project owned by ``id`` together with its developer group.

    :param request: request whose JSON body holds the project fields plus a
        ``developer`` list of user ids
    :param id: user_id of the project owner
    :return: custom JsonResponse with the serialized project (code 200) or a
        numeric failure code (1001 missing developers, 1002 bad group,
        1003 bad project)
    """
    result = {"data": None, "msg": "success", "code": 200}
    owner_id = id
    receive_data = JSONParser().parse(request)
    developer = receive_data.pop("developer")
    if not developer:
        result['msg'] = "failed"
        result['code'] = 1001
        return JsonResponse(data=result['data'], code=result['code'],
                            msg=result['msg'])
    # Drop None-valued keys so the serializer only validates provided fields.
    for k in list(receive_data.keys()):
        if receive_data.get(k) is None:
            receive_data.pop(k)
    group_data = dict(create_or_delete=False, update=True)
    # Create the group.
    group = GroupSerializer(data=group_data)
    if not group.is_valid():
        result['msg'] = "failed"
        result['code'] = 1002
        return JsonResponse(data=result['data'], code=result['code'],
                            msg=result['msg'])
    g = group.save()
    receive_data.update({'owner_id': owner_id})
    # Link every developer to the group.
    for user_id in developer:
        GroupUser.objects.create(uid=user_id, gid=g.gid)
    p = ProjectSerializer(data=receive_data)
    if not p.is_valid():
        result['msg'] = "failed"
        result['code'] = 1003
        return JsonResponse(data=result['data'], code=result['code'],
                            msg=result['msg'])
    project = p.save()
    data = ProjectSerializer(instance=project).data
    # Link the project to the group.
    ProjectGroup.objects.create(pid=project.pid, gid=g.gid)
    data.update(dict(developer=developer))
    return JsonResponse(data=data, code=200, msg="success")
def post(self, request):
    """Register a hacker.

    The payload must include the admin password; without it the request is
    rejected with 401 (and nothing is persisted).
    """
    data = JSONParser().parse(request)
    # BUG FIX: stop echoing the submitted password to stdout.
    password = data.pop('password', None)
    serializer = HackerSerializer(data=data)
    if password != admin1:
        # BUG FIX: auth failure now always answers 401; previously an
        # invalid payload with a wrong password fell through to 400.
        serializer.is_valid()
        return JsonResponse(serializer.errors, status=401)
    if serializer.is_valid():
        serializer.save()
        return JsonResponse(serializer.data, status=200)
    return JsonResponse(serializer.errors, status=400)
def create(self, request):
    """Create a letter subscription for an existing client.

    Returns 404 when the client email is unknown, 400 when the client
    already has a subscription or the payload is invalid, 200 on success.
    """
    data = JSONParser().parse(request)
    try:
        email = data.pop("email", "")
        client = Client.objects.get(email=email)
    except Client.DoesNotExist:
        return JsonResponse(
            {"error": "Client does not exist"},
            status=status.HTTP_404_NOT_FOUND,
        )
    # BUG FIX: reject up front.  The original saved the subscription first
    # and only then errored, leaving an orphaned subscription row behind.
    if client.letter_sub_url != "":
        return JsonResponse(
            {"error": "Client already has a subscription."},
            status=status.HTTP_400_BAD_REQUEST,
        )
    letter_sub_serializer = LetterSubscriptionSerializer(
        data=data, context={"request": request})
    if not letter_sub_serializer.is_valid():
        return JsonResponse(letter_sub_serializer.errors,
                            status=status.HTTP_400_BAD_REQUEST)
    letter_sub_serializer.save()
    client.letter_sub_url = letter_sub_serializer.data["url"]
    client.save()
    return JsonResponse(letter_sub_serializer.data,
                        status=status.HTTP_200_OK)
def put(self, request, uuid=None, format=None):
    """Replace a collection's fields and reconcile its movie list.

    Movies present in both old and new lists are kept; new titles are
    added; titles missing from the payload are removed.
    """
    body = JSONParser().parse(io.BytesIO(request.body))
    incoming_movies = body.pop('movies')
    incoming_titles = [m['title'] for m in incoming_movies]

    Collection.objects.filter(uuid=uuid).update(**body)
    updated_collection = Collection.objects.get(uuid=uuid)

    existing_titles = [m.title
                       for m in Movies.objects.filter(collection__uuid=uuid)]

    # Titles present both before and after the update stay untouched.
    kept = [t for t in incoming_titles if t in existing_titles]

    # Newly introduced movies get attached to the collection.
    for movie in incoming_movies:
        if movie['title'] not in kept:
            add_movie(collection_uuid=uuid, **movie)

    # Movies dropped from the payload are removed from the collection.
    for title in existing_titles:
        if title not in kept:
            remove_movie(collection_uuid=uuid, movie_title=title)

    serializer = Collection_read_Serializer(updated_collection)
    return Response({"updated collection": serializer.data})
def dogWalkerConstraintsList(request, name):
    """List (GET) or bulk-create (POST) a dog walker's constraints.

    POST creates one WalkerConstraint row per entry in ``sizesAllowed``;
    any invalid row aborts with 400 (rows created so far are kept).
    """
    try:
        dogWalker = DogWalker.objects.get(name=name)
        if request.method == 'GET':
            constraints = list(WalkerConstraint.objects.filter(walker=dogWalker))
            serializer = ConstraintSerializer(constraints, many=True)
            return Response(serializer.data, status=status.HTTP_200_OK)
        elif request.method == 'POST':
            data = JSONParser().parse(request)
            # (debug print of data['start'] removed)
            (startDatetime, endDatetime) = parseDateTime(data)
            data['start'] = datetime.time(hour=startDatetime.hour,
                                          minute=startDatetime.minute,
                                          second=startDatetime.second)
            data['end'] = datetime.time(hour=endDatetime.hour,
                                        minute=endDatetime.minute,
                                        second=endDatetime.second)
            data['walkerId'] = dogWalker.id
            sizes = data.pop('sizesAllowed')
            serializerList = []
            for size in sizes:
                data['sizesAllowed'] = size
                serializer = ConstraintSerializer(data=data)
                if serializer.is_valid():
                    serializer.save()
                    serializerList.append(serializer.data)
                else:
                    return Response(serializer.errors,
                                    status=status.HTTP_400_BAD_REQUEST)
            # FIX: this success response previously sat in a confusing
            # ``for ... else`` clause; the loop has no ``break``, so it
            # always ran after the loop -- now written as straight code.
            response = ConstraintSerializer(serializerList, many=True)
            return Response(response.data, status=status.HTTP_200_OK)
    except User.DoesNotExist:
        return Response("The user does not exist",
                        status=status.HTTP_404_NOT_FOUND)
    except DogWalker.DoesNotExist:
        return Response("The specified user is not a walker",
                        status=status.HTTP_404_NOT_FOUND)
def add_dynamic_filter(request):
    """
    Add a filter with its default parameters in the registry (redis).
    List all the dynamic filters registered.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        # BUG FIX: narrowed from a bare ``except`` (which also swallowed
        # KeyboardInterrupt/SystemExit); the sibling views already catch
        # RedisError here.
        return JSONResponse('Error connecting with DB', status=500)
    if request.method == 'GET':
        keys = r.keys("filter:*")
        dynamic_filters = []
        for key in keys:
            dynamic_filter = r.hgetall(key)
            dynamic_filter["name"] = key.split(":")[1]
            dynamic_filters.append(dynamic_filter)
        return JSONResponse(dynamic_filters, status=200)
    if request.method == 'POST':
        data = JSONParser().parse(request)
        name = data.pop("name", None)
        if not name:
            return JSONResponse('Filter must have a name', status=400)
        r.hmset('filter:' + str(name), data)
        return JSONResponse('Filter has been added in the registy', status=201)
    return JSONResponse('Method ' + str(request.method) + ' not allowed.',
                        status=405)
def add_metric(request):
    """
    Get all registered workload metrics (GET) or add a new metric workload in
    the registry (POST).

    :param request: The http request.
    :type request: HttpRequest
    :return: A JSON list with all registered metrics (GET) or a success/error
    message depending on the result of the function.
    :rtype: JSONResponse
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB', status=500)

    if request.method == 'GET':
        registered = []
        for key in r.keys("metric:*"):
            entry = r.hgetall(key)
            entry["name"] = key.split(":")[1]
            registered.append(entry)
        return JSONResponse(registered, status=200)

    if request.method == 'POST':
        payload = JSONParser().parse(request)
        metric_name = payload.pop("name", None)
        if not metric_name:
            return JSONResponse('Metric must have a name', status=400)
        r.hmset('metric:' + str(metric_name), payload)
        return JSONResponse('Metric has been added in the registry', status=201)

    return JSONResponse('Method ' + str(request.method) + ' not allowed.',
                        status=405)
def signUser(request):
    """Reshape a signup payload into {'email': ..., 'user': {username, password}}."""
    payload = JSONParser().parse(request)
    credentials = {'username': payload['name'], 'password': payload['password']}
    return {
        'email': payload.pop('email'),
        'user': credentials,
    }
def add_metric(request):
    """
    Add a metric workload in the registry (redis), or list all registered
    metrics on GET.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        # BUG FIX: narrowed from a bare ``except``.
        return JSONResponse('Error connecting with DB', status=500)
    if request.method == 'GET':
        keys = r.keys("metric:*")
        # (Python-2-style debug ``print 'keys', keys`` removed -- it was a
        # syntax error under Python 3.)
        metrics = []
        for key in keys:
            metric = r.hgetall(key)
            metric["name"] = key.split(":")[1]
            metrics.append(metric)
        return JSONResponse(metrics, status=200)
    if request.method == 'POST':
        data = JSONParser().parse(request)
        name = data.pop("name", None)
        if not name:
            return JSONResponse('Metric must have a name', status=400)
        r.hmset('metric:' + str(name), data)
        return JSONResponse('Metric has been added in the registy', status=201)
    return JSONResponse('Method ' + str(request.method) + ' not allowed.',
                        status=405)
def add_dynamic_filter(request):
    """
    Add a filter with its default parameters in the registry (redis).
    List all the dynamic filters registered.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB', status=500)

    if request.method == 'GET':
        registered = []
        for key in r.keys("dsl_filter:*"):
            entry = r.hgetall(key)
            entry["name"] = key.split(":")[1]
            registered.append(entry)
        return JSONResponse(registered, status=200)

    if request.method == 'POST':
        payload = JSONParser().parse(request)
        filter_name = payload.pop("name", None)
        if not filter_name:
            return JSONResponse('Filter must have a name', status=400)
        r.hmset('dsl_filter:' + str(filter_name), payload)
        return JSONResponse('Filter has been added to the registy', status=201)

    return JSONResponse('Method ' + str(request.method) + ' not allowed.',
                        status=405)
def donate(request):
    """Charge a card through Stripe and persist the donation record.

    Returns 401 when the charge fails, 201 on success, 400 on invalid data.
    """
    data = JSONParser().parse(request)
    # NOTE(review): the secret key should live in settings/env vars, not in
    # source control (even a test key).
    stripe.api_key = "sk_test_51HoFgjCxgtcfoZwvcEdcYWIIp09TagQbzRsNAnY34gPlj6zMdDSxgN9tK9FzMbVIJWJMEkM7SKlqAxTZEGmS9CHl00Dxl3xZhc"
    try:
        stripe.Charge.create(
            amount=data["amount"],
            currency="USD",
            source=data["authToken"],
        )
    except Exception:
        # Narrowed from a bare ``except``; ideally catch
        # stripe.error.StripeError specifically.
        return Response('Credit Card Invalid', status=401)
    # The auth token must never be stored with the donation.
    data.pop("authToken")
    serializer = DonationSerializer(data=data)
    if serializer.is_valid():
        serializer.save()
        return JsonResponse(serializer.data, status=201)
    # BUG FIX: validation errors previously went out with an implicit 200.
    return JsonResponse(serializer.errors, status=400)
def post(self, request):
    """Create a new QuestionBank"""
    # Students may not create banks.
    if request.user.user_group == 'Student':
        return Response(status=status.HTTP_403_FORBIDDEN)
    post_data = JSONParser().parse(request)
    if "id" in post_data:
        post_data.pop("id")
    # Every bank gets a fresh root knowledge node.
    root = KnowledgeNode.objects.create()
    root.name = "Root"
    post_data['root_id'] = root.id
    serializer = QuestionBankSerializer(data=post_data)
    if serializer.is_valid():
        new_bank = serializer.save()
        root.question_bank = new_bank
        root.save()
        response = serializer.data
        response['id'] = new_bank.id
        self.create_auth_code(new_bank)
        return Response(response, status=200)
    # BUG FIX: delete the root node created above so invalid payloads don't
    # leak orphan KnowledgeNode rows (mirrors the question-create view,
    # which deletes its QuestionGroup on failure).
    root.delete()
    return Response(serializer.errors, status=400)
def post(self, request, format=None):
    """Create a collection for the requesting user and attach its movies."""
    parsed = JSONParser().parse(io.BytesIO(request.body))
    movies = parsed.pop('movies')
    owner = User.objects.get(username=request.user)
    new_collection = Collection.objects.create(user=owner, **parsed)
    for entry in movies:
        add_movie(collection_uuid=new_collection.uuid, **entry)
    return Response({"collection_uuid": new_collection.uuid},
                    status=status.HTTP_201_CREATED)
def prepare_data(data: bytes) -> dict:
    """Parse a JSON payload and strip the fields the caller must not persist.

    Raises KeyError when any of the stripped fields is missing (same as the
    original unconditional pops).
    """
    parsed = JSONParser().parse(io.BytesIO(data))
    for field in ("ip", "location", "type"):
        parsed.pop(field)
    return parsed
def put(self, request, q_id):
    """Update information of the Question whose id=q_id"""
    # Students may not edit questions.
    if request.user.user_group == 'Student':
        return Response(status=status.HTTP_403_FORBIDDEN)
    # Payload arrives wrapped in a one-element JSON list.
    post_data = JSONParser().parse(request)[0]
    if "id" in post_data:
        post_data.pop("id")
    old_q = self.get_object(q_id)
    # The QuestionGroup tracks this question's version history; an update
    # creates a new question row stamped with the current time.
    q_group = old_q.history_version
    post_data['question_change_time'] = timezone.now()
    post_data['history_version_id'] = q_group.id
    question = QuestionList.create_question_from_data(post_data)
    if question.is_valid():
        new_q = question.save()
        new_parents = []
        # NOTE(review): unlike the create view, 'parents_node' is not
        # validated here -- a missing key or unknown id surfaces as a 500;
        # confirm whether that is intended.
        for i in post_data['parents_node']:
            new_parents.append(KnowledgeNode.objects.get(id=i))
        q_group.parents_node.set(new_parents)
        q_group.current_version = new_q.question_change_time
        q_group.save()
        bank = q_group.belong_bank
        bank.lastUpdate = new_q.question_change_time
        bank.save()
        response = question.data
        response['id'] = new_q.id
        # Translate the stored integer type code back to its string name.
        response['question_type'] = INT2TYPE[(str)(
            response['question_type'])]
        response['parents_node'] = post_data['parents_node']
        response['root_id'] = q_group.belong_bank.root_id
        return Response(response, status=201)
    return Response(question.errors, status=400)
def updateMember(request, authMember):
    """Update the authenticated member's editable fields.

    Read-only fields are stripped from the payload.  An email change marks
    the account inactive and emails a fresh verification token.
    """
    try:
        body = JSONParser().parse(request)
        # Body contains no information about fields to update.
        if len(body.keys()) < 1:
            return JSONResponse({'msg': 'There is nothing to update'}, status=200)
        # Remove read-only fields from the body if they exist.
        for readonly in ('id', 'token', 'is_active', 'is_verified'):
            body.pop(readonly, None)
        # BUG FIX: the original compared strings with ``is not ""``
        # (identity, not equality) -- undefined behavior and a SyntaxWarning.
        if authMember.email != "" and "email" in body.keys() and \
                authMember.email == body["email"].lower():
            del body["email"]
        sendEmail = False
        if "email" in body.keys() and body["email"] != "":
            body["email"] = body["email"].lower()
            if Member.objects.filter(email=body["email"]).exists():
                raise ValueError(
                    "Email already exists: {}".format(body["email"]))
            else:
                body["is_active"] = False
                # BUG FIX: b2a_hex returns bytes on Python 3 -- decode before
                # concatenating with the id string.
                body["ev_token"] = str(authMember.id) + binascii.b2a_hex(
                    os.urandom(12)).decode()
                sendEmail = True
        # Hash the password before update.
        if 'password' in body.keys():
            body['password'] = hashpassword(body['password'])
        # Finally mem's remaining provided fields will be updated if provided.
        memQuerySet = Member.objects.filter(token=authMember.token)
        memQuerySet.update(**body)
        if sendEmail:
            sendEmailVerification(authMember.id, body["email"],
                                  body["ev_token"], request)
        return JSONResponse(
            getCleanObject(json.loads(serialize('json', memQuerySet))),
            status=200)
    except Exception:
        # Top-level boundary: log and report (narrowed from a bare except).
        logging.error("Update mem failed: {}".format(reportError()))
        return JSONResponse(reportError(), status=400)
def create(self, request, *args, **kwargs):
    """Create a student bound to a class; on validation failure, return the
    already-existing student with that student_number instead."""
    if request.method == "POST":
        payload = JSONParser().parse(io.BytesIO(request.body))
        fhict_class = Class.objects.filter(name=payload["class_name"]).get()
        payload.pop("class_name")
        payload["class_id_id"] = fhict_class.id
        serializer = StudentSerializer(data=payload)
        if serializer.is_valid():
            serializer.save(mentor_id_id=payload["mentor_id_id"],
                            class_id_id=fhict_class.id)
            response = JsonResponse(serializer.data, safe=False)
            response["Access-Control-Allow-Origin"] = "*"
        else:
            print(serializer.errors)
            existing = StudentSerializer(
                Student.objects.get(student_number=payload["student_number"]))
            response = JsonResponse(existing.data, safe=False)
        return response
def create(self, request, *args, **kwargs):
    """Create a SubReddit; ``tags`` ride along in the serializer context."""
    payload = JSONParser().parse(request)
    tag_list = payload.pop('tags', None)
    serializer = SubRedditSerializer(data=payload,
                                     context={"tags": tag_list})
    if not serializer.is_valid():
        return Response(data={'detail': 'Invalid SubReddit',
                              'errors': serializer.errors},
                        status=status.HTTP_400_BAD_REQUEST)
    serializer.save()
    return Response(data={'detail': 'SubReddit Created'},
                    status=status.HTTP_201_CREATED)
def database_to_client_sync(self, request):
    """Return the PADelta change log this client has not yet applied.

    The client posts its autostainer serial number and the timestamp of its
    last PA sync; every PADelta newer than that timestamp originating from
    other stainers is serialized back.  ASHome calls this on entering the PA
    dialog.  Unknown serial numbers get an AutoStainerStation row created.
    """
    payload = JSONParser().parse(request)
    logger.debug(payload)
    last_sync = payload.pop('last_sync', None)
    serial = payload.pop('autostainer_sn', None)
    stainer, _ = AutoStainerStation.objects\
        .get_or_create(autostainer_sn=serial)
    if not last_sync:
        # Never synced before -- reject until a policy exists.
        # TODO: decide what to do if we've never synced before
        logger.warning('%s "last_sync" was None', serial)
        return Response(status=status.HTTP_400_BAD_REQUEST)
    since = datetime.strptime(last_sync, '%Y-%m-%dT%H:%M:%S%z')
    pending = PADelta.objects\
        .filter(date__gt=since)\
        .exclude(autostainer_sn=serial)
    serializer = PADeltaSerializer(pending, many=True)
    # Remember when this autostainer last pulled PA changes.
    stainer.latest_sync_time_PA = now()
    stainer.save()
    return Response(serializer.data, status=status.HTTP_200_OK)
def user_detail(request, pk):
    """Retrieve (GET), update (PUT/PATCH), or delete (DELETE) a user.

    ``avatar`` and ``token`` live on the related Profile and are split out
    of the payload before the User serializer runs.
    """
    try:
        user = User.objects.get(pk=pk)
    except User.DoesNotExist:
        return HttpResponse(status=404)

    if request.method == 'GET':
        serializer = UserSerializer(user)
        return JsonResponse(serializer.data)
    elif request.method == 'PUT' or request.method == 'PATCH':
        data = JSONParser().parse(request)
        if 'avatar' in data:
            avatar = data.pop('avatar')
            try:
                profile = Profile.objects.get(user=pk)
                profile.avatar = avatar
                profile.save()
            except Profile.DoesNotExist:
                Profile.objects.create(user=user, avatar=avatar)
        elif 'token' in data:
            token = data.pop('token')
            try:
                profile = Profile.objects.get(user=pk)
                profile.token = token
                profile.save()
            except Profile.DoesNotExist:
                # BUG FIX: previously created the profile with an undefined
                # ``avatar`` variable (NameError) instead of the token.
                Profile.objects.create(user=user, token=token)
        serializer = UserSerializer(user, data=data)
        if serializer.is_valid():
            serializer.save()
            return JsonResponse(serializer.data)
        return JsonResponse(serializer.errors, status=400)
    elif request.method == 'DELETE':
        user.delete()
        return HttpResponse(status=204)
def data_list(request):
    """List (GET), score-and-store (POST), or purge (DELETE) data articles.

    POST runs the saved logistic model over the numeric payload fields and
    stores the predicted popularity alongside the original data.
    """
    if request.method == 'GET':
        datas = DataArticle.objects.all().order_by('-id')
        data_serializer = HistorySerializer(datas, many=True)
        # 'safe=False' for objects serialization
        return JsonResponse(data_serializer.data, safe=False)
    elif request.method == 'POST':
        post_data = JSONParser().parse(request)
        # url/timedelta are not model features; hold them aside to re-insert
        # after prediction.
        myurl = post_data['url']
        mytimedelta = post_data['timedelta']
        post_data.pop('url', None)
        post_data.pop('timedelta', None)
        # PERF FIX: build the feature vector in one pass instead of the old
        # O(n^2) np.insert loop.  Feature order follows the payload's key
        # order (dicts preserve insertion order on Python 3.7+).
        features = np.array(list(post_data.values()), dtype=float)
        # BUG FIX: close the model file (was left open).
        with open('Prediction/finalized_modelLogistic2.sav', 'rb') as model_file:
            loaded_model = pickle.load(model_file)
        result = loaded_model.predict(features.reshape(1, -1))
        post_data.update({"popularity": result[0]})
        post_data.update({"url": myurl})
        post_data.update({"timedelta": mytimedelta})
        post_serializer = HistorySerializer(data=post_data)
        if post_serializer.is_valid():
            post_serializer.save()
            return JsonResponse(post_serializer.data,
                                status=status.HTTP_201_CREATED)
        return JsonResponse(post_serializer.errors,
                            status=status.HTTP_400_BAD_REQUEST)
    elif request.method == 'DELETE':
        count = DataArticle.objects.all().delete()
        return JsonResponse(
            {'message': '{} All data were deleted successfully!'.format(count[0])},
            status=status.HTTP_204_NO_CONTENT)
def create_process(request):
    """Kick off a plugin process and point the caller at its resource URL."""
    # Resolve the plugin instance (raises if not installed for this community).
    # TODO(#50): change this to support multiple plugin instances of the same type
    plugin = get_plugin_instance(plugin_name, request.community)

    payload = JSONParser().parse(request)
    # Pop callback_url so it isn't forwarded as a process kwarg.
    callback_url = payload.pop("callback_url", None)

    process = plugin.start_process(slug, callback_url, **payload)

    # 202 Accepted; the Location header names the new process resource.
    response = HttpResponse(status=HTTPStatus.ACCEPTED)
    response["Location"] = (
        f"/{utils.construct_process_url(plugin_name, slug)}/{process.pk}"
    )
    return response
def tournament_games(request, tournament):
    """List a tournament's games (GET) or record a new one (POST)."""
    if request.method == 'GET':
        games = Game.objects.filter(tournament__name=tournament)
        return JsonResponse(GameSerializerGet(games, many=True).data,
                            safe=False)
    elif request.method == 'POST':
        data = JSONParser().parse(request)
        data['tournament'] = Tournament.objects.get(name=tournament).id
        player_names = data.pop('players')
        serializer = GameSerializer(data=data)
        if not serializer.is_valid():
            return JsonResponse(serializer.errors, status=400)
        game = serializer.save()
        # Attach every named player, creating Player rows as needed.
        for name in player_names:
            player_obj, _ = Player.objects.get_or_create(name=name)
            GamePlayer(game=game, player=player_obj).save()
        return JsonResponse(serializer.data, status=201)
def object_type_list(request):
    """
    GET: List all object types.
    POST: Bind a new object type.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB',
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    if request.method == 'GET':
        registered = []
        for key in r.keys("object_type:*"):
            registered.append({
                "name": key.split(":")[1],
                "types_list": r.lrange(key, 0, -1),
            })
        return JSONResponse(registered, status=status.HTTP_200_OK)

    if request.method == "POST":
        payload = JSONParser().parse(request)
        type_name = payload.pop("name", None)
        if not type_name:
            return JSONResponse('Object type must have a name as identifier',
                                status=status.HTTP_400_BAD_REQUEST)
        if r.exists('object_type:' + str(type_name)):
            return JSONResponse('Object type ' + str(type_name) + ' already exists.',
                                status=status.HTTP_400_BAD_REQUEST)
        if "types_list" not in payload or not payload["types_list"]:
            return JSONResponse(
                'Object type must have a types_list defining the valid object types',
                status=status.HTTP_400_BAD_REQUEST)
        if r.rpush('object_type:' + str(type_name), *payload["types_list"]):
            return JSONResponse('Object type has been added in the registy',
                                status=status.HTTP_201_CREATED)
        return JSONResponse('Error storing the object type in the DB',
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    return JSONResponse('Method ' + str(request.method) + ' not allowed.',
                        status=status.HTTP_405_METHOD_NOT_ALLOWED)
def post(self, request):
    """Create an object attributed to the ``username`` in the payload.

    The serializer resolves the username string to a User itself; any
    failure (parse, lookup, save) maps to 400.
    """
    data = JSONParser().parse(request)
    try:
        serializer = ObjCreatedByUserSerializer(data=data)
        if serializer.is_valid():
            # Can pass string in and find user or pass user object in
            serializer.save(username=data.pop("username"))
            return JsonResponse(serializer.data, status=201)
        return HttpResponse(status=400)
    except Exception:
        # Narrowed from a bare ``except``; debug prints of the raw payload
        # and serializer errors were removed.
        return HttpResponse(status=400)
def dogList(request):
    """List all dogs (GET) or register a dog for an existing owner (POST)."""
    if request.method == 'GET':
        return Response(DogSerializer(Dog.objects.all(), many=True).data)
    elif request.method == 'POST':
        payload = JSONParser().parse(request)
        owner_info = payload.pop('owner')
        try:
            owner_obj = DogOwner.objects.get(name=owner_info['name'])
            payload['owner'] = owner_obj.id
            print(payload)
            serializer = DogSerializer(data=payload)
            if serializer.is_valid():
                serializer.save()
                return Response(serializer.data,
                                status=status.HTTP_201_CREATED)
            return Response(serializer.errors,
                            status=status.HTTP_400_BAD_REQUEST)
        except User.DoesNotExist:
            return Response("The user does not exist",
                            status=status.HTTP_400_BAD_REQUEST)
        except DogOwner.DoesNotExist:
            return Response("The specified user is not an owner",
                            status=status.HTTP_400_BAD_REQUEST)
def object_type_list(request):
    """
    GET: List all object types.
    POST: Bind a new object type.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB',
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    if request.method == 'GET':
        listing = []
        for redis_key in r.keys("object_type:*"):
            listing.append({"name": redis_key.split(":")[1],
                            "types_list": r.lrange(redis_key, 0, -1)})
        return JSONResponse(listing, status=status.HTTP_200_OK)

    if request.method == "POST":
        body = JSONParser().parse(request)
        identifier = body.pop("name", None)
        # Reject payloads missing the identifier, duplicates, and empty
        # type lists before touching redis.
        if not identifier:
            return JSONResponse('Object type must have a name as identifier',
                                status=status.HTTP_400_BAD_REQUEST)
        if r.exists('object_type:' + str(identifier)):
            return JSONResponse('Object type ' + str(identifier) + ' already exists.',
                                status=status.HTTP_400_BAD_REQUEST)
        if "types_list" not in body or not body["types_list"]:
            return JSONResponse('Object type must have a types_list defining the valid object types',
                                status=status.HTTP_400_BAD_REQUEST)
        if r.rpush('object_type:' + str(identifier), *body["types_list"]):
            return JSONResponse('Object type has been added in the registy',
                                status=status.HTTP_201_CREATED)
        return JSONResponse('Error storing the object type in the DB',
                            status=status.HTTP_500_INTERNAL_SERVER_ERROR)

    return JSONResponse('Method ' + str(request.method) + ' not allowed.',
                        status=status.HTTP_405_METHOD_NOT_ALLOWED)
def access_control(request):
    """List access-control policies (GET) or create one (POST)."""
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB', status=500)
    if request.method == 'GET':
        acl = []
        project_list = get_project_list()
        try:
            keys = r.keys('acl:*')
            for it in keys:
                # Each hash field is one policy: field = policy id,
                # value = JSON policy body.
                for key, value in r.hgetall(it).items():
                    policy = json.loads(value)
                    to_json_bools(policy, 'list', 'write', 'read')
                    # Key layout: "acl:<project_id>[:<container>]".
                    target_id = it.replace('acl:', '')
                    target_split = target_id.split(':')
                    if len(target_split) > 1:
                        target_name = project_list[target_id.split(':')[0]]+'/'+target_id.split(':')[1]
                    else:
                        target_name = project_list[target_id.split(':')[0]]
                    p = {'id': key, 'target_id': target_id, 'target_name': target_name}
                    p.update(policy)
                    acl.append(p)
        except DataError:
            return JSONResponse("Error retrieving policy", status=400)
        return JSONResponse(acl, status=status.HTTP_200_OK)
    if request.method == 'POST':
        data = JSONParser().parse(request)
        try:
            if data['container_id']:
                key = 'acl:' + data['project_id'] + ':' + data['container_id']
            else:
                key = 'acl:' + data['project_id']
            # Policy ids are allocated from a redis counter.
            acl_id = str(r.incr('acls:id'))
            data.pop('container_id')
            data.pop('project_id')
            data['object_name'] = ', '.join(r.lrange('object_type:' + data['object_type'], 0, -1))
            identity = data.pop('identity')
            access = data.pop('access')
            # Expand the symbolic access level into the three boolean flags.
            if access == 'list':
                data['list'] = True
                data['read'] = False
                data['write'] = False
            elif access == 'read':
                data['list'] = False
                data['read'] = True
                data['write'] = False
            elif access == 'read-write':
                data['list'] = False
                data['read'] = True
                data['write'] = True
            # identity is "user_id:<id>" or "group_id:<id>"; exactly one of
            # the two fields ends up populated.
            if 'user_id' in identity:
                data['user_id'] = identity.replace('user_id:', '')
                data['group_id'] = ''
            elif 'group_id' in identity:
                data['group_id'] = identity.replace('group_id:', '')
                data['user_id'] = ''
            r.hset(key, acl_id, json.dumps(data))
            return JSONResponse("Access control policy created", status=201)
        except DataError:
            return JSONResponse("Error creating policy", status=400)
    return JSONResponse('Method ' + str(request.method) + ' not allowed.', status=405)
def access_control_detail(request, policy_id):
    """
    Get, update or delete a single access control policy.

    ``policy_id`` has the form ``<project_id>[:<container_id>]:<acl_id>``;
    everything before the last ':' is the target, the last segment is the
    acl id inside the redis hash ``acl:<target>``.
    """
    try:
        r = get_redis_connection()
    except RedisError:
        return JSONResponse('Error connecting with DB', status=500)
    target_id = str(policy_id).split(':')[:-1]
    target_id = ':'.join(target_id)
    acl_id = str(policy_id).split(':')[-1]

    if request.method == 'GET':
        try:
            project_list = get_project_list()
            policy_redis = r.hget("acl:" + str(target_id), acl_id)
            # BUG FIX: hget returns None for a missing policy and
            # json.loads(None) raised TypeError (HTTP 500); report 404 instead.
            if policy_redis is None:
                return JSONResponse("Policy not found", status=status.HTTP_404_NOT_FOUND)
            policy = json.loads(policy_redis)
            to_json_bools(policy, 'list', 'write', 'read')
            target_split = target_id.split(':')
            # Container-scoped targets render as "<project>/<container>".
            if len(target_split) > 1:
                target_name = project_list[target_id.split(':')[0]]+'/'+target_id.split(':')[1]
            else:
                target_name = project_list[target_id.split(':')[0]]
            p = {'id': acl_id, 'target_id': target_id, 'target_name': target_name}
            p.update(policy)
            return JSONResponse(p, status=status.HTTP_200_OK)
        except DataError:
            return JSONResponse("Error retrieving policy", status=400)

    if request.method == 'DELETE':
        try:
            r.hdel("acl:" + str(target_id), acl_id)
        except DataError:
            return JSONResponse("Error retrieving policy", status=400)
        return JSONResponse("Access policy correctly removed", status=status.HTTP_200_OK)

    if request.method == 'PUT':
        data = JSONParser().parse(request)
        try:
            policy_redis = r.hget("acl:" + str(target_id), acl_id)
            # BUG FIX: same missing-policy guard as GET.
            if policy_redis is None:
                return JSONResponse("Policy not found", status=status.HTTP_404_NOT_FOUND)
            policy = json.loads(policy_redis)
            access = data.pop('access')
            # Translate the symbolic access level into explicit capability flags.
            if access == 'list':
                data['list'] = True
                data['read'] = False
                data['write'] = False
            elif access == 'read':
                data['list'] = False
                data['read'] = True
                data['write'] = False
            elif access == 'read-write':
                data['list'] = False
                data['read'] = True
                data['write'] = True
            policy.update(data)
            policy['object_name'] = ', '.join(r.lrange('object_type:' + policy['object_type'], 0, -1))
            r.hset("acl:" + str(target_id), acl_id, json.dumps(policy))
            return JSONResponse('Data updated', status=status.HTTP_201_CREATED)
        except DataError:
            return JSONResponse("Error creating policy", status=400)

    return JSONResponse('Method ' + str(request.method) + ' not allowed.',
                        status=405)
def post(self, request, *args, **kwargs):
    """
    Import a course from an uploaded ``.tar.gz`` archive.

    The archive is expected to contain one ``.json`` file describing the
    course plus the media files (thumbnails, author pictures, course
    material) referenced by it. If a course with the same slug already
    exists: importing is refused when it has started, and requires the
    ``force`` flag otherwise.

    Returns a DRF ``Response`` with either the edit URL of the imported
    course or an ``error`` key (``course_started`` / ``course_exists`` /
    ``invalid_file``).
    """
    # NOTE(review): request.FILES.get(...) may be None if the field is
    # missing — tarfile.open would then fail; presumably the form always
    # sends it. TODO confirm.
    import_file = tarfile.open(fileobj=request.FILES.get("course-import-file"), mode="r:gz")
    file_names = import_file.getnames()
    # Assumes exactly one JSON manifest in the archive; first match wins.
    json_file_name = [s for s in file_names if ".json" in s][0]
    json_file = import_file.extractfile(json_file_name)
    stream = BytesIO(json_file.read())
    course_data = JSONParser().parse(stream)
    course_slug = course_data.get("slug")
    try:
        course = Course.objects.get(slug=course_slug)
        # Refuse to overwrite a running course; otherwise require an
        # explicit "force" flag to replace an existing one.
        if course.has_started:
            return Response({"error": "course_started"})
        elif not request.DATA.get("force"):
            return Response({"error": "course_exists"})
    except Course.DoesNotExist:
        course = None
    # Media paths are popped out so the serializer only sees plain fields.
    course_thumbnail_path = course_data.pop("thumbnail")
    course_home_thumbnail_path = course_data.pop("home_thumbnail")
    # Save course professor images: remember each author's picture path,
    # keyed by author name, for attachment after the serializer runs.
    course_author_pictures = {}
    for course_author in course_data.get("course_authors"):
        author_name = course_author.get("name")
        picture_path = course_author.pop("picture")
        if picture_path and author_name:
            # Strip the first two path components (e.g. a "/media/" prefix)
            # so the path matches the archive member names.
            picture_path = picture_path.split("/", 2)[-1]
            course_author_pictures[author_name] = picture_path
    # save course material images
    course_material = course_data.get("course_material")
    course_material_files = []
    if course_material:
        course_material_files = course_data["course_material"].pop("files")
        # course_material_files = course_material.pop('files')
    # Update the existing course or create a new one.
    if course:
        course_serializer = CourseImportSerializer(course, data=course_data)
    else:
        course_serializer = CourseImportSerializer(data=course_data)
    if course_serializer.is_valid():
        course_obj = course_serializer.save()
        # save thumbnail and home thumbnail
        if course_thumbnail_path and course_thumbnail_path in file_names:
            course_thumbnail_file = import_file.extractfile(course_thumbnail_path)
            course_obj.thumbnail = DjangoFile(course_thumbnail_file)
        if course_home_thumbnail_path and course_home_thumbnail_path in file_names:
            course_home_thumbnail_file = import_file.extractfile(course_home_thumbnail_path)
            course_obj.home_thumbnail = DjangoFile(course_home_thumbnail_file)
        # Extract and attach the course material files.
        course_material_files_list = []
        for course_material_file in course_material_files:
            course_material_file_path = course_material_file.get("file")
            course_material_file_obj = import_file.extractfile(course_material_file_path)
            course_material_files_list.append(TimtecFile(file=DjangoFile(course_material_file_obj)))
        course_obj.course_material.files = course_material_files_list
        # NOTE(review): course_material can be None when the manifest has no
        # course_material key — this line would then raise TypeError. Confirm
        # all exported archives include course_material.
        course_obj.course_material.text = course_material["text"]
        course_obj.course_material.save()
        # Attach author pictures collected above, matched by author name.
        for course_author in course_obj.course_authors.all():
            picture_path = course_author_pictures.get(course_author.name)
            if picture_path and picture_path in file_names:
                picture_file_obj = import_file.extractfile(picture_path)
                course_author.picture = DjangoFile(picture_file_obj)
                course_author.save()
        course_obj.save()
        # NOTE(review): serializer.object is the old DRF 2.x accessor.
        return Response(
            {
                "new_course_url": reverse_lazy(
                    "administration.edit_course", kwargs={"course_id": course_serializer.object.id}
                )
            }
        )
    else:
        return Response({"error": "invalid_file"})
def post(self, request, *args, **kwargs):
    """
    Import a course from an uploaded ``.tar.gz`` archive.

    The archive contains one ``.json`` course manifest plus the media
    files (thumbnails, author pictures, course material files) that it
    references. If a course with the same slug exists: import is refused
    when the course has started, and requires the ``force`` flag
    otherwise.

    Returns a DRF ``Response`` with either the edit URL of the imported
    course or an ``error`` key (``course_started`` / ``course_exists`` /
    ``invalid_file``).
    """
    # NOTE(review): request.FILES.get(...) may be None if the upload field
    # is missing — tarfile.open would then fail. TODO confirm the form
    # always sends it.
    import_file = tarfile.open(fileobj=request.FILES.get('course-import-file'), mode='r:gz')
    file_names = import_file.getnames()
    # Assumes exactly one JSON manifest in the archive; first match wins.
    json_file_name = [s for s in file_names if '.json' in s][0]
    json_file = import_file.extractfile(json_file_name)
    stream = BytesIO(json_file.read())
    course_data = JSONParser().parse(stream)
    course_slug = course_data.get('slug')
    try:
        course = Course.objects.get(slug=course_slug)
        # Refuse to overwrite a running course; otherwise require an
        # explicit "force" flag to replace an existing one.
        if course.has_started:
            return Response({'error': 'course_started'})
        elif not request.DATA.get('force'):
            return Response({'error': 'course_exists'})
    except Course.DoesNotExist:
        course = None
    # Media paths are popped out so the serializer only sees plain fields.
    course_thumbnail_path = course_data.pop('thumbnail')
    course_home_thumbnail_path = course_data.pop('home_thumbnail')
    # Save course professor images: remember each author's picture path,
    # keyed by author name, for attachment after the serializer runs.
    course_author_pictures = {}
    for course_author in course_data.get('course_authors'):
        author_name = course_author.get('name')
        picture_path = course_author.pop('picture')
        if picture_path and author_name:
            # Strip the first two path components (e.g. a "/media/" prefix)
            # so the path matches the archive member names.
            picture_path = picture_path.split('/', 2)[-1]
            course_author_pictures[author_name] = picture_path
    # save course material images
    course_material = course_data.get('course_material')
    course_material_files = []
    if course_material:
        course_material_files = course_data['course_material'].pop('files')
        # course_material_files = course_material.pop('files')
    # Update the existing course or create a new one.
    if course:
        course_serializer = CourseImportSerializer(course, data=course_data)
    else:
        course_serializer = CourseImportSerializer(data=course_data)
    if course_serializer.is_valid():
        course_obj = course_serializer.save()
        # save thumbnail and home thumbnail
        if course_thumbnail_path and course_thumbnail_path in file_names:
            course_thumbnail_file = import_file.extractfile(course_thumbnail_path)
            course_obj.thumbnail = DjangoFile(course_thumbnail_file)
        if course_home_thumbnail_path and course_home_thumbnail_path in file_names:
            course_home_thumbnail_file = import_file.extractfile(course_home_thumbnail_path)
            course_obj.home_thumbnail = DjangoFile(course_home_thumbnail_file)
        # Extract and attach the course material files.
        course_material_files_list = []
        for course_material_file in course_material_files:
            course_material_file_path = course_material_file.get('file')
            course_material_file_obj = import_file.extractfile(course_material_file_path)
            course_material_files_list.append(TimtecFile(file=DjangoFile(course_material_file_obj)))
        course_obj.course_material.files = course_material_files_list
        # Attach author pictures collected above, matched by author name.
        for course_author in course_obj.course_authors.all():
            picture_path = course_author_pictures.get(course_author.name)
            if picture_path and picture_path in file_names:
                picture_file_obj = import_file.extractfile(picture_path)
                course_author.picture = DjangoFile(picture_file_obj)
                course_author.save()
        course_obj.save()
        # NOTE(review): serializer.object is the old DRF 2.x accessor.
        return Response({'new_course_url': reverse_lazy('administration.edit_course',
                                                        kwargs={'course_id': course_serializer.object.id}), })
    else:
        return Response({'error': 'invalid_file'})
def post(self, request, *args, **kwargs):
    """
    Import a course from an uploaded ``.tar.gz`` archive.

    The archive contains one ``.json`` course manifest plus the media
    files (thumbnails, author pictures, course material, image
    activities) that it references. If a course with the same slug
    exists: import is refused when the course has started, and requires
    the ``force`` flag otherwise.

    Returns a DRF ``Response`` with either the edit URL of the imported
    course or an ``error`` key (``course_started`` / ``course_exists`` /
    ``invalid_file``).
    """
    # NOTE(review): request.FILES.get(...) may be None if the upload field
    # is missing — tarfile.open would then fail. TODO confirm the form
    # always sends it.
    import_file = tarfile.open(fileobj=request.FILES.get('course-import-file'), mode='r:gz')
    file_names = import_file.getnames()
    # Assumes exactly one JSON manifest in the archive; first match wins.
    json_file_name = [s for s in file_names if '.json' in s][0]
    json_file = import_file.extractfile(json_file_name)
    stream = BytesIO(json_file.read())
    course_data = JSONParser().parse(stream)
    course_slug = course_data.get('slug')
    try:
        course = Course.objects.get(slug=course_slug)
        # Refuse to overwrite a running course; otherwise require an
        # explicit "force" flag to replace an existing one.
        if course.has_started:
            return Response({'error': 'course_started'})
        elif not request.DATA.get('force'):
            return Response({'error': 'course_exists'})
    except Course.DoesNotExist:
        course = None
    # Media paths are popped out so the serializer only sees plain fields;
    # the leading "/media/" prefix is stripped to match archive member names.
    course_thumbnail_path = course_data.pop('thumbnail')
    if course_thumbnail_path:
        course_thumbnail_path = course_thumbnail_path.replace("/media/", "", 1)
    course_home_thumbnail_path = course_data.pop('home_thumbnail')
    if course_home_thumbnail_path:
        course_home_thumbnail_path = course_home_thumbnail_path.replace("/media/", "", 1)
    # Save course professor images: remember each author's picture path,
    # keyed by author name, for attachment after the serializer runs.
    course_author_pictures = {}
    for key, course_author in enumerate(course_data.get('course_authors')):
        # fix to truncate course author name to size 30 (why size 30?????)
        course_data['course_authors'][key]['name'] = course_data['course_authors'][key]['name'][:30]
        author_name = course_author.get('name')
        picture_path = course_author.pop('picture')
        if picture_path and author_name:
            # Strip the first two path components (e.g. a "/media/" prefix)
            # so the path matches the archive member names.
            picture_path = picture_path.split('/', 2)[-1]
            course_author_pictures[author_name] = picture_path
    # save course material images
    course_material = course_data.get('course_material')
    course_material_files = []
    if course_material:
        course_material_files = course_data['course_material'].pop('files')
        # course_material_files = course_material.pop('files')
    # If there are any activities of 'image' type, its files must be given to django now
    for lesson in course_data['lessons']:
        for unit in lesson['units']:
            for activity in unit['activities']:
                if activity['type'] == 'image':
                    try:
                        image_path = activity.pop('image').replace("/media/", "", 1)
                        new_activity = Activity.objects.create(
                            type='image',
                            image=DjangoFile(import_file.extractfile(image_path))
                        )
                        activity['id'] = new_activity.id
                    except AttributeError:
                        # This activity image has no file
                        pass
    # Update the existing course or create a new one.
    if course:
        course_serializer = CourseImportSerializer(course, data=course_data)
    else:
        course_serializer = CourseImportSerializer(data=course_data)
    if course_serializer.is_valid():
        course_obj = course_serializer.save()
        # save thumbnail and home thumbnail
        if course_thumbnail_path and course_thumbnail_path in file_names:
            course_thumbnail_file = import_file.extractfile(course_thumbnail_path)
            course_obj.thumbnail = DjangoFile(course_thumbnail_file)
        if course_home_thumbnail_path and course_home_thumbnail_path in file_names:
            course_home_thumbnail_file = import_file.extractfile(course_home_thumbnail_path)
            course_obj.home_thumbnail = DjangoFile(course_home_thumbnail_file)
        # save course material files
        course_material_files_list = []
        for course_material_file in course_material_files:
            course_material_file_path = course_material_file.get('file').replace("/media/", "", 1)  # remove unnecessary "media" path, if any
            try:
                course_material_file_obj = import_file.extractfile(course_material_file_path)
                course_material_files_list.append(TimtecFile(file=DjangoFile(course_material_file_obj)))
            except KeyError:
                # File listed in the manifest but missing from the archive.
                pass
        # BUG FIX: course_material can be None when the manifest has no
        # course_material key; indexing it raised TypeError. Only attach
        # material when it exists.
        if course_material:
            course_obj.course_material.files = course_material_files_list
            course_obj.course_material.text = course_material['text']
            course_obj.course_material.save()
        # If the course has authors, save their pictures, if any
        for course_author in course_obj.course_authors.all():
            picture_path = course_author_pictures.get(course_author.name)
            # BUG FIX: .get() returns None for authors without a picture and
            # the unconditional .replace() raised AttributeError; guard first.
            if picture_path:
                picture_path = picture_path.replace("/media/", "", 1)
            if picture_path and picture_path in file_names:
                picture_file_obj = import_file.extractfile(picture_path)
                course_author.picture = DjangoFile(picture_file_obj)
                course_author.save()
        # Save all changes in the new imported course
        course_obj.save()
        return Response({'new_course_url': reverse_lazy('administration.edit_course',
                                                        kwargs={'course_id': course_obj.id}), })
    else:
        return Response({'error': 'invalid_file'})