def render(self, data, accepted_media_type=None, renderer_context=None):
    """Serialize `data` to a JSON bytestring."""
    if data is None:
        return b''
    renderer_context = renderer_context or {}
    indent = self.get_indent(accepted_media_type, renderer_context)
    if indent is not None:
        separators = INDENT_SEPARATORS
    elif self.compact:
        separators = SHORT_SEPARATORS
    else:
        separators = LONG_SEPARATORS
    rendered = json.dumps(
        data,
        cls=self.encoder_class,
        indent=indent,
        ensure_ascii=self.ensure_ascii,
        allow_nan=not self.strict,
        separators=separators,
    )
    # Escape the unicode line/paragraph separators so the output is also a
    # strict JavaScript subset.
    # See: http://timelessrepo.com/json-isnt-a-javascript-subset
    rendered = rendered.replace('\u2028', '\\u2028')
    rendered = rendered.replace('\u2029', '\\u2029')
    return rendered.encode()
def render(self, data, media_type=None, renderer_context=None):
    """Render serialized data as a BOM-prefixed UTF-8 CSV bytestring."""
    context = renderer_context or {}
    request = context['request']
    view = context['view']
    template = request.query_params.get('template', 'export')
    # Round-trip through JSON so only plain builtin types reach the table
    # builder (dates, decimals etc. become strings).
    plain_data = json.loads(json.dumps(data, cls=encoders.JSONEncoder))
    # An import template only needs a single example row.
    if template == 'import' and plain_data:
        plain_data = [plain_data[0]]
    try:
        serializer = view.get_serializer()
        self.set_response_disposition(serializer, context)
    except Exception as exc:
        logger.debug(exc, exc_info=True)
        return 'The resource not support export!'.encode('utf-8')
    fields = serializer.get_fields()
    header = self._get_header(fields, template)
    labels = {name: field.label
              for name, field in fields.items() if field.label}
    buffer = BytesIO()
    buffer.write(codecs.BOM_UTF8)  # BOM so Excel detects UTF-8
    writer = unicodecsv.writer(buffer, encoding='utf-8')
    for row in self._gen_table(plain_data, header, labels):
        writer.writerow(row)
    return buffer.getvalue()
def render(self, data, accepted_media_type=None, renderer_context=None):
    """Serialize `data` to a JSON bytestring (Python 2/3 compatible)."""
    if data is None:
        return b''
    renderer_context = renderer_context or {}
    indent = self.get_indent(accepted_media_type, renderer_context)
    if indent is not None:
        separators = INDENT_SEPARATORS
    else:
        separators = SHORT_SEPARATORS if self.compact else LONG_SEPARATORS
    result = json.dumps(
        data,
        cls=self.encoder_class,
        indent=indent,
        ensure_ascii=self.ensure_ascii,
        allow_nan=not self.strict,
        separators=separators,
    )
    if not isinstance(result, six.text_type):
        # Python 2 with ensure_ascii=True already produced a bytestring,
        # which cannot contain \u2028/\u2029 — nothing to escape.
        return result
    # Escape the unicode line/paragraph separators so the output is also a
    # strict JavaScript subset.
    # See: http://timelessrepo.com/json-isnt-a-javascript-subset
    result = result.replace('\u2028', '\\u2028').replace('\u2029', '\\u2029')
    return bytes(result.encode('utf-8'))
def as_form_field(self):
    """Return a copy of this field with its value rendered as pretty JSON."""
    value = self.value
    # A JSONString (marked via `is_json_string`) means the raw, invalid
    # HTML form input should be redisplayed as-is, not re-serialized.
    already_raw = getattr(value, 'is_json_string', False)
    if not already_raw:
        try:
            value = json.dumps(self.value, sort_keys=True, indent=4)
        except (TypeError, ValueError):
            # Not JSON-serializable: keep the original value untouched.
            pass
    return self.__class__(self._field, value, self.errors, self._prefix)
def test_dumps(self):
    """The strict JSON wrapper must reject non-finite floats."""
    for bad_value in (float('inf'), float('nan')):
        with self.assertRaises(ValueError):
            json.dumps(bad_value)
def test_user_with_lease_can_create_hidden_issues(self):
    """Hidden issues are listed for logged-in leaseholders only.

    Creates one visible and one hidden issue, then checks the issue index
    anonymously and while logged in; finally a second leaseholder creates
    another hidden issue and sees all three.
    """
    house = create_house("Test House", create_agency("*****@*****.**"))
    user = create_student_with_lease("*****@*****.**", house)
    self.client.force_login(user)
    # Create a visible issue.
    post = json.dumps(
        {
            "title": "Big Issue Not Hidden",
            "houseid": house.id,
            "message": "Some sensible message here"
        },
        indent=4,
        sort_keys=True,
        default=str)
    response = self.client.post(reverse("issues:api-issue-create",
                                        args=(house.id, )),
                                data=post,
                                content_type="application/json")
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    # Create a hidden issue.
    post = json.dumps(
        {
            "title": "Big Issue Hidden",
            'hidden': "True",
            "houseid": house.id,
            "message": "Some sensible message here"
        },
        indent=4,
        sort_keys=True,
        default=str)
    response = self.client.post(reverse("issues:api-issue-create",
                                        args=(house.id, )),
                                data=post,
                                content_type="application/json")
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    # Anonymous visitors only see the visible issue.
    self.client.logout()
    response = self.client.get(
        reverse("issues:api-issue-index", args=(house.id, )))
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertContains(response, "Big Issue Not Hidden")
    self.assertContains(response, "\"id\":1")
    self.assertNotContains(response, "Big Issue Hidden")
    self.assertNotContains(response, "\"id\":2")
    # The leaseholder sees both.
    self.client.force_login(user)
    response = self.client.get(
        reverse("issues:api-issue-index", args=(house.id, )))
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertContains(response, "Big Issue Not Hidden")
    self.assertContains(response, "\"id\":1")
    self.assertContains(response, "Big Issue Hidden")
    self.assertContains(response, "\"id\":2")
    # A second leaseholder in the same house creates another hidden issue.
    another_user = create_student_with_lease("*****@*****.**", house)
    self.client.force_login(another_user)
    post = json.dumps(
        {
            "title": "Another issue that is hidden",
            'hidden': "True",
            "houseid": house.id,
            "message": "Wow much issue"
        },
        indent=4,
        sort_keys=True,
        default=str)
    response = self.client.post(reverse("issues:api-issue-create",
                                        args=(house.id, )),
                                data=post,
                                content_type="application/json")
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    # That leaseholder sees all three issues, hidden ones included.
    response = self.client.get(
        reverse("issues:api-issue-index", args=(house.id, )))
    self.assertEqual(response.status_code, status.HTTP_200_OK)
    self.assertContains(response, "Big Issue Not Hidden")
    self.assertContains(response, "\"id\":1")
    self.assertContains(response, "Big Issue Hidden")
    self.assertContains(response, "\"id\":2")
    self.assertContains(response, "Another issue that is hidden")
    self.assertContains(response, "\"id\":3")
def get(self, request, vServerId):
    """Return disk-usage time series for the given server as chart JSON.

    Covers the last 60 days, error-free samples only, thinned to at most
    720 points per mount point.  The response is a list of
    {'name': <label>, 'series': [{'name': <iso ts>, 'value': <pct>}]}.

    Refactor: the original repeated the per-mount-point list, thinning
    loop and append seven times; a single mount-point table drives all of
    it now, with identical output order and labels.
    """
    print('vServerId=' + str(vServerId))
    daysToDisplay = 60
    dataPoints = 720
    afterDttm = timezone.now() - timezone.timedelta(days=daysToDisplay)
    mountPoints = Metrics_MountPoint.objects \
        .filter(server_id=vServerId) \
        .filter(created_dttm__gte=afterDttm) \
        .filter(error_cnt=0) \
        .exclude(mount_point='') \
        .order_by('-created_dttm')
    # Mount point -> chart label, in the order the series must appear.
    seriesNames = [
        ('/', '/'),
        ('/opt/pgsql/data', 'data'),
        ('/opt/pgsql/logs', 'logs'),
        ('/opt/pgsql/backups', 'backups'),
        ('/home', 'home'),
        ('/tmp', 'tmp'),
        ('C', 'C'),
    ]
    series = {mount: [] for mount, _ in seriesNames}
    for a in mountPoints:
        if a.mount_point not in series:
            continue  # unknown mount points are not charted
        myDateStr = a.created_dttm.strftime("%Y-%m-%dT%H:%M:%S.000Z")
        series[a.mount_point].append({'name': myDateStr,
                                      'value': str(a.used_pct)})
    dataList = []
    for mount, label in seriesNames:
        points = series[mount]
        # Thin each series down to `dataPoints` entries by dropping random
        # samples, preserving the overall shape of the curve.
        while len(points) > dataPoints:
            points.pop(randint(0, len(points) - 1))
        dataList.append({'name': label, 'series': points})
    mountPointGraphData = json.dumps(dataList)
    return HttpResponse(mountPointGraphData, status=status.HTTP_200_OK)
def render(self, data, media_type=None, renderer_context=None):
    """Render the derived structure of `data` as indented JSON bytes."""
    structure = self.get_structure(data)
    serialized = json.dumps(structure, indent=4)
    return serialized.encode('utf-8')
def set_context(self, renderer_context, swagger):
    """Add the serialized swagger-ui settings to the template context."""
    super(SwaggerUIRenderer, self).set_context(renderer_context, swagger)
    settings_json = json.dumps(self.get_swagger_ui_settings())
    renderer_context['swagger_settings'] = settings_json
def test_post_item(self):
    """This payload must be rejected by the create endpoint with HTTP 400."""
    payload = {'title': 'Some rando title', 'content': 'some more content'}
    url = api_reverse('api-postings:post-listcreate')
    response = self.client.post(url, json.dumps(payload), format='json')
    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
from rest_framework.response import Response
from rest_framework.decorators import api_view
from rest_framework.parsers import JSONParser
from rest_framework import status
from rest_framework.utils import json

from .models import Banknote
from .serializers import BanknoteSerializer

# Canned JSON body for failed-withdrawal responses.
success_false = json.dumps({
    "success": False,
})


def amount_recursion(amount_tmp, lst, i, result):
    """Greedily break `amount_tmp` into banknotes, largest first.

    `lst` is a list of {'value': ..., 'quantity': ...} dicts ordered from
    the largest to the smallest denomination; quantities are decremented
    in place as notes are used.  Dispensed notes are appended to `result`
    as {'value': ..., 'quantity': ...} entries (one entry per run of a
    denomination).  Returns the remainder that could not be dispensed
    (0 on full success).

    Fix: removed the debug ``print`` that dumped the full state on every
    recursive call.
    """
    if amount_tmp == 0 or i > len(lst) - 1:
        return amount_tmp
    if amount_tmp >= lst[i].get('value') and lst[i].get('quantity'):
        amount_tmp -= lst[i].get('value')
        lst[i]['quantity'] -= 1
        # Start a new result entry when the denomination changes.
        if not len(result) or result[-1].get('value') != lst[i].get('value'):
            result.append({'value': lst[i].get('value'), 'quantity': 0})
        result[-1]['quantity'] += 1
        return amount_recursion(amount_tmp, lst, i, result)
    return amount_recursion(amount_tmp, lst, i + 1, result)
def get(self, request, *args, **kwargs):
    """Return the number of unhandled warnings as a JSON payload."""
    pending = warning.objects.filter(deal=False)
    payload = {"code": 200, "msg": "获取成功", "warn_num": len(pending)}
    return HttpResponse(json.dumps(payload))
def get(self, request, *args, **kwargs):
    """Return the warning identified by the `warn_id` query param as JSON."""
    warn_id = request.GET.get("warn_id")
    record = warning.objects.filter(warn_id=warn_id).first()
    serialized = MySerializer1(instance=record, many=False)
    return HttpResponse(json.dumps(serialized.data))
def get(self, *args, **kwargs):
    """Geocode `location` via MapQuest, fetch hourly/daily weather from
    OpenWeatherMap and return chart-ready JSON statistics.

    Fixes: the geocoding `url` assignment had been commented out while
    still being used (NameError); `time` was used without being imported;
    the weekly humidity average hard-coded a divisor of 8 instead of the
    actual number of daily samples; `map` shadowed the builtin; the
    49-point average series recomputed the same mean 49 times.
    """
    import urllib.request
    import urllib.parse
    import json
    import time
    import statistics
    import requests

    mapquestapi_key = "kwRexg4bqOtJGT6DFsibO8KHJVxYfenj"
    location = self.request.GET.get('location').strip().replace(" ", ",")
    # Geocode the free-form location string.
    url = ('http://www.mapquestapi.com/geocoding/v1/address'
           '?key={}&location={}'.format(mapquestapi_key, location))
    geocode_raw = urllib.request.urlopen(url).read().decode('utf-8')
    geocode = json.loads(geocode_raw)
    locations = geocode['results'][0]['locations']
    if len(locations) > 1:
        # NOTE: original message kept except for the "to many" typo.
        message = "Please be more specific, too many locations found..."
        return HttpResponse(message, content_type='text/html', status=200)
    map_url = locations[0]['mapUrl']
    lat = locations[0]["displayLatLng"]["lat"]
    lon = locations[0]["displayLatLng"]["lng"]

    key = "3b54bea1042da60ea275c9e518636242"
    weather_url = "https://api.openweathermap.org/data/2.5/onecall"
    querystring = {
        "appid": key,
        "lat": lat,
        "lon": lon,
        "units": "metric",
    }
    weather_response = requests.request("GET", weather_url,
                                        params=querystring)
    weather = json.loads(weather_response.text)
    last_2_days = weather['hourly']  # hourly samples (~48)
    last_week = weather['daily']     # daily samples (~8)

    week_data = {
        'temp_min': [],
        'temp_max': [],
        'temp_average': [],
        'humidity': [],
        'dew_point': [],
        'wind_speed': [],
        'labels': [],
    }
    days_data = {
        'temp_data': [],
        'humidity': [],
        'dew_point': [],
        'wind_speed': [],
        'labels': [],
    }
    humidity_total = 0
    for day in last_week:
        week_data['temp_min'].append(day['temp']['min'])
        week_data['temp_max'].append(day['temp']['max'])
        week_data['humidity'].append(day['humidity'])
        week_data['dew_point'].append(day['dew_point'])
        week_data['wind_speed'].append(day['wind_speed'])
        week_data['labels'].append(
            time.strftime("%Y/%m/%d", time.localtime(day['dt'])))
        humidity_total += day['humidity']
    # Average over the actual number of daily samples (was hard-coded 8).
    weekly_humidity = humidity_total / len(last_week) if last_week else 0

    for hour in last_2_days:
        days_data['temp_data'].append(hour['temp'])
        days_data['humidity'].append(hour['humidity'])
        days_data['dew_point'].append(hour['dew_point'])
        days_data['wind_speed'].append(hour['wind_speed'])
        days_data['labels'].append(
            time.strftime("%Y/%m/%d", time.localtime(hour['dt'])))

    # The charts expect 49 constant "average" points; compute each mean once.
    mean_hourly_humidity = statistics.mean(days_data['humidity'])
    mean_hourly_temp = statistics.mean(days_data['temp_data'])
    average_humidity = [mean_hourly_humidity] * 49
    average_temp = [mean_hourly_temp] * 49

    data = {
        "days_data": days_data,
        "week_data": week_data,
        "image_src": map_url,
        "weekly_humidity": weekly_humidity,
        "average_temp": statistics.mean(week_data['temp_min']),
        "average_daily_humidity": mean_hourly_humidity,
        "average_daily_temp": mean_hourly_temp,
        "chart_average_daily_humidity": average_humidity,
        "chart_average_daily_temp": average_temp,
    }
    return HttpResponse(json.dumps(data), content_type='application/json',
                        status=200)
def test_create_invalid_course(self):
    """An invalid payload must yield HTTP 400 from the create endpoint."""
    url = reverse('get_post_course')
    response = client.post(url,
                           data=json.dumps(self.invalid_payload),
                           content_type='application/json')
    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_invalid_update_course(self):
    """An invalid payload must yield HTTP 400 from the update endpoint."""
    url = reverse('get_delete_update_course', kwargs={'pk': self.course.pk})
    response = client.put(url,
                          data=json.dumps(self.invalid_payload),
                          content_type='application/json')
    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def destroy(self, request, *args, **kwargs):
    """
    An admin can soft-delete a TimeSlot instance. From an API user
    perspective, this is no different from a normal delete. The deletion
    will automatically cancel associated reservations and refund used
    tickets to the registered users.
    """
    instance = self.get_object()
    data = request.data
    # The serializer is run only to surface field-level validation errors
    # on the optional `force_delete` / `custom_message` inputs.
    serializer = serializers.TimeSlotSerializer(data=data)
    serializer.is_valid()
    if 'force_delete' in serializer.errors:
        raise rest_framework.serializers.ValidationError(
            {'force_delete': serializer.errors['force_delete']})
    if 'custom_message' in serializer.errors:
        raise rest_framework.serializers.ValidationError(
            {'custom_message': serializer.errors['custom_message']})
    # Deleting a slot with active reservations requires explicit consent
    # via `force_delete=True`.
    if instance.reservations.filter(is_active=True).exists():
        if not data.get('force_delete'):
            raise rest_framework.serializers.ValidationError({
                "non_field_errors": [
                    _("Trying to do a TimeSlot deletion that affects "
                      "users without providing `force_delete` field set "
                      "to True.")
                ]
            })
    custom_message = data.get('custom_message')
    reservation_cancel = instance.reservations.filter(is_active=True)
    affected_users = User.objects.filter(
        reservations__in=reservation_cancel)
    with transaction.atomic():
        reservations_cancel_copy = copy(reservation_cancel)
        # The sequence is important here because the Queryset are
        # dynamically changing when doing update(). If the
        # `reservation_cancel` queryset objects are updated first, the
        # queryset will become empty since it was filtered using
        # "is_active=True". That would lead to an empty `affected_users`
        # queryset.
        #
        # For-loop required to handle duplicates (if user has multiple
        # reservations that must be canceled).
        # user.update(tickets=F('tickets') + 1)
        for user in affected_users:
            User.objects.filter(email=user.email).update(
                tickets=F('tickets') + 1)  # Increment tickets
        reservation_cancel.update(
            is_active=False,
            cancelation_reason='TD',  # TimeSlot deleted
            cancelation_date=timezone.now(),
        )
        instance.delete()
        # Notify every affected reservation holder by email; a send
        # failure is logged and re-raised, rolling back the transaction.
        for reservation in reservations_cancel_copy:
            merge_data = {
                'TIMESLOT_LIST': [instance],
                'SUPPORT_EMAIL': settings.SUPPORT_EMAIL,
                'CUSTOM_MESSAGE': custom_message,
            }
            plain_msg = render_to_string("cancelation.txt", merge_data)
            msg_html = render_to_string("cancelation.html", merge_data)
            try:
                response_send_mail = django_send_mail(
                    "Annulation d'un bloc de rédaction",
                    plain_msg,
                    settings.DEFAULT_FROM_EMAIL,
                    [reservation.user.email],
                    html_message=msg_html,
                )
                EmailLog.add(reservation.user.email, 'cancelation',
                             response_send_mail)
            except Exception as err:
                additional_data = {
                    'title': "Annulation d'un bloc de rédaction",
                    'default_from': settings.DEFAULT_FROM_EMAIL,
                    'user_email': reservation.user.email,
                    'merge_data': merge_data,
                    'template': 'cancelation'
                }
                Log.error(source='SENDING_BLUE_TEMPLATE',
                          message=err,
                          additional_data=json.dumps(additional_data))
                raise
    return Response(status=status.HTTP_204_NO_CONTENT)
def render(self, data, media_type=None, renderer_context=None):
    """Render `data` as UTF-8 encoded, 2-space-indented JSON."""
    text = json.dumps(data, indent=2)
    return text.encode('utf-8')
def get(self, request, *args, **kwargs):
    """Return all unhandled warnings as a JSON list."""
    pending = warning.objects.filter(deal=False)
    serialized = MySerializer(instance=pending, many=True)
    return HttpResponse(json.dumps(serialized.data))
def test_view_set(self):
    """Posting `self.data` to the videos endpoint must return HTTP 201."""
    response = self.client.post(reverse('videos'),
                                data=json.dumps(self.data),
                                content_type='application/json')
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
draw.text((20, height - 30), 'Diseñador: {} {}\nFecha: {}'.format(i['designer_name'], i['designer_last_name'], i['created_date']), fill="white") # Obtener nombre de archivo file = '{}'.format(i['original_file']) position = file.index('.') filename = file[9:position] + '' + now print(filename) # Se guarda nueva imagen img.save(settings.MEDIA_ROOT + '/process/' + filename + '.png') # Se actualiza estado y ruta del diseño procesado data = { "id": '{}'.format(i['id']), "original_file": '{}'.format(i['original_file']), "process_file": 'process/' + filename + '.png' } r = requests.put(url_update, data=json.dumps(data), timeout=10) # registro en log de eventos now = datetime.utcnow().strftime('%Y-%m-%d%H-%M-%S-%f')[:-3] f.write("Hora: %a -> Convirtio archivo %a a archivo process/ %a .png \r" % (now, '{}'.format(i['original_file']), filename)) now = datetime.utcnow().strftime('%Y-%m-%d%H-%M-%S-%f')[:-3] f.write("Hora: %a -> Finaliza proceso \r\n" % (now)) f.close()
def get(self, request, *args, **kwargs):
    """Return all active work areas as a JSON list."""
    active = WorkAreaModel.objects.filter(workArea_status=True)
    serialized = ActiveSerializer(instance=active, many=True)
    return HttpResponse(json.dumps(serialized.data))
def server_error(request, *args, **kwargs):
    """Custom 500 handler returning a small JSON body."""
    payload = json.dumps('Server Error (500)')
    return HttpResponse(payload,
                        status=httpcodes.HTTP_500_INTERNAL_SERVER_ERROR,
                        content_type='application/json')
def set_context(self, renderer_context, swagger):
    """Inject the serialized ReDoc settings into the template context."""
    super(ReDocRenderer, self).set_context(renderer_context, swagger)
    settings_json = json.dumps(self.get_redoc_settings())
    renderer_context['redoc_settings'] = settings_json
def bad_request(request, exception, *args, **kwargs):
    """Custom 400 handler returning a small JSON body.

    Fix: the original returned a DRF ``Response``, but custom error
    handlers run outside the DRF view cycle, so no renderer is attached
    and rendering the response fails.  A plain ``HttpResponse`` is used
    instead, matching the sibling ``server_error`` handler.
    """
    from django.http import HttpResponse
    payload = json.dumps('Bad Request (400)')
    return HttpResponse(payload,
                        status=httpcodes.HTTP_400_BAD_REQUEST,
                        content_type='application/json')
def test_serializer_base_client_id_mutation_serializer_relay_id_field_with_method_name(
):
    """SerializerRelayIDField with `method_name` resolves the relay ID via
    the named classmethod and rejects malformed or wrongly-typed IDs."""
    class Serializer(serializers.Serializer):
        # `resolve_id` maps the decoded object id onto the `book` source.
        id = SerializerRelayIDField(BookType, method_name="resolve_id",
                                    source="book")
        title = serializers.CharField(read_only=True)

        @classmethod
        def resolve_id(cls, object_id, **kwargs):
            assert object_id == "2"
            return Book(title="resolved title")

        def create(self, validated_data):
            return validated_data.get("book")

    class SerializerMutation(SerializerClientIDCreateMutation):
        class Meta:
            serializer_class = Serializer

    class Mutation(graphene.ObjectType):
        serializer_mutation = SerializerMutation.Field()

    schema = graphene.Schema(mutation=Mutation, types=[BookType])
    # The generated input type only exposes the ID (title is read-only).
    assert (str(schema).index("""
input SerializerMutationInput {
  id: ID!
  clientMutationId: String
}""".lstrip()) > -1)

    mutation = """
    mutation SerializerMutation($input: SerializerMutationInput!) {
        serializerMutation(input: $input) {
            title
            errors {
                field
                messages
                path
            }
        }
    }
    """

    # Test: Invalid ID
    with pytest.warns(UserWarning):
        # Should not be called without context
        result = schema.execute(
            mutation,
            variables={"input": {
                "id": "asdf"
            }},
            context={},
        )
    assert json.loads(json.dumps(result.data)) == {
        "serializerMutation": {
            "errors": [{
                "field": "id",
                "messages": ["Not a valid ID."],
                "path": ["id"]
            }],
            "title": None,
        }
    }

    # Test: Invalid Object Type
    with pytest.warns(UserWarning):
        # Should not be called without context
        result = schema.execute(
            mutation,
            variables={"input": {
                "id": to_global_id("AuthorType", 1)
            }},
            context={},
        )
    assert json.loads(json.dumps(result.data)) == {
        "serializerMutation": {
            "errors": [{
                "field": "id",
                "messages": ["Must receive a BookType ID."],
                "path": ["id"],
            }],
            "title": None,
        }
    }

    # Test: Successful
    with pytest.warns(UserWarning):
        # Should not be called without context
        result = schema.execute(
            mutation,
            variables={"input": {
                "id": to_global_id("BookType", 2)
            }},
            context={},
        )
    assert json.loads(json.dumps(result.data)) == {
        "serializerMutation": {
            "errors": None,
            "title": "resolved title"
        }
    }
def test_user_with_lease_can_create_issues(self):
    """A leaseholding student can create hidden and visible issues.

    Hidden issues (and their message threads) are only listed and
    readable while logged in; anonymous access to a hidden issue's chat
    returns 404.
    """
    # with perfect data
    house = create_house("Test House", create_agency("*****@*****.**"))
    user = create_student_with_lease("*****@*****.**", house)
    self.client.force_login(user)
    # CREATE HIDDEN ISSUE
    post = json.dumps(
        {
            "title": "No Water",
            "hidden": "True",
            "message": "Where is the water?"
        },
        indent=4,
        sort_keys=True,
        default=str)
    response = self.client.post(reverse("issues:api-issue-create",
                                        args=(house.id, )),
                                data=post,
                                content_type="application/json")
    self.assertEqual(response.status_code, 201)
    # CREATE VISIBLE ISSUE
    post = json.dumps(
        {
            "title": "Big Issue",
            "message": "Some sensible message here"
        },
        indent=4,
        sort_keys=True,
        default=str)
    response = self.client.post(reverse("issues:api-issue-create",
                                        args=(house.id, )),
                                data=post,
                                content_type="application/json")
    self.assertEqual(response.status_code, 201)
    # CHECK HIDDEN ISSUE IS IN THE DB
    self.assertTrue(
        Message.objects.filter(issue=1,
                               text="Where is the water?").exists())
    self.assertTrue(
        Issue.objects.filter(title="No Water", house=house).exists())
    self.assertEqual(Issue.objects.get(pk=1).get_creator(), user)
    # CHECK VISIBLE ISSUE IS IN THE DB
    self.assertTrue(
        Message.objects.filter(issue=2,
                               text="Some sensible message here").exists())
    self.assertTrue(
        Issue.objects.filter(title="Big Issue", house=house).exists())
    self.assertEqual(Issue.objects.get(pk=2).get_creator(), user)
    # check that the issue index of this house contains this BOTH issues as we are currently signed in
    response = self.client.get(
        reverse("issues:api-issue-index", args=(house.id, )))
    self.assertContains(response, "Big Issue")
    self.assertContains(response, "\"id\":2")
    self.assertContains(response, "No Water")
    self.assertContains(response, "\"id\":1")
    # check that if you logout you can only see the non-hidden issue
    self.client.logout()
    response = self.client.get(
        reverse("issues:api-issue-index", args=(house.id, )))
    self.assertContains(response, "Big Issue")
    self.assertContains(response, "\"id\":2")
    self.assertNotContains(response, "No Water")
    self.assertNotContains(response, "\"id\":1")
    # LOG BACK IN
    self.client.force_login(user)
    # CHECK MESSAGE INDEX OF NON-HIDDEN-ISSUE
    response = self.client.get(reverse("issues:api-issue-chat", args=(2, )))
    self.assertEqual(response.status_code, 200)
    self.assertContains(response, "\"sender\":{}".format(user.id))
    self.assertContains(response, "\"text\":\"Some sensible message here\"")
    # CHECK MESSAGE INDEX OF HIDDEN-ISSUE
    response = self.client.get(reverse("issues:api-issue-chat", args=(1, )))
    self.assertEqual(response.status_code, 200)
    self.assertContains(response, "\"sender\":{}".format(user.id))
    self.assertContains(response, "\"text\":\"Where is the water?\"")
    # LOG OUT TO CHECK MESSAGE INDEX OF HIDDEN AND NON-HIDDEN ISSUES
    self.client.logout()
    # NON-HIDDEN-ISSUE SHOULD WORK
    response = self.client.get(reverse("issues:api-issue-chat", args=(2, )))
    self.assertEqual(response.status_code, 200)
    self.assertContains(response, "\"sender\":{}".format(user.id))
    self.assertContains(response, "\"text\":\"Some sensible message here\"")
    # HIDDEN ISSUE SHOULD THROW 404
    response = self.client.get(reverse("issues:api-issue-chat", args=(1, )))
    self.assertEqual(response.status_code, 404)
def get_params(cloud, region, bucket, filename, rename, expiration,
               content_encoding, cache_control):
    """Build the form fields for a browser-side POST upload to cloud
    storage (Aliyun OSS or AWS S3 SigV4), including the signed policy.

    Returns a dict of form parameters the client must submit alongside
    the file.  `expiration` is the policy lifetime in hours.
    """
    content_type = mimetypes.guess_type(
        filename)[0] or 'application/octet-stream'
    # Whether to rename: store under a generated key below upload/.
    path = f'upload/{create_filename(filename)}' if rename else filename
    # Compute the policy document.
    conditions = [{
        'bucket': bucket
    }, ['starts-with', '$key', path.split('/')[0]],
       ['starts-with', '$Content-Type', content_type]]
    policy_dict = {
        'expiration': (datetime.datetime.utcnow() +
                       datetime.timedelta(hours=expiration)
                       ).strftime('%Y-%m-%dT%H:%M:%S.000Z'),
        'conditions': conditions
    }
    # Time-related values (used by the SigV4 credential scope).
    t = datetime.datetime.utcnow()
    expire = t + datetime.timedelta(days=1)
    amz_date = t.strftime('%Y%m%dT%H%M%SZ')
    date_stamp = t.strftime('%Y%m%d')
    if cloud == 'aws':
        # Note: `conditions += [...]` mutates the list in place, so
        # policy_dict['conditions'] already sees these additions.
        conditions += [{
            'acl': 'public-read'
        }, {
            'success_action_status': '204'
        }, {
            'x-amz-meta-uuid': '14365123651274'
        }, {
            'x-amz-server-side-encryption': 'AES256'
        }, ['starts-with', '$x-amz-meta-tag', ''], {
            'x-amz-credential':
            f'{settings.CLOUD_STORAGE_ID}/{date_stamp}/{region}/s3/aws4_request'
        }, {
            'x-amz-algorithm': 'AWS4-HMAC-SHA256'
        }, {
            'x-amz-date': amz_date
        }]
        policy_dict['conditions'] = conditions
    # policy_dict['conditions'].append(['content-length-range', 1, 1024 * 1024 * 4])
    if content_encoding == 'gzip':
        policy_dict['conditions'].append(
            ['starts-with', '$Content-Encoding', content_encoding])
    string_to_sign = b64encode(json.dumps(policy_dict).encode('utf-8'))
    #
    params = {
        'key': path,
        'Content-Type': content_type,
        'policy': string_to_sign
    }
    if cloud == 'aliyun':
        # Aliyun OSS: HMAC-SHA1 signature over the base64 policy.
        signature = b64encode(
            hmac.new(settings.CLOUD_STORAGE_SECRET.encode('utf-8'),
                     string_to_sign, hashlib.sha1).digest())
        params.update({
            'OSSAccessKeyId': settings.CLOUD_STORAGE_ID,
            'signature': signature
        })
    elif cloud == 'aws':
        # AWS https://docs.aws.amazon.com/AmazonS3/latest/API/sigv4-HTTPPOSTConstructPolicy.html
        # Every dict-style policy condition must also be posted as a field.
        for condition in conditions:
            if isinstance(condition, dict):
                params.update(condition)
        params.pop('bucket')
        signing_key = get_signing_key(settings.CLOUD_STORAGE_SECRET,
                                      date_stamp, region, 's3')
        signature = hmac.new(signing_key, string_to_sign,
                             hashlib.sha256).hexdigest()  # hex digest
        params.update({'x-amz-meta-tag': '', 'x-amz-signature': signature})
    # Extras: Content-Encoding, Cache-Control
    if content_encoding == 'gzip':
        params['Content-Encoding'] = content_encoding
    if cache_control:
        params['Cache-Control'] = cache_control
    return params
def test_agent_user_cannot_create_issue(self):
    """An agent (non-tenant) must not be able to create issues.

    Fix: the original used ``assertTrue(response.status_code, HTTP_400)``,
    which treats the expected status as the assertion *message* and passes
    for any truthy status code; ``assertEqual`` actually compares them.
    The final existence check likewise used ``assertTrue(not ...)`` and is
    now the clearer ``assertFalse``.
    """
    agent = create_agent_passing_agency(
        "*****@*****.**", create_agency("*****@*****.**"))
    house = create_house("test1", create_agency("*****@*****.**"))
    self.client.force_login(agent)
    # with perfect data
    post = json.dumps(
        {
            "title": "Big Issue Not Hidden",
            "message": "Some sensible message here"
        },
        indent=4,
        sort_keys=True,
        default=str)
    response = self.client.post(reverse("issues:api-issue-create",
                                        args=(house.id, )),
                                data=post,
                                content_type="application/json")
    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    # passing random house
    post = json.dumps(
        {
            "title": "Big Issue Not Hidden",
            "house": 21,
            "message": "Some sensible message here"
        },
        indent=4,
        sort_keys=True,
        default=str)
    response = self.client.post(reverse("issues:api-issue-create",
                                        args=(house.id, )),
                                data=post,
                                content_type="application/json")
    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    # passing random user
    post = json.dumps(
        {
            "title": "Big Issue Not Hidden",
            "user": 1,
            "message": "Some sensible message here"
        },
        indent=4,
        sort_keys=True,
        default=str)
    response = self.client.post(reverse("issues:api-issue-create",
                                        args=(house.id, )),
                                data=post,
                                content_type="application/json")
    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    # random date
    post = json.dumps(
        {
            "title": "Big Issue Not Hidden",
            "pub_date": timezone.now() + timezone.timedelta(days=1),
            "message": "Some sensible message here"
        },
        indent=4,
        sort_keys=True,
        default=str)
    response = self.client.post(reverse("issues:api-issue-create",
                                        args=(house.id, )),
                                data=post,
                                content_type="application/json")
    self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
    # nothing has been created
    self.assertFalse(Issue.objects.all().exists())
def get(self, request, *args, **kwargs):
    """Return all non-frozen equipment records as a JSON list."""
    available = info.objects.filter(equipment_frozen=False).all()
    serialized = MySerializer2(instance=available, many=True)
    return HttpResponse(json.dumps(serialized.data))
import requests from rest_framework.utils import json AUTH_ENDPOINT = "http://127.0.0.1:8000/api-auth/jwt/" REFRESH_ENDPOINT = AUTH_ENDPOINT + "refresh/" ENDPOINT = "http://127.0.0.1:8000/powtoons/" headers = {"Content-Type": "application/json"} data = {"username": "******", "password": "******"} r = requests.post(AUTH_ENDPOINT, data=json.dumps(data), headers=headers) token = r.json()["token"] print r.json() print token refresh_data = {"token": token} # r2 = requests.post(REFRESH_ENDPOINT, data=json.dumps(refresh_data), headers=headers) # print r2.json() print "GET POWTOONS" headers["Authorization"] = "JWT " + token r = requests.get(ENDPOINT, headers=headers) for p in r.json(): print p print "SHARE POWTOON TO USER WITH PK " post_data = json.dumps({"user_id": 3}) new_r = requests.post(ENDPOINT + str(9) + "/share/", data=post_data,
def test_create_multiple_tasks_with_valid_payload(self):
    """Batch-creating a task with dependencies and dependents persists the
    main task, both ancestors, both successors and the dependency links."""
    valid_payload = {
        'task': {
            'dependency_network_id': 1,
            'name': 'new task',
            'description': 'instructions for new task'
        },
        'dependencies': [{
            'dependency_network_id': 1,
            'name': 'ancestor 1',
            'description': 'instructions for ancestor 1 task'
        }, {
            'dependency_network_id': 1,
            'name': 'ancestor 2',
            'description': 'instructions for ancestor 2 task'
        }],
        'dependents': [{
            'dependency_network_id': 1,
            'name': 'successor 1',
            'description': 'instructions for successor 1 task'
        }, {
            'dependency_network_id': 1,
            'name': 'successor 2',
            'description': 'instructions for successor 2 task'
        }]
    }
    response = self.client.post(reverse('create_batch_tasks'),
                                data=json.dumps(valid_payload),
                                content_type='application/json')
    self.assertEqual(response.status_code, status.HTTP_201_CREATED)
    # The main task was stored with its description.
    main_task = Task.objects.get(name='new task')
    self.assertIsNotNone(main_task)
    self.assertEqual(main_task.description, 'instructions for new task')
    # The main task depends on both ancestors.
    depency1 = list(Dependency.objects.filter(task_id=main_task.id))
    self.assertEqual(len(depency1), 2)
    self.assertEqual(depency1[0].depends_on_task.name, 'ancestor 1')
    self.assertEqual(depency1[1].depends_on_task.name, 'ancestor 2')
    ancestor1 = Task.objects.get(name='ancestor 1')
    self.assertIsNotNone(ancestor1)
    self.assertEqual(ancestor1.description,
                     'instructions for ancestor 1 task')
    ancestor2 = Task.objects.get(name='ancestor 2')
    self.assertIsNotNone(ancestor2)
    self.assertEqual(ancestor2.description,
                     'instructions for ancestor 2 task')
    # Each successor depends on the main task.
    successor1 = Task.objects.get(name='successor 1')
    self.assertIsNotNone(successor1)
    self.assertEqual(successor1.description,
                     'instructions for successor 1 task')
    depency2 = list(Dependency.objects.filter(task_id=successor1.id))
    self.assertEqual(len(depency2), 1)
    self.assertEqual(depency2[0].depends_on_task.name, 'new task')
    successor2 = Task.objects.get(name='successor 2')
    self.assertIsNotNone(successor2)
    self.assertEqual(successor2.description,
                     'instructions for successor 2 task')
    depency3 = list(Dependency.objects.filter(task_id=successor2.id))
    self.assertEqual(len(depency3), 1)
    self.assertEqual(depency3[0].depends_on_task.name, 'new task')
    # Clean up the created rows.
    main_task.delete()
    ancestor1.delete()
    ancestor2.delete()
    successor1.delete()
    successor2.delete()
def set_context(self, renderer_context, swagger=None):
    """Inject the ReDoc settings, serialized with the project JSON
    encoder, into the template context."""
    super(ReDocRenderer, self).set_context(renderer_context, swagger)
    settings_json = json.dumps(self.get_redoc_settings(),
                               cls=encoders.JSONEncoder)
    renderer_context['redoc_settings'] = settings_json
def request_callback(r, _, response_headers):
    """Mock-server callback that intercepts the pdns zone update request
    and asserts it contains exactly the expected RR set changes; always
    replies 200 with an empty body."""
    if not updated_rr_sets:
        # nothing to assert
        return [200, response_headers, '']
    body = json.loads(r.parsed_body)
    self.failIf(
        'rrsets' not in body.keys(),
        'pdns zone update request malformed: did not contain a list of RR sets.'
    )
    try:  # if an assertion fails, an exception is raised. We want to send a reply anyway!
        with SQLiteReadUncommitted(
        ):  # tests are wrapped in uncommitted transactions, so we need to see inside
            # convert updated_rr_sets into a plain data type, if Django models were given
            if isinstance(updated_rr_sets, list):
                updated_rr_sets_dict = {}
                for rr_set in updated_rr_sets:
                    updated_rr_sets_dict[(rr_set.type, rr_set.subname,
                                          rr_set.ttl)] = rrs = []
                    for rr in rr_set.records.all():
                        rrs.append(rr.content)
            elif isinstance(updated_rr_sets, dict):
                updated_rr_sets_dict = updated_rr_sets
            else:
                raise ValueError(
                    'updated_rr_sets must be a list of RRSets or a dict.'
                )

            # check expectations
            self.assertEqual(
                len(updated_rr_sets_dict), len(body['rrsets']),
                'Saw an unexpected number of RR set updates: expected %i, intercepted %i.'
                % (len(updated_rr_sets_dict), len(body['rrsets'])))
            for (exp_type, exp_subname,
                 exp_ttl), exp_records in updated_rr_sets_dict.items():
                expected_name = '.'.join(
                    filter(None, [exp_subname, name])) + '.'
                for seen_rr_set in body['rrsets']:
                    if (expected_name == seen_rr_set['name']
                            and exp_type == seen_rr_set['type']):
                        # TODO replace the following asserts by assertTTL, assertRecords, ... or similar
                        if len(exp_records):
                            self.assertEqual(exp_ttl, seen_rr_set['ttl'])
                            self.assertEqual(
                                set(exp_records),
                                set([
                                    rr['content']
                                    for rr in seen_rr_set['records']
                                ]),
                            )
                        break
                else:
                    # we did not break out, i.e. we did not find a matching RR set in body['rrsets']
                    self.fail(
                        'Expected to see an pdns zone update request for RR set of domain `%s` with name '
                        '`%s` and type `%s`, but did not see one. Seen update request on %s for RR sets:'
                        '\n\n%s' % (name, expected_name, exp_type,
                                    request['uri'],
                                    json.dumps(body['rrsets'], indent=4)))
    finally:
        # NOTE(review): `return` inside `finally` suppresses any in-flight
        # exception, so the assertion failures raised above never reach the
        # test runner — confirm this swallowing is actually intended.
        return [200, response_headers, '']