class FixtureOptions(forms.Form):
    """Admin-action form collecting options for dumping objects as a fixture."""

    # Hidden field carrying the admin action's selected object PKs.
    _selected_action = forms.CharField(widget=forms.MultipleHiddenInput)
    use_natural_key = forms.BooleanField(required=False)
    on_screen = forms.BooleanField(label='Dump on screen', required=False)
    indent = forms.IntegerField(required=True, max_value=10, min_value=0)
    # Offer every registered serializer format as a (value, label) pair.
    serializer = forms.ChoiceField(
        choices=[(fmt, fmt) for fmt in get_serializer_formats()])
class FixtureOptions(forms.Form):
    """Admin-action form for fixture dumps, including admin bookkeeping fields."""

    # Hidden admin-action plumbing: selected PKs, select-across flag, action name.
    _selected_action = forms.CharField(widget=forms.MultipleHiddenInput)
    select_across = forms.BooleanField(
        label='',
        required=False,
        initial=0,
        widget=forms.HiddenInput({'class': 'select-across'}),
    )
    action = forms.CharField(
        label='', required=True, initial='', widget=forms.HiddenInput())
    use_natural_key = forms.BooleanField(required=False)
    on_screen = forms.BooleanField(label='Dump on screen', required=False)
    add_foreign_keys = forms.BooleanField(required=False)
    indent = forms.IntegerField(required=True, max_value=10, min_value=0)
    # Offer every registered serializer format as a (value, label) pair.
    serializer = forms.ChoiceField(
        choices=[(fmt, fmt) for fmt in get_serializer_formats()])
def test_aware_datetime_in_other_timezone(self):
    """Round-trip an aware datetime (non-default tz) through each serializer."""
    dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT)
    data = serializers.serialize('python', [Event(dt=dt)])
    self.assertEqual(data[0]['fields']['dt'], dt)
    # Use the next() builtin: the Python 2-only .next() method no longer exists.
    obj = next(serializers.deserialize('python', data)).object
    self.assertEqual(obj.dt, dt)
    data = serializers.serialize('json', [Event(dt=dt)])
    self.assertIn('"fields": {"dt": "2011-09-01T17:20:30+07:00"}', data)
    obj = next(serializers.deserialize('json', data)).object
    self.assertEqual(obj.dt, dt)
    data = serializers.serialize('xml', [Event(dt=dt)])
    self.assertIn(
        '<field type="DateTimeField" name="dt">2011-09-01T17:20:30+07:00</field>',
        data)
    obj = next(serializers.deserialize('xml', data)).object
    self.assertEqual(obj.dt, dt)
    if 'yaml' in serializers.get_serializer_formats():
        data = serializers.serialize('yaml', [Event(dt=dt)])
        self.assertIn(
            "- fields: {dt: !!timestamp '2011-09-01 17:20:30+07:00'}", data)
        obj = next(serializers.deserialize('yaml', data)).object
        # YAML deserializes to UTC-naive; normalize before comparing.
        self.assertEqual(obj.dt.replace(tzinfo=UTC), dt)
def __new__(cls, name, bases, attrs):
    """Extends base test functions to be called for every serialisation format.

    Looks for functions matching 'run.*Test', where the wildcard in the middle
    matches the desired test name and ensures that a test case is setup to call
    that function once for every defined serialisation format. The test case
    that is created will be called 'test<format><name>'. Eg, for the function
    'runKeyedObjectTest' functions like 'testJsonKeyedObject' will be created.
    """
    test_formats = serializers.get_serializer_formats()
    test_formats.remove("python")  # Python serializer is only used indirectly.

    def make_test(runner_name, format):
        # Closure with early-bound arguments instead of the original
        # eval()-built lambda: same behaviour, no dynamic code generation.
        return lambda self: getattr(self, runner_name)(format)

    # Iterate over a snapshot: attrs is mutated inside the loop, and mutating
    # a dict while iterating its live keys view raises RuntimeError (Py3).
    for func_name in list(attrs):
        m = re.match(r"^run(.*)Test$", func_name)
        if not m:
            continue
        for format in test_formats:
            test_name = "test%s%s" % (format.title(), m.group(1))
            attrs[test_name] = make_test(func_name, format)
    # ensure keys match the current app ID by populating them dynamically:
    obj = ModelA(key_name="test")
    obj.put()
    pk = obj.key()
    for k, v in attrs['SERIALIZED_WITH_NON_EXISTANT_PARENT'].items():
        attrs['SERIALIZED_WITH_NON_EXISTANT_PARENT'][k] = v % str(pk)
    return super(TestAllFormats, cls).__new__(cls, name, bases, attrs)
def test_naive_datetime_with_microsecond(self):
    """Round-trip a naive datetime with microseconds through each serializer."""
    dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060)
    data = serializers.serialize('python', [Event(dt=dt)])
    self.assertEqual(data[0]['fields']['dt'], dt)
    # Use the next() builtin: the Python 2-only .next() method no longer exists.
    obj = next(serializers.deserialize('python', data)).object
    self.assertEqual(obj.dt, dt)
    data = serializers.serialize('json', [Event(dt=dt)])
    self.assertIn('"fields": {"dt": "2011-09-01T13:20:30.405"}', data)
    obj = next(serializers.deserialize('json', data)).object
    # JSON keeps millisecond precision only.
    self.assertEqual(obj.dt, dt.replace(microsecond=405000))
    data = serializers.serialize('xml', [Event(dt=dt)])
    self.assertIn(
        '<field type="DateTimeField" name="dt">2011-09-01T13:20:30.405060</field>',
        data)
    obj = next(serializers.deserialize('xml', data)).object
    self.assertEqual(obj.dt, dt)
    if 'yaml' in serializers.get_serializer_formats():
        data = serializers.serialize('yaml', [Event(dt=dt)])
        self.assertIn(
            "- fields: {dt: !!timestamp '2011-09-01 13:20:30.405060'}", data)
        obj = next(serializers.deserialize('yaml', data)).object
        self.assertEqual(obj.dt, dt)
def __init__(self, path, match=None, **kwargs):
    """Restrict path choices to files with a registered serializer extension.

    If no explicit ``match`` regex is given, one is generated from the
    registered serializer formats, e.g. ``(?i)^.+(\\.xml|\\.json)$``.
    """
    match = match or (
        # Raw string avoids the invalid '\.' escape in a plain literal.
        '(?i)^.+(%s)$' % '|'.join(
            [r'\.%s' % ext for ext in get_serializer_formats()])
    )
    super(FixturePathField, self).__init__(path, match=match, **kwargs)
    if not self.required:
        del self.choices[0]  # Remove the empty option
def clean_file(self):
    """Validate that an uploaded fixture file has a known serializer extension."""
    data = self.cleaned_data['file']
    if not isinstance(data, InMemoryUploadedFile):
        return data
    file_format = data.name.split('.')[-1]
    # Idiomatic 'not in' instead of 'not x in y'.
    if file_format not in get_serializer_formats():
        raise forms.ValidationError(_('Invalid file extension.'))
    return data
def __init__(self, path, match=None, **kwargs):
    """Restrict path choices to files with a registered serializer extension.

    Defaults ``match`` to a regex built from the registered serializer
    formats, e.g. ``(?i)^.+(\\.xml|\\.json)$``.
    """
    match = match or (
        # Raw string avoids the invalid '\.' escape in a plain literal.
        '(?i)^.+(%s)$' % '|'.join(
            [r'\.%s' % ext for ext in get_serializer_formats()])
    )
    super(FixturePathField, self).__init__(path, match=match, **kwargs)
    if not self.required:
        del self.choices[0]  # Remove the empty option
def test_builtin_serializers(self):
    """'geojson' should be listed in available serializers."""
    all_formats = set(serializers.get_serializer_formats())
    public_formats = set(serializers.get_public_serializer_formats())
    # Formerly joined by a stray trailing comma (built a throwaway tuple);
    # now two separate assertions.
    self.assertIn('geojson', all_formats)
    self.assertIn('geojson', public_formats)
def validate(self, data):
    """Reject any uploaded file whose extension is not a registered serializer format."""
    super(MultiFixtureField, self).validate(data)
    known_formats = get_serializer_formats()
    for uploaded in data:
        extension = os.path.splitext(uploaded.name)[1][1:].lower()
        if extension not in known_formats:
            message = _('Invalid file extension: .%(extension)s.')
            raise forms.ValidationError(message % {'extension': extension})
    return data
def test_builtin_serializers(self):
    """'geojson' should be listed in available serializers."""
    all_formats = set(serializers.get_serializer_formats())
    public_formats = set(serializers.get_public_serializer_formats())
    # Formerly joined by a stray trailing comma (built a throwaway tuple);
    # now two separate assertions.
    self.assertIn("geojson", all_formats)
    self.assertIn("geojson", public_formats)
def validate(self, data):
    """Ensure every upload in *data* carries a registered serializer extension."""
    super(MultiFixtureField, self).validate(data)
    for item in data:
        _, ext = os.path.splitext(item.name)
        file_format = ext[1:].lower()
        if file_format in get_serializer_formats():
            continue
        raise forms.ValidationError(
            _('Invalid file extension: .%(extension)s.')
            % {'extension': file_format})
    return data
def register_tests(test_class, method_name, test_func, exclude=()):
    """
    Dynamically create serializer tests to ensure that all registered
    serializers are automatically tested.
    """
    for fmt in serializers.get_serializer_formats():
        # Skip geojson and anything the caller explicitly excluded.
        if fmt == 'geojson' or fmt in exclude:
            continue
        skip_decorator = skipIf(
            isinstance(serializers.get_serializer(fmt), serializers.BadSerializer),
            'The Python library for the %s serializer is not installed.' % fmt,
        )
        setattr(
            test_class,
            method_name % fmt,
            partialmethod(skip_decorator(test_func), fmt),
        )
def test_builtin_serializers(self):
    "Requesting a list of serializer formats populates the registry"
    all_formats = set(serializers.get_serializer_formats())
    public_formats = set(serializers.get_public_serializer_formats())
    # Formerly two assertions joined by a stray comma (throwaway tuple);
    # split into separate statements. Also fixed docstring typo 'popuates'.
    self.assertIn('xml', all_formats)
    self.assertIn('xml', public_formats)
    self.assertIn('json2', all_formats)
    self.assertIn('json2', public_formats)
    self.assertIn('python', all_formats)
    self.assertNotIn('python', public_formats)
def test_builtin_serializers(self):
    "Requesting a list of serializer formats populates the registry"
    all_formats = set(serializers.get_serializer_formats())
    public_formats = set(serializers.get_public_serializer_formats())
    # assertIn/assertNotIn give better failure messages than
    # assertTrue(x in y); also removed the stray comma that joined the
    # first two assertions into a throwaway tuple.
    self.assertIn('xml', all_formats)
    self.assertIn('xml', public_formats)
    self.assertIn('json2', all_formats)
    self.assertIn('json2', public_formats)
    self.assertIn('python', all_formats)
    self.assertNotIn('python', public_formats)
def register_tests(test_class, method_name, test_func, exclude=None):
    """
    Dynamically create serializer tests to ensure that all registered
    serializers are automatically tested.
    """
    for fmt in serializers.get_serializer_formats():
        # Same filter as the original comprehension, as guard clauses.
        if isinstance(serializers.get_serializer(fmt), serializers.BadSerializer):
            continue
        if fmt == 'geojson':
            continue
        if exclude is not None and fmt in exclude:
            continue
        setattr(test_class, method_name % fmt, partialmethod(test_func, fmt))
def test_builtin_serializers(self):
    "Requesting a list of serializer formats populates the registry"
    all_formats = set(serializers.get_serializer_formats())
    public_formats = set(serializers.get_public_serializer_formats())
    # Formerly two assertions joined by a stray comma (throwaway tuple);
    # split into separate statements.
    self.assertIn("xml", all_formats)
    self.assertIn("xml", public_formats)
    self.assertIn("json2", all_formats)
    self.assertIn("json2", public_formats)
    self.assertIn("python", all_formats)
    self.assertNotIn("python", public_formats)
def register_tests(test_class, method_name, test_func, exclude=None):
    """
    Dynamically create serializer tests to ensure that all registered
    serializers are automatically tested.
    """
    usable_formats = []
    for fmt in serializers.get_serializer_formats():
        if isinstance(serializers.get_serializer(fmt), serializers.BadSerializer):
            continue
        if fmt == 'geojson':
            continue
        if exclude is not None and fmt in exclude:
            continue
        usable_formats.append(fmt)
    for fmt in usable_formats:
        # NOTE(review): curry (not functools.partial) looks deliberate here —
        # a plain function attribute binds as a method; confirm before swapping.
        setattr(test_class, method_name % fmt, curry(test_func, fmt))
def test_aware_datetime_in_other_timezone(self):
    """Round-trip an aware datetime (non-default tz) through each serializer."""
    dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT)
    data = serializers.serialize('python', [Event(dt=dt)])
    self.assertEqual(data[0]['fields']['dt'], dt)
    # Use the next() builtin: the Python 2-only .next() method no longer exists.
    obj = next(serializers.deserialize('python', data)).object
    self.assertEqual(obj.dt, dt)
    data = serializers.serialize('json', [Event(dt=dt)])
    self.assertIn('"fields": {"dt": "2011-09-01T17:20:30+07:00"}', data)
    obj = next(serializers.deserialize('json', data)).object
    self.assertEqual(obj.dt, dt)
    data = serializers.serialize('xml', [Event(dt=dt)])
    self.assertIn('<field type="DateTimeField" name="dt">2011-09-01T17:20:30+07:00</field>', data)
    obj = next(serializers.deserialize('xml', data)).object
    self.assertEqual(obj.dt, dt)
    if 'yaml' in serializers.get_serializer_formats():
        data = serializers.serialize('yaml', [Event(dt=dt)])
        self.assertIn("- fields: {dt: !!timestamp '2011-09-01 17:20:30+07:00'}", data)
        obj = next(serializers.deserialize('yaml', data)).object
        # YAML deserializes to UTC-naive; normalize before comparing.
        self.assertEqual(obj.dt.replace(tzinfo=UTC), dt)
def test_aware_datetime_in_other_timezone(self):
    """Each serializer round-trips an aware datetime from a non-default tz."""
    dt = datetime.datetime(2011, 9, 1, 17, 20, 30, tzinfo=ICT)

    serialized = serializers.serialize('python', [Event(dt=dt)])
    self.assert_python_contains_datetime(serialized, dt)
    restored = next(serializers.deserialize('python', serialized)).object
    self.assertEqual(restored.dt, dt)

    serialized = serializers.serialize('json', [Event(dt=dt)])
    self.assert_json_contains_datetime(serialized, "2011-09-01T17:20:30+07:00")
    restored = next(serializers.deserialize('json', serialized)).object
    self.assertEqual(restored.dt, dt)

    serialized = serializers.serialize('xml', [Event(dt=dt)])
    self.assert_xml_contains_datetime(serialized, "2011-09-01T17:20:30+07:00")
    restored = next(serializers.deserialize('xml', serialized)).object
    self.assertEqual(restored.dt, dt)

    if 'yaml' in serializers.get_serializer_formats():
        serialized = serializers.serialize('yaml', [Event(dt=dt)])
        self.assert_yaml_contains_datetime(serialized, "2011-09-01 17:20:30+07:00")
        restored = next(serializers.deserialize('yaml', serialized)).object
        # YAML comes back UTC-naive; reattach the tz before comparing.
        self.assertEqual(restored.dt.replace(tzinfo=UTC), dt)
def test_naive_datetime(self):
    """Round-trip a naive datetime through each serializer format."""
    dt = datetime.datetime(2011, 9, 1, 13, 20, 30)
    data = serializers.serialize("python", [Event(dt=dt)])
    self.assertEqual(data[0]["fields"]["dt"], dt)
    # Use the next() builtin: the Python 2-only .next() method no longer exists.
    obj = next(serializers.deserialize("python", data)).object
    self.assertEqual(obj.dt, dt)
    data = serializers.serialize("json", [Event(dt=dt)])
    self.assertIn('"fields": {"dt": "2011-09-01T13:20:30"}', data)
    obj = next(serializers.deserialize("json", data)).object
    self.assertEqual(obj.dt, dt)
    data = serializers.serialize("xml", [Event(dt=dt)])
    self.assertIn('<field type="DateTimeField" name="dt">2011-09-01T13:20:30</field>', data)
    obj = next(serializers.deserialize("xml", data)).object
    self.assertEqual(obj.dt, dt)
    if "yaml" in serializers.get_serializer_formats():
        data = serializers.serialize("yaml", [Event(dt=dt)])
        self.assertIn("- fields: {dt: !!timestamp '2011-09-01 13:20:30'}", data)
        obj = next(serializers.deserialize("yaml", data)).object
        self.assertEqual(obj.dt, dt)
def __new__(cls, name, bases, attrs):
    """Extends base test functions to be called for every serialisation format.

    Looks for functions matching 'run.*Test', where the wildcard in the middle
    matches the desired test name and ensures that a test case is setup to call
    that function once for every defined serialisation format. The test case
    that is created will be called 'test<format><name>'. Eg, for the function
    'runKeyedObjectTest' functions like 'testJsonKeyedObject' will be created.
    """
    test_formats = serializers.get_serializer_formats()
    test_formats.remove("python")  # Python serializer is only used indirectly.

    def make_test(runner_name, format):
        # Closure with early-bound arguments instead of the original
        # eval()-built lambda: same behaviour, no dynamic code generation.
        return lambda self: getattr(self, runner_name)(format)

    # Iterate over a snapshot: attrs is mutated inside the loop, and mutating
    # a dict while iterating its live keys view raises RuntimeError (Py3).
    for func_name in list(attrs):
        m = re.match(r"^run(.*)Test$", func_name)
        if not m:
            continue
        for format in test_formats:
            test_name = "test%s%s" % (format.title(), m.group(1))
            attrs[test_name] = make_test(func_name, format)
    return super(TestAllFormats, cls).__new__(cls, name, bases, attrs)
def test_aware_datetime_with_microsecond(self):
    """Round-trip an aware datetime with microseconds through each serializer."""
    dt = datetime.datetime(2011, 9, 1, 17, 20, 30, 405060, tzinfo=ICT)
    data = serializers.serialize("python", [Event(dt=dt)])
    self.assertEqual(data[0]["fields"]["dt"], dt)
    # Use the next() builtin: the Python 2-only .next() method no longer exists.
    obj = next(serializers.deserialize("python", data)).object
    self.assertEqual(obj.dt, dt)
    data = serializers.serialize("json", [Event(dt=dt)])
    self.assertIn('"fields": {"dt": "2011-09-01T17:20:30.405+07:00"}', data)
    obj = next(serializers.deserialize("json", data)).object
    # JSON keeps millisecond precision only.
    self.assertEqual(obj.dt, dt.replace(microsecond=405000))
    data = serializers.serialize("xml", [Event(dt=dt)])
    self.assertIn('<field type="DateTimeField" name="dt">2011-09-01T17:20:30.405060+07:00</field>', data)
    obj = next(serializers.deserialize("xml", data)).object
    self.assertEqual(obj.dt, dt)
    if "yaml" in serializers.get_serializer_formats():
        data = serializers.serialize("yaml", [Event(dt=dt)])
        self.assertIn("- fields: {dt: !!timestamp '2011-09-01 17:20:30.405060+07:00'}", data)
        obj = next(serializers.deserialize("yaml", data)).object
        # YAML deserializes to UTC-naive; normalize before comparing.
        self.assertEqual(obj.dt.replace(tzinfo=UTC), dt)
def test_aware_datetime_in_local_timezone(self):
    """Round-trip an aware datetime in the local timezone through each serializer."""
    dt = datetime.datetime(2011, 9, 1, 13, 20, 30, tzinfo=EAT)
    data = serializers.serialize("python", [Event(dt=dt)])
    self.assertEqual(data[0]["fields"]["dt"], dt)
    # Use the next() builtin: the Python 2-only .next() method no longer exists.
    obj = next(serializers.deserialize("python", data)).object
    self.assertEqual(obj.dt, dt)
    data = serializers.serialize("json", [Event(dt=dt)])
    self.assertIn('"fields": {"dt": "2011-09-01T13:20:30+03:00"}', data)
    obj = next(serializers.deserialize("json", data)).object
    self.assertEqual(obj.dt, dt)
    data = serializers.serialize("xml", [Event(dt=dt)])
    self.assertIn('<field type="DateTimeField" name="dt">2011-09-01T13:20:30+03:00</field>', data)
    obj = next(serializers.deserialize("xml", data)).object
    self.assertEqual(obj.dt, dt)
    if "yaml" in serializers.get_serializer_formats():
        data = serializers.serialize("yaml", [Event(dt=dt)])
        self.assertIn("- fields: {dt: !!timestamp '2011-09-01 13:20:30+03:00'}", data)
        obj = next(serializers.deserialize("yaml", data)).object
        # YAML deserializes to UTC-naive; normalize before comparing.
        self.assertEqual(obj.dt.replace(tzinfo=UTC), dt)
def test_naive_datetime_with_microsecond(self):
    """Round-trip a naive datetime with microseconds through each serializer."""
    dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060)
    data = serializers.serialize('python', [Event(dt=dt)])
    self.assertEqual(data[0]['fields']['dt'], dt)
    # Use the next() builtin: the Python 2-only .next() method no longer exists.
    obj = next(serializers.deserialize('python', data)).object
    self.assertEqual(obj.dt, dt)
    data = serializers.serialize('json', [Event(dt=dt)])
    self.assertIn('"fields": {"dt": "2011-09-01T13:20:30.405"}', data)
    obj = next(serializers.deserialize('json', data)).object
    # JSON keeps millisecond precision only.
    self.assertEqual(obj.dt, dt.replace(microsecond=405000))
    data = serializers.serialize('xml', [Event(dt=dt)])
    self.assertIn('<field type="DateTimeField" name="dt">2011-09-01T13:20:30.405060</field>', data)
    obj = next(serializers.deserialize('xml', data)).object
    self.assertEqual(obj.dt, dt)
    if 'yaml' in serializers.get_serializer_formats():
        data = serializers.serialize('yaml', [Event(dt=dt)])
        self.assertIn("- fields: {dt: !!timestamp '2011-09-01 13:20:30.405060'}", data)
        obj = next(serializers.deserialize('yaml', data)).object
        self.assertEqual(obj.dt, dt)
def test_naive_datetime_with_microsecond(self):
    """Each serializer round-trips a naive microsecond-precision datetime."""
    dt = datetime.datetime(2011, 9, 1, 13, 20, 30, 405060)

    serialized = serializers.serialize('python', [Event(dt=dt)])
    self.assert_python_contains_datetime(serialized, dt)
    restored = next(serializers.deserialize('python', serialized)).object
    self.assertEqual(restored.dt, dt)

    serialized = serializers.serialize('json', [Event(dt=dt)])
    self.assert_json_contains_datetime(serialized, "2011-09-01T13:20:30.405")
    restored = next(serializers.deserialize('json', serialized)).object
    # JSON keeps millisecond precision only.
    self.assertEqual(restored.dt, dt.replace(microsecond=405000))

    serialized = serializers.serialize('xml', [Event(dt=dt)])
    self.assert_xml_contains_datetime(serialized, "2011-09-01T13:20:30.405060")
    restored = next(serializers.deserialize('xml', serialized)).object
    self.assertEqual(restored.dt, dt)

    if 'yaml' in serializers.get_serializer_formats():
        serialized = serializers.serialize('yaml', [Event(dt=dt)])
        self.assert_yaml_contains_datetime(serialized, "2011-09-01 13:20:30.405060")
        restored = next(serializers.deserialize('yaml', serialized)).object
        self.assertEqual(restored.dt, dt)
def __new__(cls, name, bases, attrs):
    """Extends base test functions to be called for every serialisation format.

    Looks for functions matching 'run.*Test', where the wildcard in the middle
    matches the desired test name and ensures that a test case is setup to call
    that function once for every defined serialisation format. The test case
    that is created will be called 'test<format><name>'. Eg, for the function
    'runKeyedObjectTest' functions like 'testJsonKeyedObject' will be created.
    """
    test_formats = serializers.get_serializer_formats()
    test_formats.remove("python")  # Python serializer is only used indirectly.

    def _build_test(runner_name, format):
        # Closure with early-bound arguments replaces the eval()-built lambda.
        return lambda self: getattr(self, runner_name)(format)

    # Snapshot the keys: attrs gains entries inside the loop, and mutating a
    # dict while iterating its live keys view raises RuntimeError (Py3).
    for func_name in list(attrs):
        m = re.match(r"^run(.*)Test$", func_name)
        if not m:
            continue
        for format in test_formats:
            test_name = "test%s%s" % (format.title(), m.group(1))
            attrs[test_name] = _build_test(func_name, format)
    return super(TestAllFormats, cls).__new__(cls, name, bases, attrs)
# NOTE(review): this class's help_text/description strings use backslash-line
# continuations whose original indentation (which becomes part of the runtime
# string) is not recoverable from this view, and the 'description' literal is
# split across a chunk boundary. Left byte-identical; any reformat must verify
# the exact string contents against the original file.
class Export(forms.Form): export_format = forms.ChoiceField( choices=[(format, format) for format in serializers.get_serializer_formats()], required=False, label='Format', help_text='Designates export format.', ) export_fields = forms.MultipleChoiceField( choices=[('asc', 'Ascending'), ('desc', 'Descending')], required=False, label='Fields', help_text="Fields to be included in the exported data. If none are \ selected all fields will be exported. Hold down 'Control', \ or 'Command' on a Mac, to select more than one.", ) export_order_by = forms.ChoiceField( required=False, label='Order by', help_text='Field to use for export ordering.', ) export_order_direction = forms.ChoiceField( choices=[('asc', 'Ascending'), ('dsc', 'Descending')], required=False, label='Order direction', help_text='Sort elements in ascending or descending order.', ) def __init__(self, model, *args, **kwargs): super(Export, self).__init__(*args, **kwargs) self.fieldsets = (('Options', { 'fields': ('export_format', 'export_fields', 'export_order_by', 'export_order_direction') }), ('Filters', { 'description': 'Objects will be filtered to match the criteria 
If a value is not specified \ for a field the field is ignored during the filter process.', 'fields': [] })) field_choices = [] form_fields = forms.models.fields_for_model(model) for field in model._meta.fields: name = field.name if name not in form_fields.keys(): continue form_field = form_fields[name] if form_field.__class__ in [ forms.models.ModelChoiceField, forms.models.ModelMultipleChoiceField ]: self.fields[name] = getattr(fields, field.__class__.__name__)( form_field, form_field.queryset) else: try: self.fields[name] = getattr( fields, field.__class__.__name__)(form_field) except AttributeError: for parent_field in inspect.getmro(field.__class__): if parent_field.__module__ == \ 'django.db.models.fields': self.fields[name] = getattr( fields, parent_field.__name__)(form_field) break if name not in self.fieldsets[1][1]['fields']: self.fieldsets[1][1]['fields'].append(name) field_choices.append([name, form_field.label.capitalize()]) self.fields['export_fields'].choices = field_choices self.fields['export_order_by'].choices = field_choices
# NOTE(review): left byte-identical — large view with order-dependent queryset
# composition; a restyle could not be verified safely from this chunk.
# Py2-only idioms present: request.GET.has_key(...) (removed in Py3 — use 'in').
# SECURITY NOTE(review): the 'search' branch builds a Q() expression string and
# passes it to eval(); the input is first stripped to [\w-] characters by
# re.sub, but composing Q objects directly instead of eval() would be safer.
# NOTE(review): extra_itemfields lists 'usage_percent' twice — presumably one
# duplicate is unintended; confirm against the serializer before removing.
def plugin(request, query=None): """ https://biogps-dev.gnf.org/plugin_v1/159/ https://biogps-dev.gnf.org/plugin_v1/1-5/ https://biogps-dev.gnf.org/plugin_v1/1,10,24/ https://biogps-dev.gnf.org/plugin_v1/all/ https://biogps-dev.gnf.org/plugin_v1/?start=0&limit=20&scope=all&sort=created&dir=DESC https://biogps-dev.gnf.org/plugin_v1/?start=0&limit=20&scope=shared&sort=created&dir=DESC&search=ncbi+gene https://biogps-dev.gnf.org/plugin_v1/?start=0&limit=20&scope=shared&sort=created&dir=DESC&tags=ncbi+gene sortable fields: author, created, lastmodified, title, type, url, popularity searchable fields: author, url, description, type, tags """ # if request.adamuser.is_anonymous(): # return HttpResponse(json.dumps(ANONYMOUS_USER_ERROR), content_type=MIMETYPE['json']) sortable_fields = ['author', 'created', 'lastmodified', 'title', 'type', 'url', 'popularity'] if request.method == 'GET': sort_order = smart_unicode(request.GET.get('dir', 'DESC')) sort_by = smart_unicode(request.GET.get('mysort', 'lastmodified')).strip().lower() if sort_by not in sortable_fields: return HttpResponseBadRequest('unknown "sort" field "%s".' 
% escape(sort_by)) if sort_by == 'popularity': sort_by = 'popularity__score' sort_order = sort_order.strip() sort_by = sort_by.strip() if sort_order.upper() == 'DESC': sort_by = '-' + sort_by scope = smart_unicode(request.GET.get('scope', 'all')).lower() if scope == 'my': #_dbobjects = BiogpsPlugin.objects.filter(authorid=request.adamuser.sid) #_dbobjects = get_my_plugins(request.adamuser) _dbobjects = get_my_plugins(request.user) elif scope == 'shared': _dbobjects = get_shared_plugins(request.user) else: #scope == 'all': _dbobjects = get_my_plugins(request.user) | get_shared_plugins(request.user) if request.GET.has_key('search'): #search_term_li = re.split('\s+', smart_unicode(request.GET['search']).strip().replace("'",r"\'")) search_string = smart_unicode(re.sub('[^\w-]', ' ', request.GET['search'])).strip() search_term_li = re.split('\s+', search_string) _searchquery = eval(' & '.join(['(%s)' % q for q in [' | '.join(["Q(%s__icontains='%s')" % (field, term) for field in ['title', 'url', 'description', 'type', 'author']]) for term in search_term_li]])) #_dbobjects = _dbobjects.order_by(sort_by).filter(_searchquery) _dbobjects = (_dbobjects.filter(_searchquery) | \ TaggedItem.objects.get_union_by_model(_dbobjects, Tag.objects.filter(name__in=search_term_li))) \ .order_by(sort_by, 'pk') # add secondary sorting by pk to avoid unexpected order in case of ties #.filter(_searchquery).order_by(sort_by, 'pk') # add secondary sorting by pk to avoid unexpected order in case of ties if request.GET.has_key('tags'): _tags = re.split('\s+', smart_unicode(request.GET['tags']).strip()) tags = Tag.objects.filter(name__in=_tags) _dbobjects = TaggedItem.objects.get_union_by_model(_dbobjects, tags) else: #_dbobjects = BiogpsPlugin.objects.order_by(sort_by) _dbobjects = _dbobjects.order_by(sort_by, 'pk') # add secondary sorting by pk to avoid unexpected order in case of ties if sort_by.endswith('popularity__score'): _dbobjects = _dbobjects.filter(popularity__score__isnull=False) if 
request.GET.has_key('q'): query = smart_unicode(request.GET['q']) elif request.GET.has_key('query'): query = smart_unicode(request.GET['query']) if query: query = query.strip() if query.lower() == 'all': #query_result = getall(request.adamuser) query_result = _dbobjects else: if query.find(',') != -1: plugin_id = [x.strip() for x in query.split(',')] for x in plugin_id: if not x.isdigit(): return HttpResponseBadRequest('Invalid input parameters "%s".' % escape(query)) elif query.find('-') != -1: try: start, end = [int(x) for x in query.split('-')][:2] except ValueError: return HttpResponseBadRequest('Invalid input parameters "%s".' % escape(query)) plugin_id = [str(x) for x in range(start, end + 1)] elif query.isdigit(): plugin_id = [query] else: return HttpResponseBadRequest('Invalid input parameters "%s".' % escape(query)) query_result = _dbobjects.filter(pk__in=plugin_id) #query_total_cnt = len(query_result) query_total_cnt = query_result.count() elif request.GET.has_key('start'): start = request.GET['start'] limit = request.GET.get('limit', _dbobjects.count()) start = int(start) limit = int(limit) #query_result = BiogpsPlugin.objects.order_by(sort_by)[start:start+limit] query_result = _dbobjects[start:start + limit] query_total_cnt = _dbobjects.count() elif request.GET.has_key('search'): #in case that only query parameter is used. query_result = _dbobjects query_total_cnt = _dbobjects.count() else: return HttpResponseBadRequest('Missing required parameter.') # Append extra attributes to each object, to be passed down in the JSON stream. 
for p in query_result: p.author = p.owner.get_valid_name() # although plugin object has author field, but here we get author name from user table on the fly p.author_url = p.owner.get_absolute_url() p.is_shared = (p.owner != request.user) p.usage_percent = p.usage_percent() p.usage_layout_count = p.popularity.score p.usage_ranking = p.popularity.rank p.usage_users = p.popularity.users_count p.related_plugins = p.popularity.related_plugins extra_itemfields = ['author', 'author_url', 'is_shared', 'usage_percent', 'usage_percent', 'usage_layout_count', 'usage_ranking', 'usage_users', 'related_plugins'] format = request.GET.get('format', 'json') if format not in get_serializer_formats(): format = 'json' if format == 'json': #using specialized jsonserializer return HttpResponse(serialize('myjson', query_result, extra_fields={'totalCount': query_total_cnt}, extra_itemfields=extra_itemfields), content_type=MIMETYPE.get(format, None)) else: return HttpResponse(serialize(STD_FORMAT.get(format, format), query_result), content_type=MIMETYPE.get(format, None)) #return HttpResponse(serialize(STD_FORMAT.get(format, format), query_result),content_type=MIMETYPE.get(format, None)) elif request.method == 'POST': # authorid = request.adamuser.sid # author = request.adamuser.name # url = request.POST['url'] # title = request.POST['title'] # type = request.POST['type'] # description = request.POST.get('description', '') # options = request.POST.get('options', '') # # plugin = BiogpsPlugin(title = title, # url = url, # type = type, # authorid = authorid, # author = author, # options=options, # description = description) # plugin.save() # data = {'success': True} # return HttpResponse(json.dumps(data), content_type=MIMETYPE['json']) if query == 'add': return _plugin_add(request) elif query == 'update': return _plugin_update(request) elif query == 'delete': return _plugin_delete(request) else: return HttpResponseBadRequest('Unsupported action "%s"' % escape(query)) elif request.method == 
'PUT': #update a record pass elif request.method == 'DELETE': pass else: return HttpResponseBadRequest('Unsupported request method "%s"' % request.method)
# NOTE(review): left byte-identical — the view's own comment says it should be
# deprecated, and its PUT/DELETE branches are largely commented-out code; a
# restyle could not be verified safely from this chunk.
# Py2-only idiom present: request.GET.has_key(...) (removed in Py3 — use 'in').
# NOTE(review): the PUT and DELETE branches fall through with no return
# statement (bodies are commented out), so they respond with an implicit
# None/empty response — confirm whether that is intended before cleanup.
def layout(request, query=None): #this should be deprecated. """ /layout/all/ /layout/1/ /layout/3-9/ /layout/1,3,5/ /layout/first/ /layout/last/ optional querystrings for GET: q: can be 1, 1,3,5 3-9 all first last format: json, xml, plainjson, python loadplugin: if true ("1" or "true"), load associated plugin data in returned layout data """ if request.method == 'GET': if request.GET.has_key('q'): query = request.GET['q'] if query: query = query.lower() if query in ['all', 'first', 'last']: if request.user.is_anonymous(): query_result = get_shared_layouts(request.user) else: query_result = getall(request.user, userselectedonly=False) if query_result.count() > 0: if query == 'first': query_result = query_result[0:1] elif query == 'last': query_result = query_result[query_result.count() - 1:] else: try: if query.find(',') != -1: layout_id = [int(x) for x in query.split(',')] elif query.find('-') != -1: start, end = [int(x) for x in query.split('-')][:2] layout_id = [str(x) for x in range(start, end + 1)] else: layout_id = [int(query)] except ValueError: return HttpResponseBadRequest('Invalid input parameters!') query_result = BiogpsGenereportLayout.objects.filter(pk__in=layout_id) loadplugin = (request.GET.get('loadplugin', '').lower() in ['1', 'true']) # if NEW_LAYOUT_MODE: for layout in query_result: layout.author = layout.owner.get_valid_name() layout.is_shared = (layout.owner != request.user) layout.loadplugin = loadplugin extra_itemfields = ['author', 'is_shared', 'layout_data'] query_total_cnt = query_result.count() #logging layout access log.info('username=%s clientip=%s action=layout_query id=%s', getattr(request.user, 'username', ''), request.META.get('REMOTE_ADDR', ''), ','.join([str(layout.id) for layout in query_result])) else: return HttpResponseBadRequest('Missing required parameter.') format = request.GET.get('format', 'json') if format not in get_serializer_formats(): format = 'json' if format == 'json': #return HttpResponse(serialize('jsonfix', 
query_result),content_type=MIMETYPE.get(format, None)) return HttpResponse(serialize('myjson', query_result, extra_fields={'totalCount': query_total_cnt}, extra_itemfields=extra_itemfields), content_type=MIMETYPE.get(format, None)) else: return HttpResponse(serialize(STD_FORMAT.get(format, format), query_result), content_type=MIMETYPE.get(format, None)) elif request.method == 'POST': if request.user.is_anonymous(): return HttpResponse(json.dumps(ANONYMOUS_USER_ERROR), content_type=MIMETYPE['json']) if query == 'add': return _layout_add(request) elif query == 'update': return _layout_update(request) elif query == 'delete': return _layout_delete(request) else: return HttpResponseBadRequest('Unsupported action "%s"' % escape(query)) elif request.method == 'PUT': #update a record if request.user.is_anonymous(): return HttpResponse(json.dumps(ANONYMOUS_USER_ERROR), content_type=MIMETYPE['json']) # authorid = request.user.sid # layout_id = query # qdata = QueryDict(request.raw_post_data) # updatable_fields = ['layout_name', 'layout_data', 'description'] #'permission', # try: # layout = BiogpsGenereportLayout.objects.get(authorid=authorid, id=layout_id) # for f in updatable_fields: # if f in qdata: # if f=='layout_data': # try: # setattr(layout, f, json.loads(qdata[f])) # except ValueError: # return ExtError('Passed "layout_data" is not a valid json string.') # else: # setattr(layout, f, qdata[f]) # layout.save() # data = {'success': True} # except BiogpsGenereportLayout.DoesNotExist: #@UndefinedVariable # return ExtError("Layout does not exist.") # #data = {'success': False, # # 'errors': "Layout does not exist."} # # return HttpResponse(json.dumps(data), content_type=MIMETYPE['json']) elif request.method == 'DELETE': #delete a layout if request.user.is_anonymous(): return HttpResponse(json.dumps(ANONYMOUS_USER_ERROR), content_type=MIMETYPE['json']) # authorid = request.user.sid # layout_id = query # try: # layout = BiogpsGenereportLayout.objects.get(authorid=authorid, 
id=layout_id) # layout.delete() # data = {'success': True} # except BiogpsGenereportLayout.DoesNotExist: #@UndefinedVariable # return ExtError("Layout does not exist.") # #data = {'success': False, # # 'errors': "Layout does not exist."} # # return HttpResponse(json.dumps(data), content_type=MIMETYPE['json']) # else: return HttpResponseBadRequest('Unsupported request method "%s"' % request.method)
# NOTE(review): left byte-identical — the opening statements belong to a test
# helper whose 'def' line is outside this chunk, so the unit cannot be safely
# rewritten from here.
# Py2-only idiom present: .next() on a deserializer (use next(...) in Py3).
# NOTE(review): the helpers take (format, self) — format FIRST — so that
# curry(func, format) bound as a class attribute receives self as the second
# argument; keep that argument order if these are ever refactored.
# Serialize then deserialize the test database serialized_data = serializers.serialize(format, [obj], indent=2, fields=('field1','field3')) result = serializers.deserialize(format, serialized_data).next() # Check that the deserialized object contains data in only the serialized fields. self.assertEqual(result.object.field1, 'first') self.assertEqual(result.object.field2, '') self.assertEqual(result.object.field3, 'third') def streamTest(format, self): obj = ComplexModel(field1='first',field2='second',field3='third') obj.save_base(raw=True) # Serialize the test database to a stream stream = StringIO() serializers.serialize(format, [obj], indent=2, stream=stream) # Serialize normally for a comparison string_data = serializers.serialize(format, [obj], indent=2) # Check that the two are the same self.assertEqual(string_data, stream.getvalue()) stream.close() for format in serializers.get_serializer_formats(): setattr(SerializerTests, 'test_' + format + '_serializer', curry(serializerTest, format)) setattr(SerializerTests, 'test_' + format + '_serializer_fields', curry(fieldsTest, format)) if format != 'python': setattr(SerializerTests, 'test_' + format + '_serializer_stream', curry(streamTest, format))
def get_serializers():
    """Return the registered serializer format names that start with 'rest'."""
    rest_formats = []
    for fmt in serializers.get_serializer_formats():
        if fmt.startswith('rest'):
            rest_formats.append(fmt)
    return rest_formats
def import_from(self, infile, maxsize=10000000):
    """Import serialized fixture data, optionally bundled in a zip archive.

    A zip bundle must contain a ``VARS`` manifest naming the source image
    directory (``PRODUCT.IMAGE_DIR``) and the embedded export file
    (``EXPORT_FILE``); bundled images are copied under MEDIA_ROOT before
    the export file itself is deserialized.

    Args:
        infile: uploaded file object; ``.name`` supplies the filename and
            ``.read()`` the payload.
        maxsize: nominal size cap in bytes (not enforced in this body).

    Returns:
        (results, errors): two lists of human-readable status strings.
    """
    errors = []
    results = []
    filename = infile.name
    raw = infile.read()

    # Derive the serializer format from the extension ("foo.json" -> "json").
    format = os.path.splitext(filename)[1]
    if format and format.startswith("."):
        format = format[1:]
    if not format:
        errors.append(
            _("Could not parse format from filename: %s") % filename)

    if format == "zip":
        # Context manager guarantees the archive is closed on every path --
        # the original leaked the handle when VARS was missing or malformed.
        # NOTE(review): zf.read() returns bytes on Python 3; the str-level
        # handling below (config.split, text-mode image writes) assumes a
        # Python 2 style str -- confirm against the target runtime.
        with zipfile.ZipFile(StringIO(raw), "r") as zf:
            files = zf.namelist()
            image_dir = config_value("PRODUCT", "IMAGE_DIR")
            other_image_dir = None
            export_file = None
            if "VARS" in files:
                config = zf.read("VARS")
                lines = [line.split("=") for line in config.split("\n")]
                for key, val in lines:
                    if key == "PRODUCT.IMAGE_DIR":
                        other_image_dir = val
                    elif key == "EXPORT_FILE":
                        export_file = val
                if other_image_dir is None or export_file is None:
                    errors.append(_("Bad VARS file in import zipfile."))
                else:
                    # Save out all the files which start with other_image_dir,
                    # stripping the source prefix when the configured image
                    # directory matches.
                    rename = image_dir == other_image_dir
                    for f in files:
                        if f.startswith(other_image_dir):
                            buf = zf.read(f)
                            if rename:
                                f = f[len(other_image_dir):]
                                if f[0] in ("/", "\\"):
                                    f = f[1:]
                            f = os.path.join(settings.MEDIA_ROOT, image_dir, f)
                            # with-block closes the destination even if the
                            # write fails (original leaked the handle).
                            with open(f, "w") as outf:
                                outf.write(buf)
                            results.append("Imported image: %s" % f)
                    infile = zf.read(export_file)
                    # Re-derive the format from the embedded export file.
                    format = os.path.splitext(export_file)[1]
                    if format and format.startswith("."):
                        format = format[1:]
                    if not format:
                        # NOTE(review): reports the outer zip filename, not
                        # export_file -- kept for message compatibility.
                        errors.append(
                            _("Could not parse format from filename: %s") % filename)
                    else:
                        raw = infile
            else:
                errors.append(_("Missing VARS in import zipfile."))
    else:
        raw = StringIO(str(raw))

    if format not in serializers.get_serializer_formats():
        errors.append(_("Unknown file format: %s") % format)

    if not errors:
        # The try must wrap the atomic block, not sit inside it: catching
        # the exception inside `with transaction.atomic()` would suppress
        # the rollback and commit a partial import.
        try:
            with transaction.atomic():
                ct = 0
                models = set()
                for obj in serializers.deserialize(format, raw):
                    obj.save()
                    models.add(obj.object.__class__)
                    ct += 1
                if ct > 0:
                    # Reset DB sequences so later inserts don't collide with
                    # the explicit primary keys just loaded.
                    style = no_style()
                    sequence_sql = connection.ops.sequence_reset_sql(
                        style, models)
                    if sequence_sql:
                        cursor = connection.cursor()
                        for line in sequence_sql:
                            cursor.execute(line)
                results.append(
                    _("Added %(count)i objects from %(filename)s") % {
                        "count": ct,
                        "filename": filename
                    })
        except Exception as e:
            errors.append(
                _("Problem installing fixture '%(filename)s': %(error_msg)s\n"
                  ) % {
                      "filename": filename,
                      "error_msg": str(e)
                  })
            errors.append("Raw: %s" % raw)
    return results, errors
# Serialize the test database to a stream for stream in (six.StringIO(), HttpResponse()): serializers.serialize(format, [obj], indent=2, stream=stream) # Serialize normally for a comparison string_data = serializers.serialize(format, [obj], indent=2) # Check that the two are the same if isinstance(stream, six.StringIO): self.assertEqual(string_data, stream.getvalue()) else: self.assertEqual(string_data, stream.content.decode('utf-8')) for format in [ f for f in serializers.get_serializer_formats() if not isinstance( serializers.get_serializer(f), serializers.BadSerializer) ]: setattr(SerializerTests, 'test_' + format + '_serializer', curry(serializerTest, format)) setattr(SerializerTests, 'test_' + format + '_natural_key_serializer', curry(naturalKeySerializerTest, format)) setattr(SerializerTests, 'test_' + format + '_serializer_fields', curry(fieldsTest, format)) if format != 'python': setattr(SerializerTests, 'test_' + format + '_serializer_stream', curry(streamTest, format)) class XmlDeserializerSecurityTests(TestCase): def test_no_dtd(self):
# NOTE(review): imports reference the regimun_app project; `import exceptions`
# and `string.replace(...)` below are Python 2 only.
from django.utils import simplejson
from regimun_app.forms import jEditableForm, BasicConferenceInfoForm, \
    NewCommitteeForm, NewCountryForm, UploadFileForm, NewPaymentForm, \
    delegate_position_form_factory, FeeForm, DatePenaltyForm
from regimun_app.models import Conference, Committee, Country, DelegatePosition, \
    School, Payment, Fee, DatePenalty
from regimun_app.utils import UnicodeCSVDictReader
from regimun_app.views.school_admin import is_school_registered
from regimun_app.views.secretariat_admin import secretariat_authenticate
import csv
import exceptions
import inspect
import settings
import string

# Register the custom 'jsondisplay' serializer once per process.
if 'jsondisplay' not in get_serializer_formats():
    register_serializer('jsondisplay', 'regimun_app.serializers.DisplayNameJsonSerializer')

@login_required
def conference_ajax_functions(request, conference_slug, func_name):
    # Dispatch an AJAX call to the module-level helper named after the URL
    # segment ('foo-bar' -> foo_bar), gated by secretariat authentication.
    conference = get_object_or_404(Conference, url_name=conference_slug)
    func_name = string.replace(func_name, "-", "_")
    if secretariat_authenticate(request, conference) and func_name in globals() and inspect.isfunction(globals()[func_name]):
        return_value = globals()[func_name](request, conference)
        if return_value != None:
            if isinstance(return_value, HttpResponse):
                return return_value
            else:
                # Helper returned raw content -- wrap it for the client.
                return HttpResponse(return_value, mimetype='application/javascript')
                #return HttpResponse("<html><body>" + return_value + "</body></html>")
# NOTE(review): the function appears to continue beyond this chunk (no
# fall-through handling is visible here).
def layoutlist(request, query=None):
    """List layout records serialized in a requested format (default JSON).

    GET parameters: dir/sort (ordering), scope ('my'|'shared'|'all'),
    search (substring filter), q/query (id, comma list, dash range, or
    'all'), start/limit (paging), format (serializer name).

    URL: /layoutlist/all/?userselected=1
         /layoutlist/all/?scope=my
         /layoutlist/all/?scope=shared
         /layoutlist/?search=demo
         /layoutlist/?search=demo&start=20&limit=10
    """
    if request.method == 'GET':
        # Ordering: prefix '-' implements descending sort for the ORM.
        sort_order = request.GET.get('dir', 'DESC')
        sort_by = request.GET.get('sort', 'lastmodified')
        sort_order = sort_order.strip()
        sort_by = sort_by.strip()
        if sort_order.upper() == 'DESC':
            sort_by = '-' + sort_by
        scope = request.GET.get('scope', 'all').lower()
        # userselectedonly = (request.GET.get('userselected','') == '1')
        userselectedonly = False
        ##############TMP disable#############################################
        # tmp fix here, so that all user can pick up whatever is available ###
        ######################################################################
        #First, get all available layouts based on scope
        if (request.user.is_anonymous()):
            # Anonymous visitors only ever see shared layouts.
            _dbobjects = get_shared_layouts(request.user, userselectedonly=False)
        else:
            if scope == 'my':
                _dbobjects = get_my_layouts(request.user)
            elif scope == 'shared':
                _dbobjects = get_shared_layouts(request.user, userselectedonly)
            else:   #scope == 'all':
                #_dbobjects = get_my_layouts(request.adamuser) | get_shared_layouts(request.adamuser, userselectedonly)
                _dbobjects = getall(request.user, userselectedonly)
        #filter layouts based on search parameter
        # NOTE(review): QueryDict.has_key is Python 2 / old-Django API.
        if request.GET.has_key('search'):
            search_term = request.GET['search'].strip()
            _dbobjects = _dbobjects.order_by(sort_by).filter(Q(layout_name__icontains=search_term) | Q(description__icontains=search_term) | Q(author__icontains=search_term))
        else:
            _dbobjects = _dbobjects.order_by(sort_by)
        # 'q'/'query' override any query value passed in the URL path.
        if request.GET.has_key('q'):
            query = request.GET['q']
        elif request.GET.has_key('query'):
            query = request.GET['query']
        if query:
            if query.lower() == 'all':
                query_result = _dbobjects
            else:
                # Accept a single id, a comma list ("1,2,3"), or a dash
                # range ("1-9"); anything non-numeric is a 400.
                try:
                    if query.find(',') != -1:
                        layout_id = [int(x) for x in query.split(',')]
                    elif query.find('-') != -1:
                        start, end = [int(x) for x in query.split('-')][:2]
                        # NOTE(review): range branch builds str ids while the
                        # other branches build ints -- confirm intended.
                        layout_id = [str(x) for x in range(start, end + 1)]
                    else:
                        layout_id = [int(query)]
                except ValueError:
                    return HttpResponseBadRequest('Invalid input parameters!')
                query_result = _dbobjects.filter(pk__in=layout_id)
            query_total_cnt = query_result.count()
        elif request.GET.has_key('start'):
            # Paging: slice the full result set.
            start = request.GET['start']
            limit = request.GET.get('limit', _dbobjects.count())
            start = int(start)
            limit = int(limit)
            #query_result = BiogpsPlugin.objects.order_by(sort_by)[start:start+limit]
            query_result = _dbobjects[start:start + limit]
            query_total_cnt = _dbobjects.count()
        elif request.GET.has_key('search'):
            #in case that only query parameter is used.
            query_result = _dbobjects
            query_total_cnt = _dbobjects.count()
        else:
            return HttpResponseBadRequest('Missing required parameter.')
        # Annotate each layout with display fields consumed by the client.
        for layout in query_result:
            layout.author = layout.owner.get_valid_name()
            layout.is_shared = (layout.owner != request.user)
        extra_itemfields = ['author', 'is_shared', 'layout_data']
        format = request.GET.get('format', 'json')
        if format not in get_serializer_formats():
            format = 'json'
        if format == 'json':
            #using specialized jsonserializer
            return HttpResponse(serialize('myjson', query_result, extra_fields={'totalCount': query_total_cnt}, extra_itemfields=extra_itemfields), content_type=MIMETYPE.get(format, None))
        else:
            return HttpResponse(serialize(STD_FORMAT.get(format, format), query_result), content_type=MIMETYPE.get(format, None))
# NOTE(review): chunk starts inside a multi-line `from regimun_app.forms
# import (...)` statement whose opening line precedes this view.
    NewPaymentForm,
    delegate_position_form_factory,
    FeeForm,
    DatePenaltyForm,
)
from regimun_app.models import Conference, Committee, Country, DelegatePosition, School, Payment, Fee, DatePenalty
from regimun_app.utils import UnicodeCSVDictReader
from regimun_app.views.school_admin import is_school_registered
from regimun_app.views.secretariat_admin import secretariat_authenticate
import csv
import exceptions
import inspect
import settings
import string

# Register the custom 'jsondisplay' serializer once per process.
if "jsondisplay" not in get_serializer_formats():
    register_serializer("jsondisplay", "regimun_app.serializers.DisplayNameJsonSerializer")

@login_required
def conference_ajax_functions(request, conference_slug, func_name):
    # Dispatch an AJAX call to the module-level helper named after the URL
    # segment ("foo-bar" -> foo_bar), gated by secretariat authentication.
    conference = get_object_or_404(Conference, url_name=conference_slug)
    func_name = string.replace(func_name, "-", "_")
    if (
        secretariat_authenticate(request, conference)
        and func_name in globals()
        and inspect.isfunction(globals()[func_name])
    ):
        return_value = globals()[func_name](request, conference)
        if return_value != None:
# NOTE(review): body truncated here -- the branch under
# `if return_value != None:` continues beyond this chunk.
def json_deserializer():
    """Replace the registered 'json' serializer with tb_website.serializers."""
    # Force Django to build its serializer registry before we mutate it.
    get_serializer_formats()
    from django.core import serializers as _core_serializers
    register_serializer('json', 'tb_website.serializers',
                        _core_serializers._serializers)
# NOTE(review): this chunk starts mid-function -- the enclosing
# `def fieldsTest(...)` header precedes this view.
    self.assertEqual(result.object.field3, 'third')

def streamTest(format, self):
    # Verify that serializing to a stream yields the same output as
    # serializing to a string.

    # Clear the database first
    management.call_command('flush', verbosity=0, interactive=False)

    obj = ComplexModel(field1='first', field2='second', field3='third')
    obj.save_base(raw=True)

    # Serialize the test database to a stream
    stream = StringIO()
    serializers.serialize(format, [obj], indent=2, stream=stream)

    # Serialize normally for a comparison
    string_data = serializers.serialize(format, [obj], indent=2)

    # Check that the two are the same
    self.assertEqual(string_data, stream.getvalue())
    stream.close()

# Dynamically attach one test method per registered serializer format.
for format in serializers.get_serializer_formats():
    setattr(SerializerTests, 'test_' + format + '_serializer',
            curry(serializerTest, format))
    setattr(SerializerTests, 'test_' + format + '_serializer_fields',
            curry(fieldsTest, format))
    if format != 'python':
        # The 'python' format is excluded from the stream test.
        setattr(SerializerTests, 'test_' + format + '_serializer_stream',
                curry(streamTest, format))
def import_from(self, infile, maxsize=10000000):
    """Import serialized fixture data, optionally bundled in a zip archive.

    Zip bundles must carry a VARS manifest naming the source image directory
    (PRODUCT.IMAGE_DIR) and the embedded export file (EXPORT_FILE); bundled
    images are copied under MEDIA_ROOT before the export is deserialized.

    NOTE(review): `except Exception, e:` below is Python 2 syntax; the
    function's return statement is not visible in this chunk.
    """
    errors = []
    results = []
    filename = infile.name
    raw = infile.read()
    # Derive the serializer format from the extension ('foo.json' -> 'json').
    format = os.path.splitext(filename)[1]
    if format and format.startswith('.'):
        format = format[1:]
    if not format:
        errors.append(
            _('Could not parse format from filename: %s') % filename)
    if format == 'zip':
        zf = zipfile.ZipFile(StringIO(raw), 'r')
        files = zf.namelist()
        image_dir = config_value('PRODUCT', 'IMAGE_DIR')
        other_image_dir = None
        export_file = None
        if 'VARS' in files:
            config = zf.read('VARS')
            lines = [line.split('=') for line in config.split('\n')]
            for key, val in lines:
                if key == 'PRODUCT.IMAGE_DIR':
                    other_image_dir = val
                elif key == 'EXPORT_FILE':
                    export_file = val
            if other_image_dir is None or export_file is None:
                errors.append(_('Bad VARS file in import zipfile.'))
            else:
                # save out all the files which start with other_image_dr
                rename = image_dir == other_image_dir
                for f in files:
                    if f.startswith(other_image_dir):
                        buf = zf.read(f)
                        if rename:
                            # Strip the source prefix and any leading slash.
                            f = f[len(other_image_dir):]
                            if f[0] in ('/', '\\'):
                                f = f[1:]
                        f = os.path.join(settings.MEDIA_ROOT, image_dir, f)
                        # NOTE(review): handle leaks if write() raises.
                        outf = open(f, 'w')
                        outf.write(buf)
                        outf.close()
                        results.append('Imported image: %s' % f)
                infile = zf.read(export_file)
                zf.close()
                # Re-derive the format from the embedded export file.
                format = os.path.splitext(export_file)[1]
                if format and format.startswith('.'):
                    format = format[1:]
                if not format:
                    errors.append(
                        _('Could not parse format from filename: %s') % filename)
                else:
                    raw = infile
        else:
            # NOTE(review): zf is never closed on this path.
            errors.append(_('Missing VARS in import zipfile.'))
    else:
        raw = StringIO(str(raw))
    if format not in serializers.get_serializer_formats():
        errors.append(_('Unknown file format: %s') % format)
    if not errors:
        # NOTE(review): the except is *inside* the atomic block, which
        # suppresses the rollback Django would otherwise perform.
        with transaction.atomic():
            try:
                ct = 0
                models = set()
                for obj in serializers.deserialize(format, raw):
                    obj.save()
                    models.add(obj.object.__class__)
                    ct += 1
                if ct > 0:
                    # Reset DB sequences so later inserts don't collide
                    # with the explicit primary keys just loaded.
                    style = no_style()
                    sequence_sql = connection.ops.sequence_reset_sql(
                        style, models)
                    if sequence_sql:
                        cursor = connection.cursor()
                        for line in sequence_sql:
                            cursor.execute(line)
                results.append(
                    _('Added %(count)i objects from %(filename)s') % {
                        'count': ct,
                        'filename': filename
                    })
            except Exception, e:
                errors.append(
                    _("Problem installing fixture '%(filename)s': %(error_msg)s\n"
                      ) % {
                          'filename': filename,
                          'error_msg': str(e)
                      })
                errors.append("Raw: %s" % raw)
# NOTE(review): imports reference the regimun_app project; `import exceptions`
# and `string.replace(...)` below are Python 2 only.
from regimun_app.forms import jEditableForm, BasicConferenceInfoForm, \
    NewCommitteeForm, NewCountryForm, UploadFileForm, NewPaymentForm, \
    delegate_position_form_factory, FeeForm, DatePenaltyForm
from regimun_app.models import Conference, Committee, Country, DelegatePosition, \
    School, Payment, Fee, DatePenalty
from regimun_app.utils import UnicodeCSVDictReader
from regimun_app.views.school_admin import is_school_registered
from regimun_app.views.secretariat_admin import secretariat_authenticate
import csv
import exceptions
import inspect
import string
from django.conf import settings

# Register the custom 'jsondisplay' serializer once per process.
if 'jsondisplay' not in get_serializer_formats():
    register_serializer('jsondisplay', 'regimun_app.serializers.DisplayNameJsonSerializer')

@login_required
def conference_ajax_functions(request, conference_slug, func_name):
    # Dispatch an AJAX call to the module-level helper named after the URL
    # segment ('foo-bar' -> foo_bar), gated by secretariat authentication.
    conference = get_object_or_404(Conference, url_name=conference_slug)
    func_name = string.replace(func_name, "-", "_")
    if secretariat_authenticate(request, conference) and func_name in globals() and inspect.isfunction(globals()[func_name]):
        return_value = globals()[func_name](request, conference)
        if return_value != None:
            if isinstance(return_value, HttpResponse):
                return return_value
            else:
                # Helper returned raw content -- wrap it for the client.
                return HttpResponse(return_value, mimetype='application/javascript')
                #return HttpResponse("<html><body>" + return_value + "</body></html>")
# NOTE(review): the function appears to continue beyond this chunk (no
# fall-through handling is visible here).
def export_choices():
    """Return (value, label) pairs, one per registered serializer format."""
    available = serializers.get_serializer_formats()
    # Each format labels itself; zip keeps the original return type.
    return zip(available, available)
def export_choices():
    """Expose every registered serializer format as a self-labelled choice."""
    format_names = serializers.get_serializer_formats()
    return zip(format_names, format_names)
def import_from(self, infile, maxsize=10000000):
    """Import serialized fixture data, optionally bundled in a zip archive.

    Zip bundles must carry a VARS manifest naming the source image directory
    (PRODUCT.IMAGE_DIR) and the embedded export file (EXPORT_FILE); bundled
    images are copied under MEDIA_ROOT before the export is deserialized.

    NOTE(review): Python 2 code (`except Exception, e:`) using the
    pre-Django-1.6 manual transaction-management API; no return statement
    is visible in this chunk -- presumably it follows beyond it.
    """
    errors = []
    results = []
    filetype = infile.content_type
    filename = infile.name
    raw = infile.read()
    # filelen = len(raw)
    # if filelen > maxsize:
    #     errors.append(_('Import too large, must be smaller than %i bytes.' % maxsize ))
    # Derive the serializer format from the extension ('foo.json' -> 'json').
    format = os.path.splitext(filename)[1]
    if format and format.startswith('.'):
        format = format[1:]
    if not format:
        errors.append(_('Could not parse format from filename: %s') % filename)
    if format == 'zip':
        zf = zipfile.ZipFile(StringIO(raw), 'r')
        files = zf.namelist()
        image_dir = config_value('PRODUCT', 'IMAGE_DIR')
        other_image_dir = None
        export_file = None
        if 'VARS' in files:
            config = zf.read('VARS')
            lines = [line.split('=') for line in config.split('\n')]
            for key, val in lines:
                if key == 'PRODUCT.IMAGE_DIR':
                    other_image_dir = val
                elif key == 'EXPORT_FILE':
                    export_file = val
            if other_image_dir is None or export_file is None:
                errors.append(_('Bad VARS file in import zipfile.'))
            else:
                # save out all the files which start with other_image_dr
                rename = image_dir == other_image_dir
                for f in files:
                    if f.startswith(other_image_dir):
                        buf = zf.read(f)
                        if rename:
                            # Strip the source prefix and any leading slash.
                            f = f[len(other_image_dir):]
                            if f[0] in ('/', '\\'):
                                f = f[1:]
                        f = os.path.join(settings.MEDIA_ROOT, image_dir, f)
                        # NOTE(review): handle leaks if write() raises.
                        outf = open(f, 'w')
                        outf.write(buf)
                        outf.close()
                        results.append('Imported image: %s' % f)
                infile = zf.read(export_file)
                zf.close()
                # Re-derive the format from the embedded export file.
                format = os.path.splitext(export_file)[1]
                if format and format.startswith('.'):
                    format = format[1:]
                if not format:
                    errors.append(_('Could not parse format from filename: %s') % filename)
                else:
                    raw = infile
        else:
            # NOTE(review): zf is never closed on this path.
            errors.append(_('Missing VARS in import zipfile.'))
    else:
        raw = StringIO(str(raw))
    if not format in serializers.get_serializer_formats():
        errors.append(_('Unknown file format: %s') % format)
    if not errors:
        # Manual transaction bracket: commit on success, rollback on error.
        from django.db import connection, transaction
        transaction.commit_unless_managed()
        transaction.enter_transaction_management()
        transaction.managed(True)
        try:
            ct = 0
            models = set()
            for obj in serializers.deserialize(format, raw):
                obj.save()
                models.add(obj.object.__class__)
                ct += 1
            if ct>0:
                # Reset DB sequences so later inserts don't collide with
                # the explicit primary keys just loaded.
                style=no_style()
                sequence_sql = connection.ops.sequence_reset_sql(style, models)
                if sequence_sql:
                    cursor = connection.cursor()
                    for line in sequence_sql:
                        cursor.execute(line)
                results.append(_('Added %(count)i objects from %(filename)s') % {'count': ct, 'filename': filename})
            transaction.commit()
            #label_found = True
        except Exception, e:
            #fixture.close()
            errors.append(_("Problem installing fixture '%(filename)s': %(error_msg)s\n") % {'filename': filename, 'error_msg': str(e)})
            errors.append("Raw: %s" % raw)
            transaction.rollback()
            transaction.leave_transaction_management()
        else:
            transaction.leave_transaction_management()