Example 1
def process_batch(key, batch_info, iv):
    """Loops through items in a batch and processes them."""
    batch_info = pickle.loads(decrypt(key, batch_info, bytes.fromhex(iv)))  # bytes.fromhex replaces the Python 2-only iv.decode('hex')
    success = True


    if 'dependencies' in batch_info:
        dependencies = serializers.deserialize('json', batch_info['dependencies'])
        for dep in dependencies:
            dep.save()

    if 'update' in batch_info:
        updates = serializers.deserialize('json', batch_info['update'])
        for item in updates:
            with reversion.create_revision():
                item.save()

    if 'deletions' in batch_info:
        deletions = json.loads(batch_info['deletions'])
        for deletion in deletions:
            app_label, model_label, object_id = deletion
            ct = ContentType.objects.get_by_natural_key(app_label, model_label)
            for result in ct.model_class().objects.filter(pk=object_id):
                with reversion.create_revision():
                    result.delete()
                    
    return success
Example 2
    def get_stock_history(self):
        stock_history = []

        for sh in StockUpdateHistory.objects.filter(stock=self).order_by('-datetime_created'):
            history_info = json.loads(sh.history_info)

            data = {}
            data['previous_state'] = {}
            data['new_state'] = {}
            data['datetime_created'] = str(defaultfilters.date(sh.datetime_created, "Y-m-d H:i"))  # 'i' is minutes; 'm' would repeat the month
            data['reason'] = history_info['reason']

            for obj in serializers.deserialize("json", history_info['previous_state']):
                data['previous_state']['quantity'] = str(format_number(obj.object.quantity))
                data['previous_state']['stock'] = str(format_number(obj.object.stock))
                data['previous_state']['left_stock'] = str(format_number(obj.object.left_stock))
                data['previous_state']['purchase_price'] = str(format_number(obj.object.purchase_price))

            for obj in serializers.deserialize("json", history_info['new_state']):
                data['new_state']['quantity'] = str(format_number(obj.object.quantity))
                data['new_state']['stock'] = str(format_number(obj.object.stock))
                data['new_state']['left_stock'] = str(format_number(obj.object.left_stock))
                data['new_state']['purchase_price'] = str(format_number(obj.object.purchase_price))

            stock_history.append(data)

        return stock_history
Example 3
def _process_history_text(history, text):
    key_values = {
        'content_object': history.content_object,
        'datetime': history.datetime
    }

    loaded_dictionary = json.loads(history.dictionary)

    new_dict = {}
    for key, values in loaded_dictionary.items():
        value_type = pickle.loads(str(values['type']))
        if isinstance(value_type, models.base.ModelBase):
            for deserialized in serializers.deserialize('json', values['value']):
                new_dict[key] = deserialized.object
        elif isinstance(value_type, models.query.QuerySet):
            qs = []
            for deserialized in serializers.deserialize('json', values['value']):
                qs.append(deserialized.object)
            new_dict[key] = qs
        else:
            new_dict[key] = json.loads(values['value'])

    key_values.update(new_dict)
    expressions_dict = {}

    for key, value in history.get_expressions().items():
        try:
            expressions_dict[key] = eval(value, key_values.copy())
        except Exception as e:
            expressions_dict[key] = e
Example 4
def processquerycadist(request):
    
    # Get query result from garfield server
    gojson = gopq(request).content
        
    # Re-create Post objects ... to get the time 
    posts = []
    for p in serializers.deserialize("json", gojson):
        posts.append(p.object)

    # Get time that is reported from Garfield & Odie
    time_diff = posts.pop(0).text

    # Get posts from Minerva SQL for the text
    mjson = processquery(request).content

    # Re-create Post objects ... to get the text
    posts = []
    for p in serializers.deserialize("json", mjson):
        posts.append(p.object)

    # Substitute with the right time
    p = posts.pop(0)
    posts.insert(0, Post(text=time_diff))

    # Return JSON response of query logic
    data = serializers.serialize("json", posts)
    return HttpResponse(data, content_type="application/json")
Example 5
 def runTest(self):
     logger = logging.getLogger("TestWSGI TestPut")
     h = httplib2.Http()
     url = URL_BASE + '1/'
     
     for input in bad_json_strings:
         logger.info('accessing: ' + url)
         response, content = h.request(url, 'PUT', input, headers=json_headers)
         
         self.assertTrue(response.status == HTTP_BAD_REQUEST)
         logger.info('got expected error message: ' + content)
         
     logger.info('get current description')
     response, content = h.request(url,'GET', headers=json_headers)
     self.assertTrue(response.status == HTTP_OK)
     get_event = next(serializers.deserialize('json', content)).object
     self.assertTrue(get_event.description != NEW_DESCRIPTION)
     
     logger.info('putting new description in place')
     response, content = h.request(url, 'PUT', good_json_string, headers=json_headers)
     self.assertTrue(response.status == HTTP_OK)
     
     logger.info('checking for new description')
     response, content = h.request(url,'GET', headers=json_headers)
     self.assertTrue(response.status == HTTP_OK)
     get_event = next(serializers.deserialize('json', content)).object
     self.assertTrue(get_event.description == NEW_DESCRIPTION)
Example 6
def jsonBack(json):
    """
    还原Json数据
    """
    if json[0] == '[':
        return deserialize('json', json)
    else:
        return deserialize('json', '[' + json + ']')
Example 7
 def test_invalid_json(self):
     # invalid json data {] in the json and default_json fields
     ser = '[{"pk": 1, "model": "jsonfield.jsoncharmodel", ' \
         '"fields": {"json": "{]", "default_json": "{]"}}]'
     with self.assertRaises(DeserializationError) as cm:
         next(deserialize('json', ser))
     inner = cm.exception.args[0]
     self.assertTrue(isinstance(inner, ValidationError))
     self.assertEqual('Enter valid JSON', inner.messages[0])
Example 8
def Deserializer(stream_or_string, **options):
    """
    Deserialize a stream or string of mako template data.
    """
    from mako.template import Template
    filepath = os.path.splitext(options.get('filepath'))[0]
    # adjust filepath
    options['filepath'] = filepath
    stream_or_string = Template(stream_or_string.read()).render()
    serializers.deserialize(os.path.splitext(filepath)[1], stream_or_string, **options)
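Registration is not shown above; the following is a minimal sketch of wiring such a module in as a custom format. The module path 'myapp.mako_serializer' and the fixture name are assumptions for illustration.

# Hedged sketch: register the module above as a custom 'mako' format.
from django.core import serializers

serializers.register_serializer('mako', 'myapp.mako_serializer')

with open('fixture.json.mako') as f:
    for obj in serializers.deserialize('mako', f, filepath='fixture.json.mako'):
        obj.save()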
Example 9
    def test_serialize_range_with_null(self):
        instance = RangesModel(ints=NumericRange(None, 10))
        data = serializers.serialize('json', [instance])
        new_instance = list(serializers.deserialize('json', data))[0].object
        self.assertEqual(new_instance.ints, NumericRange(None, 10))

        instance = RangesModel(ints=NumericRange(10, None))
        data = serializers.serialize('json', [instance])
        new_instance = list(serializers.deserialize('json', data))[0].object
        self.assertEqual(new_instance.ints, NumericRange(10, None))
Example 10
def evaluation(request, course_short_title=None):
    course = Course.get_or_raise_404(short_title=course_short_title)
    overview = ""
    elaborations = []
    count = 0
    selection = request.session.get("selection", "error")
    if selection not in ("error", "questions"):
        for serialized_elaboration in serializers.deserialize("json", request.session.get("elaborations", {})):
            elaborations.append(serialized_elaboration.object)
        if selection == "search":
            display_points = request.session.get("display_points", "error")
            if display_points == "true":
                user = AuroraUser.objects.get(username=request.session.get("selected_user"))
                points = get_points(request, user, course)
                data = {
                    "elaborations": elaborations,
                    "search": True,
                    "stacks": points["stacks"],
                    "courses": points["courses"],
                    "review_evaluation_data": points["review_evaluation_data"],
                    "course": course,
                }
            else:
                data = {"elaborations": elaborations, "search": True, "course": course}
        elif selection == "complaints":
            data = {"elaborations": elaborations, "course": course, "complaints": "true"}
        else:
            data = {"elaborations": elaborations, "course": course}
        overview = render_to_string("overview.html", data, RequestContext(request))
        count = len(elaborations)
    elif selection == "questions":
        # get selected challenges from session
        challenges = []
        for serialized_challenge in serializers.deserialize("json", request.session.get("challenges", {})):
            challenges.append(serialized_challenge.object)
        count = len(challenges)
        overview = render_to_string("questions.html", {"challenges": challenges}, RequestContext(request))

    challenges = Challenge.objects.all()

    return render_to_response(
        "evaluation.html",
        {
            "challenges": challenges,
            "overview": overview,
            "count_" + request.session.get("selection", ""): count,
            "stabilosiert_" + request.session.get("selection", ""): "stabilosiert",
            "course": course,
            "selected_challenge": request.session.get("selected_challenge"),
            "selected_user": request.session.get("selected_user"),
            "selected_task": request.session.get("selected_task"),
        },
        context_instance=RequestContext(request),
    )
Example 11
    def test_naive_datetime(self):
        dt = datetime.datetime(2011, 9, 1, 13, 20, 30)

        data = serializers.serialize('python', [Event(dt=dt)])
        self.assertEqual(data[0]['fields']['dt'], dt)
        obj = next(serializers.deserialize('python', data)).object
        self.assertEqual(obj.dt, dt)

        data = serializers.serialize('json', [Event(dt=dt)])
        self.assertIn('"fields": {"dt": "2011-09-01T13:20:30"}', data)
        obj = next(serializers.deserialize('json', data)).object
        self.assertEqual(obj.dt, dt)
Example 12
def deserialize():
    """
    Load objects from a file.
    """
    from django.core import serializers
    with open("D:/test2.xml") as data:
        for deserialized_object in serializers.deserialize("xml", data, ignorenonexistent=True):
            # Save only the model types expected in the file.
            if isinstance(deserialized_object.object, (Student, Group)):
                deserialized_object.save()
Example 13
def handle_document(request):
    area = None
    areacode = request.session.get('areacode')
    if areacode:
        area = Area.objects.get(id=int(areacode))


    if request.method == 'POST':

        form = DocumentAddForm(request.POST)

        if form.is_valid():
            a = Document()
            a.establishment = form.cleaned_data['establishment']
            a.documentType = form.cleaned_data['documentType']
            a.expeditionDate = form.cleaned_data['expeditionDate']
            a.expirationDate = form.cleaned_data['expirationDate']
            if form.cleaned_data['enabled']:
                a.documentStatus = statusForExpirationDate(form.cleaned_data['expirationDate'])
            else:
                a.documentStatus = DocumentStatus.objects.filter(enabled=False)[0]
            a.save()

            h = DocumentHistory()
            h.user = request.user
            h.operation = "CREATION"
            h.snapshot = to_JSON(a)
            h.document = a

            h.save()
            a.documenthistory_set.add(h)

            extraFieldsCount = DocumentTypeField.objects.filter(documentType=a.documentType).count()
            if extraFieldsCount > 0:
                return HttpResponseRedirect('/document/' + str(a.id) + '/')
            else:
                return HttpResponseRedirect('/documents/')

    else:
        selectionList = request.session.get('selection_list') or {}
        e = None
        if 'establishment' in selectionList:
            for obj in serializers.deserialize("json", selectionList['establishment']):
                e = obj.object
        t = None
        if 'documentType' in selectionList:
            for obj in serializers.deserialize("json", selectionList['documentType']):
                t = obj.object
        st = None
        form = DocumentAddForm(initial={'establishment': e,'documentType': t })
        form.fields['enabled'].initial = True

    return render(request, 'detail_template.html', {'form': form, 'action':'/document/', 'http_method':'POST', 'area' : area})
Example 14
def evaluation(request, course_short_title=None):
    course = Course.get_or_raise_404(short_title=course_short_title)
    overview = ""
    elaborations = []
    count = 0
    selection = request.session.get('selection', 'error')
    if selection not in ('error', 'questions'):
        for serialized_elaboration in serializers.deserialize('json', request.session.get('elaborations', '[]')):
            elaborations.append(serialized_elaboration.object)
        if selection == 'search':
            display_points = request.session.get('display_points', 'error')
            if display_points == "true":
                user = AuroraUser.objects.get(username=request.session.get('selected_user'))
                points = get_points(request, user, course)
                data = {
                    'elaborations': elaborations,
                    'search': True,
                    'stacks': points['stacks'],
                    'courses': points['courses'],
                    'review_evaluation_data': points['review_evaluation_data'],
                    'course': course
                }
            else:
                data = {'elaborations': elaborations, 'search': True, 'course': course}
        elif selection == 'complaints':
            data = {'elaborations': elaborations, 'course': course, 'complaints': 'true'}
        else:
            data = {'elaborations': elaborations, 'course': course}
        overview = render_to_string('overview.html', data, RequestContext(request))
        count = len(elaborations)
    elif selection == 'questions':
        # get selected challenges from session
        challenges = []
        for serialized_challenge in serializers.deserialize('json', request.session.get('challenges', '[]')):
            challenges.append(serialized_challenge.object)
        count = len(challenges)
        overview = render_to_string('questions.html', {'challenges': challenges}, RequestContext(request))

    challenges = Challenge.objects.all()

    return render_to_response('evaluation.html',
                              {'challenges': challenges,
                               'overview': overview,
                               'count_' + request.session.get('selection', ''): count,
                               'stabilosiert_' + request.session.get('selection', ''): 'stabilosiert',
                               'course': course,
                               'selected_challenge': request.session.get('selected_challenge'),
                               'selected_user': request.session.get('selected_user'),
                               'selected_task': request.session.get('selected_task'),
                              },
                              context_instance=RequestContext(request))
Example 15
 def json_back(self, json_data):
     '''
     Deserialize a JSON string. Typically the POST (or GET) parameters
     coming back from the network contain JSON data, e.g. a key/value
     pair such as
     request.POST['update'] = "[{pk:1,name:'changename'},{pk:2,name:'changename2'}]"
     Django's built-in serialization can deserialize that value, but the
     payload may also be a JSON string for a single object, e.g.
     request.POST['update'] = "{pk:1,name:'changename'}"
     Django cannot deserialize a single bare object, so we wrap the string
     in '[]' to fake a one-element array before deserializing.
     '''
     if json_data[0] == '[':
         return deserialize('json', json_data)
     else:
         return deserialize('json', '['+json_data+']')
Example 16
def jsonBack(json):
    """    进行Json字符串的反序列化
        一般来说,从网络得回的POST(或者GET)参数中所包含json数据
        例如,用POST传过来的参数中有一个key value键值对为
        request.POST['update'] = "[{pk:1,name:'changename'},{pk:2,name:'changename2'}]"
        要将这个value进行反序列化则可以使用Django内置的序列化与反序列化
        但是问题在于传回的有可能是代表单个对象的json字符串,如:
        request.POST['update'] = "{pk:1,name:'changename'}"
        这是,由于Django无法处理单个对象,因此要做适当的处理
        将其模拟成一个数组,也就是用'[]'进行包围再进行反序列化
    """
    if json[0] == '[':
        return deserialize('json',json)
    else:
        return deserialize('json','[' + json + ']')
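A minimal usage sketch for the wrapper above, assuming a configured Django project; the model label 'app.item' and field names are illustrative only.

# Hedged sketch: both payload shapes deserialize the same way once a
# single object is wrapped in '[]'.
single_payload = '{"pk": 2, "model": "app.item", "fields": {"name": "b"}}'
for wrapper in jsonBack(single_payload):
    wrapper.save()  # wrapper is a DeserializedObject; wrapper.object is the model instance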
Example 17
    def get(self, request, *args, **kwargs):
        try:
            self.object = self.get_object()
        except Http404:
            messages.error(self.request, 'That issue was not found.')
            return redirect("/")
        if self.request.GET.get('paymentId'):
            import paypalrestsdk
            paypalrestsdk.configure({
                'mode': settings.MODE,
                'client_id': settings.CLIENT_ID,
                'client_secret': settings.CLIENT_SECRET
            })

            payment = paypalrestsdk.Payment.find(self.request.GET.get('paymentId'))

            custom = payment.transactions[0].custom

            if payment.execute({"payer_id": self.request.GET.get('PayerID')}):
                for obj in serializers.deserialize("json", custom, ignorenonexistent=True):
                    obj.object.created = datetime.datetime.now()
                    obj.object.checkout_id = self.request.GET.get('checkout_id')
                    obj.save()
                    action.send(self.request.user, verb='placed a $' + str(obj.object.price) + ' bounty on ', target=obj.object.issue)
                    post_to_slack(obj.object)
                    if not settings.DEBUG:
                        create_comment(obj.object.issue)
            else:
                messages.error(request, payment.error)

        if self.request.GET.get('checkout_id'):
            wepay = WePay(settings.WEPAY_IN_PRODUCTION, settings.WEPAY_ACCESS_TOKEN)
            wepay_data = wepay.call('/checkout/', {
                'checkout_id': self.request.GET.get('checkout_id'),
            })

            for obj in serializers.deserialize("xml", wepay_data['long_description'], ignorenonexistent=True):
                obj.object.created = datetime.datetime.now()
                obj.object.checkout_id = self.request.GET.get('checkout_id')
                obj.save()
                action.send(self.request.user, verb='placed a $' + str(obj.object.price) + ' bounty on ', target=obj.object.issue)
                post_to_slack(obj.object)
                if not settings.DEBUG:
                    create_comment(obj.object.issue)

        return super(IssueDetailView, self).get(request, *args, **kwargs)
Example 18
def processquerydist(request):
    # FIXME: Parallelize these queries
    
    # Get Athena post objects
    ajson = apq(request).content

    aposts = []
    for p in serializers.deserialize("json", ajson):
        aposts.append(p.object)

    # Get Zeus post objects
    zjson = zpq(request).content

    zposts = []
    for p in serializers.deserialize("json", zjson):
        zposts.append(p.object)

    # Get time and err_msg if any
    a_post_time = aposts.pop(0).text
    z_post_time = zposts.pop(0).text

    # Error message NOT used here. Taken care of with Minerva's monolithic 
    # MySQL server. But we return them nonetheless for debugging purposes.
    a_err_msg = aposts.pop(0).text
    z_err_msg = zposts.pop(0).text

    # Build error message and get execution time: max of both exec time
    err_msg = ", ".join([a_err_msg, z_err_msg])
    time_diff = max(float(a_post_time), float(z_post_time))

    # Append to form all posts
    posts = []
    posts.extend(aposts)
    posts.extend(zposts)

    # Order posts by id in descending order
    posts = sorted(posts, key=lambda post: -post.id)

    # Get appropriate number of post results
    post_results = posts[:POST_RETURN_LIMIT]

    # Insert err_msg followed by time
    post_results.insert(0, Post(text=err_msg)) 
    post_results.insert(0, Post(text=str(time_diff)))  # time_diff is a float; convert for the text field

    # Return JSON response of query logic
    data = serializers.serialize("json", post_results)
    return HttpResponse(data, content_type="application/json")
Example 19
def serializerTest(format, self):

    # Create all the objects defined in the test data
    objects = []
    instance_count = {}
    for (func, pk, klass, datum) in test_data:
        with connection.constraint_checks_disabled():
            objects.extend(func[0](pk, klass, datum))
        instance_count[klass] = 0  # register each model class; counted below

    # Get a count of the number of objects created for each class
    for klass in instance_count:
        instance_count[klass] = klass.objects.count()

    # Add the generic tagged objects to the object list
    objects.extend(Tag.objects.all())

    # Serialize the test database
    serialized_data = serializers.serialize(format, objects, indent=2)

    for obj in serializers.deserialize(format, serialized_data):
        obj.save()

    # Assert that the deserialized data is the same
    # as the original source
    for (func, pk, klass, datum) in test_data:
        func[1](self, pk, klass, datum)

    # Assert that the number of objects deserialized is the
    # same as the number that was serialized.
    for klass, count in instance_count.items():
        self.assertEqual(count, klass.objects.count())
Example 20
 def test_helpful_error_message_for_many2many_non_natural(self):
     """
     Invalid many-to-many keys should throw a helpful error message.
     """
     test_string = """[{
         "pk": 1,
         "model": "serializers.article",
         "fields": {
             "author": 1,
             "headline": "Unknown many to many",
             "pub_date": "2014-09-15T10:35:00",
             "categories": [1, "doesnotexist"]
         }
     }, {
         "pk": 1,
         "model": "serializers.author",
         "fields": {
             "name": "Agnes"
         }
     }, {
         "pk": 1,
         "model": "serializers.category",
         "fields": {
             "name": "Reference"
         }
     }]"""
     expected = "(serializers.article:pk=1) field_value was 'doesnotexist'"
     with self.assertRaisesMessage(serializers.base.DeserializationError, expected):
         list(serializers.deserialize('json', test_string))
Example 21
    def test_dump_and_load_m2m_simple(self):
        """
        Test serializing and deserializing back models with simple M2M relations
        """
        a = M2MSimpleA.objects.create(data="a")
        b1 = M2MSimpleB.objects.create(data="b1")
        b2 = M2MSimpleB.objects.create(data="b2")
        a.b_set.add(b1)
        a.b_set.add(b2)

        out = StringIO()
        management.call_command(
            'dumpdata',
            'fixtures_regress.M2MSimpleA',
            'fixtures_regress.M2MSimpleB',
            use_natural_foreign_keys=True,
            stdout=out,
        )

        for model in [M2MSimpleA, M2MSimpleB]:
            model.objects.all().delete()

        objects = serializers.deserialize("json", out.getvalue())
        for obj in objects:
            obj.save()

        new_a = M2MSimpleA.objects.get_by_natural_key("a")
        self.assertQuerysetEqual(new_a.b_set.all(), [
            "<M2MSimpleB: b1>",
            "<M2MSimpleB: b2>"
        ], ordered=False)
Example 22
 def test_helpful_error_message_for_many2many_natural2(self):
     """
     Invalid many-to-many keys should throw a helpful error message. This
     tests the code path where a natural many-to-many key has only a single
     value.
     """
     test_string = """[{
         "pk": 1,
         "model": "serializers.article",
         "fields": {
             "author": 1,
             "headline": "Unknown many to many",
             "pub_date": "2014-09-15T10:35:00",
             "meta_data": [1, "doesnotexist"]
         }
     }, {
         "pk": 1,
         "model": "serializers.categorymetadata",
         "fields": {
             "kind": "author",
             "name": "meta1",
             "value": "Agnes"
         }
     }, {
         "pk": 1,
         "model": "serializers.author",
         "fields": {
             "name": "Agnes"
         }
     }]"""
     expected = "(serializers.article:pk=1) field_value was 'doesnotexist'"
     with self.assertRaisesMessage(serializers.base.DeserializationError, expected):
          for obj in serializers.deserialize('json', test_string, ignorenonexistent=False):
             obj.save()
Example 23
    def _precompile_fixture(self, fixture, db):
        """Precompiles a fixture.

        The fixture is loaded and deserialized, and the resulting objects
        are stored for future use.
        """
        assert db in TestCase._precompiled_fixtures
        assert fixture not in TestCase._precompiled_fixtures[db]

        fixture_path = None

        for fixture_dir in self._get_fixture_dirs():
            fixture_path = os.path.join(fixture_dir, fixture + '.json')

            if os.path.exists(fixture_path):
                break

        try:
            if not fixture_path:
                raise IOError('Fixture path not found')

            with open(fixture_path, 'r') as fp:
                TestCase._precompiled_fixtures[db][fixture] = [
                    obj
                    for obj in serializers.deserialize('json', fp, using=db)
                    if router.allow_syncdb(db, obj.object.__class__)
                ]
        except IOError as e:
            sys.stderr.write('Unable to load fixture %s: %s\n' % (fixture, e))
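The replay side is not shown in this snippet; a plausible counterpart under the same assumptions (the method name is hypothetical) simply saves each precompiled object into the test database:

    # Hedged sketch: replay precompiled fixture objects into the database.
    def _load_precompiled_fixture(self, fixture, db):
        for obj in TestCase._precompiled_fixtures[db][fixture]:
            obj.save(using=db)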
Example 24
 def test_deserialize_force_insert(self):
     """Tests that deserialized content can be saved with force_insert as a parameter."""
     serial_str = serializers.serialize(self.serializer_name, [self.a1])
     deserial_obj = list(serializers.deserialize(self.serializer_name, serial_str))[0]
     with mock.patch('django.db.models.Model') as mock_model:
         deserial_obj.save(force_insert=False)
         mock_model.save_base.assert_called_with(deserial_obj.object, raw=True, using=None, force_insert=False)
Example 25
def natural_key_test(format, self):
    book1 = {
        'data': '978-1590597255',
        'title': 'The Definitive Guide to Django: Web Development Done Right',
    }
    book2 = {'data': '978-1590599969', 'title': 'Practical Django Projects'}

    # Create the books.
    adrian = NaturalKeyAnchor.objects.create(**book1)
    james = NaturalKeyAnchor.objects.create(**book2)

    # Serialize the books.
    string_data = serializers.serialize(
        format, NaturalKeyAnchor.objects.all(), indent=2,
        use_natural_foreign_keys=True, use_natural_primary_keys=True,
    )

    # Delete one book (to prove that the natural key generation will only
    # restore the primary keys of books found in the database via the
    # get_by_natural_key manager method).
    james.delete()

    # Deserialize and test.
    books = list(serializers.deserialize(format, string_data))
    self.assertEqual(len(books), 2)
    self.assertEqual(books[0].object.title, book1['title'])
    self.assertEqual(books[0].object.pk, adrian.pk)
    self.assertEqual(books[1].object.title, book2['title'])
    self.assertIsNone(books[1].object.pk)
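For context, a minimal sketch of the kind of model and manager this test assumes (names are illustrative, not the actual NaturalKeyAnchor definition); deserialization calls the manager's get_by_natural_key() to recover the primary key of rows that still exist:

from django.db import models

class BookManager(models.Manager):
    def get_by_natural_key(self, data):
        # Deserialization uses this to map a natural key back to a pk.
        return self.get(data=data)

class Book(models.Model):
    data = models.CharField(max_length=100, unique=True)
    title = models.CharField(max_length=255)
    objects = BookManager()

    def natural_key(self):
        return (self.data,)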
Example 26
def sort(request, course_short_title=None):
    course = Course.get_or_raise_404(short_title=course_short_title)

    elaborations = []
    for serialized_elaboration in serializers.deserialize('json', request.session.get('elaborations', '[]')):
        elaborations.append(serialized_elaboration.object)

    if request.GET.get('data', '') == "date_asc":
        elaborations.sort(key=lambda elaboration: elaboration.submission_time)
    if request.GET.get('data', '') == "date_desc":
        elaborations.sort(key=lambda elaboration: elaboration.submission_time, reverse=True)
    if request.GET.get('data', '') == "elab_asc":
        elaborations.sort(key=lambda elaboration: elaboration.challenge.title)
    if request.GET.get('data', '') == "elab_desc":
        elaborations.sort(key=lambda elaboration: elaboration.challenge.title, reverse=True)
    if request.GET.get('data', '') == "post_asc":
        elaborations.sort(key=lambda elaboration: elaboration.get_last_post_date())
    if request.GET.get('data', '') == "post_desc":
        elaborations.sort(key=lambda elaboration: elaboration.get_last_post_date(), reverse=True)

    # store selected elaborations in session
    request.session['elaborations'] = serializers.serialize('json', elaborations)
    request.session['count'] = len(elaborations)

    data = {
        'overview_html': render_to_string('overview.html', {'elaborations': elaborations, 'course': course}, RequestContext(request)),
        'menu_html': render_to_string('menu.html', {
            'count_' + request.session.get('selection', ''): request.session.get('count', '0'),
            'stabilosiert_' + request.session.get('selection', ''): 'stabilosiert', 'course': course,
        }, RequestContext(request)),
        'selection': request.session['selection']
    }

    return HttpResponse(json.dumps(data))
Example 27
 def test_loading(self):
     instance = list(serializers.deserialize('json', self.test_data))[0].object
     self.assertEqual(instance.ints, NumericRange(0, 10))
     self.assertEqual(instance.floats, NumericRange(empty=True))
     self.assertEqual(instance.bigints, None)
     self.assertEqual(instance.dates, DateRange(self.lower_date, self.upper_date))
     self.assertEqual(instance.timestamps, DateTimeTZRange(self.lower_dt, self.upper_dt))
Example 28
    def setUp(self):
        # Usage:
        #       Constructor for TestViewIndex
        # Arguments:
        # 		None

        # Every test needs access to the request factory.
        self.factory = RequestFactory()

        # An array of model file locations
        model_array = [
            "./unit_tests/serialized_test_data/yelp/review.json",
            "./unit_tests/serialized_test_data/facebook/group.json",
            "./unit_tests/serialized_test_data/facebook/post.json",
            "./unit_tests/serialized_test_data/facebook/user.json"
        ]

        # Loop through model file locations
        for file_locations in model_array:

            # Open the file
            with open(file_locations) as data_file:

                # Read the file
                data = data_file.read()

                # Loop through the serialized data, deserialize each object, and save the models
                for deserialized_object in serializers.deserialize("json", data):
                    deserialized_object.save()
Example 29
    def handle(self, *args, **options):
        if not args:
            raise CommandError(u"You must provide a URL.")

        url = args[0]

        try:
            r = requests.get(url)
        except exceptions.MissingSchema as e:
            raise CommandError(str(e))  # e.message is Python 2-only

        if r.status_code != 200:
            raise CommandError("Received status {0} from {1},"
                               "expected 200.".format(r.status_code, url))

        try:
            for obj in serializers.deserialize("json", r.content):
                msg = 'Processing "{0}"...\n'.format(obj.object.title)
                self.stdout.write(msg)
                try:
                    Post.objects.get(slug=obj.object.slug)
                    msg = ('Already had existing object with the slug '
                           '"{0}".\n'.format(obj.object.slug))
                    self.stdout.write(msg)
                except Post.DoesNotExist:
                    obj.save()
                    self.stdout.write(u'Saved new object.\n')
        except DeserializationError as e:
            raise CommandError(str(e))
Example 30
def _prepare_nodes(workflow, root):
  """
  Prepare nodes for groking by Django
  - Deserialize
  - Automatically skip undefined nodes.
  """
  objs = serializers.deserialize('xml', etree.tostring(root))

  # First pass is a list of nodes and their types respectively.
  # Must link up nodes with their respective full nodes.
  node = None
  nodes = []
  for obj in objs:
    obj.object.workflow = workflow
    if type(obj.object) is Node:
      node = obj.object
    else:
      node.node_type = obj.object.node_type
      full_node = obj.object
      for k, v in vars(node).items():
        if not k.startswith('_') and k not in ('node_type','workflow','node_ptr_id'):
          setattr(full_node, k, v)
      full_node.workflow = workflow
      full_node.node_type = type(full_node).node_type
      full_node.node_ptr_id = None
      full_node.id = None

      nodes.append(full_node)

  return nodes
Example 31
 def load_table(self, backup_dir, table_name):
     filename = os.path.join(backup_dir, backup.TABLES_DIRNAME,
                             table_name + '.json')
     with open(filename, 'r') as f:
         data = f.read()
     return list(serializers.deserialize('json', data))
Example 32
 def field_value(self):
     instances = serializers.deserialize('json', self.serialized_data)
     instance = list(instances)[0].object
     return getattr(instance, self.field_name)
Example 33
def deserialize_object(obj):
    silaba_obj = []
    for d_obj in serializers.deserialize('json', obj):
        silaba_obj.append(d_obj.object)
    return silaba_obj
Example 34
 def test_serializer_roundtrip(self):
     """Tests that serialized content can be deserialized."""
     serial_str = serializers.serialize(self.serializer_name,
                                        Article.objects.all())
     models = list(serializers.deserialize(self.serializer_name, serial_str))
     self.assertEqual(len(models), 2)
Example 35
def forwards(apps, schema_editor):
    data = [{
        "pk": 1,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 10090,
            "unique_name": "",
            "type": "includes",
            "name": "LK3 transgenic mice"
        }
    }, {
        "pk": 2,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 10090,
            "unique_name": "",
            "type": "misnomer",
            "name": "Mus muscaris"
        }
    }, {
        "pk": 3,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 10090,
            "unique_name": "",
            "type": "scientific name",
            "name": "Mus musculus"
        }
    }, {
        "pk": 4,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 10090,
            "unique_name": "Mus musculus",
            "type": "abbreviation",
            "name": "M. musculus"
        }
    }, {
        "pk": 5,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 10090,
            "unique_name": "",
            "type": "authority",
            "name": "Mus musculus Linnaeus, 1758"
        }
    }, {
        "pk": 6,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 10090,
            "unique_name": "",
            "type": "includes",
            "name": "Mus sp. 129SV"
        }
    }, {
        "pk": 7,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 10090,
            "unique_name": "",
            "type": "genbank common name",
            "name": "house mouse"
        }
    }, {
        "pk": 8,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 10090,
            "unique_name": "",
            "type": "misspelling",
            "name": "mice C57BL/6xCBA/CaJ hybrid"
        }
    }, {
        "pk": 9,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 10090,
            "unique_name": "",
            "type": "common name",
            "name": "mouse"
        }
    }, {
        "pk": 10,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 10090,
            "unique_name": "",
            "type": "includes",
            "name": "nude mice"
        }
    }, {
        "pk": 11,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 10090,
            "unique_name": "",
            "type": "includes",
            "name": "transgenic mice"
        }
    }, {
        "pk": 12,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 6239,
            "unique_name": "",
            "type": "scientific name",
            "name": "Caenorhabditis elegans"
        }
    }, {
        "pk": 13,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 6239,
            "unique_name": "Caenorhabditis elegans",
            "type": "abbreviation",
            "name": "C. elegans"
        }
    }, {
        "pk": 14,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 6239,
            "unique_name": "",
            "type": "authority",
            "name": "Caenorhabditis elegans (Maupas, 1900)"
        }
    }, {
        "pk": 15,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 6239,
            "unique_name": "",
            "type": "synonym",
            "name": "Rhabditis elegans"
        }
    }, {
        "pk": 16,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 6239,
            "unique_name": "",
            "type": "authority",
            "name": "Rhabditis elegans Maupas, 1900"
        }
    }, {
        "pk": 17,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 6239,
            "unique_name": "nematode <Caenorhabditis elegans>",
            "type": "common name",
            "name": "nematode"
        }
    }, {
        "pk": 18,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 7227,
            "unique_name": "",
            "type": "misspelling",
            "name": "Drosophila melangaster"
        }
    }, {
        "pk": 19,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 7227,
            "unique_name": "",
            "type": "scientific name",
            "name": "Drosophila melanogaster"
        }
    }, {
        "pk": 20,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 7227,
            "unique_name": "Drosophila melanogaster",
            "type": "abbreviation",
            "name": "D. melanogaster"
        }
    }, {
        "pk": 21,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 7227,
            "unique_name": "",
            "type": "authority",
            "name": "Drosophila melanogaster Meigen, 1830"
        }
    }, {
        "pk": 22,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 7227,
            "unique_name": "",
            "type": "genbank common name",
            "name": "fruit fly"
        }
    }, {
        "pk": 23,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 9606,
            "unique_name": "",
            "type": "scientific name",
            "name": "H**o sapiens"
        }
    }, {
        "pk": 24,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 9606,
            "unique_name": "H**o sapiens",
            "type": "abbreviation",
            "name": "H. sapiens"
        }
    }, {
        "pk": 25,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 9606,
            "unique_name": "",
            "type": "authority",
            "name": "H**o sapiens Linnaeus, 1758"
        }
    }, {
        "pk": 26,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 9606,
            "unique_name": "",
            "type": "genbank common name",
            "name": "human"
        }
    }, {
        "pk": 27,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 9606,
            "unique_name": "",
            "type": "common name",
            "name": "man"
        }
    }, {
        "pk": 28,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 4896,
            "unique_name": "",
            "type": "common name",
            "name": "fission yeast"
        }
    }, {
        "pk": 29,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 4896,
            "unique_name": "",
            "type": "scientifc name",
            "name": "Schizosaccharomyces pombe"
        }
    }, {
        "pk": 30,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 4896,
            "unique_name": "Schizosaccharomyces pombe",
            "type": "abbreviation name",
            "name": "S. pombe"
        }
    }, {
        "pk": 31,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 7955,
            "unique_name": "",
            "type": "scientific name",
            "name": "Danio rerio"
        }
    }, {
        "pk": 32,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 7955,
            "unique_name": "Danio rerio",
            "type": "abbreviation",
            "name": "D. rerio"
        }
    }, {
        "pk": 33,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 7955,
            "unique_name": "zebra fish <Danio rerio>",
            "type": "common name",
            "name": "zebra fish"
        }
    }, {
        "pk": 34,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 7955,
            "unique_name": "",
            "type": "misspelling",
            "name": "Brachidanio rerio"
        }
    }, {
        "pk": 35,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 7955,
            "unique_name": "",
            "type": "synonym",
            "name": "Brachydanio rerio"
        }
    }, {
        "pk": 36,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 7955,
            "unique_name": "",
            "type": "synonym",
            "name": "Brachydanio rerio frankei"
        }
    }, {
        "pk": 37,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 7955,
            "unique_name": "",
            "type": "synonym",
            "name": "Cyprinus rerio"
        }
    }, {
        "pk": 38,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 7955,
            "unique_name": "",
            "type": "synonym",
            "name": "Cyprinus rerio Hamilton, 1822"
        }
    }, {
        "pk": 39,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 7955,
            "unique_name": "",
            "type": "synonym",
            "name": "Danio frankei"
        }
    }, {
        "pk": 40,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 7955,
            "unique_name": "",
            "type": "synonym",
            "name": "Danio rerio (Hamilton, 1822)"
        }
    }, {
        "pk": 41,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 7955,
            "unique_name": "",
            "type": "synonym",
            "name": "Danio rerio frankei"
        }
    }, {
        "pk": 42,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 7955,
            "unique_name": "",
            "type": "common name",
            "name": "leopard danio"
        }
    }, {
        "pk": 43,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 7955,
            "unique_name": "",
            "type": "common name",
            "name": "zebra danio"
        }
    }, {
        "pk": 44,
        "model": "annotation_server.taxon",
        "fields": {
            "taxon_id": 7955,
            "unique_name": "",
            "type": "genbank common name",
            "name": "zebrafish"
        }
    }, {
        "pk": 1,
        "model": "annotation_server.genomebuild",
        "fields": {
            "available": True,
            "source_name":
            "GRCh37 Genome Reference Consortium Human Reference 37 (GCA_000001405.1)",
            "name": "hg19",
            "default_build": True,
            "html_path": "/gbdb/hg19/html/description.html",
            "affiliation": "UCSC",
            "species": 23,
            "description": "Feb. 2009 (GRCh37/hg19)"
        }
    }, {
        "pk": 2,
        "model": "annotation_server.genomebuild",
        "fields": {
            "available": True,
            "source_name": None,
            "name": "Genome Reference Consortium GRCh37",
            "default_build": False,
            "html_path": None,
            "affiliation": "Genome Reference Consortium",
            "species": 23,
            "description": "Feb. 2009"
        }
    }, {
        "pk": 3,
        "model": "annotation_server.genomebuild",
        "fields": {
            "available": True,
            "source_name": "NCBI Build 36.1",
            "name": "hg18",
            "default_build": False,
            "html_path": "/gbdb/hg18/html/description.html",
            "affiliation": "UCSC",
            "species": 23,
            "description": "Mar. 2006 (NCBI36/hg18)"
        }
    }, {
        "pk": 4,
        "model": "annotation_server.genomebuild",
        "fields": {
            "available": True,
            "source_name": None,
            "name": "NCBI Build 36.1",
            "default_build": False,
            "html_path": None,
            "affiliation": "NCBI",
            "species": 23,
            "description": "Mar. 2006"
        }
    }, {
        "pk": 5,
        "model": "annotation_server.genomebuild",
        "fields": {
            "available": True,
            "source_name": "NCBI Build 36",
            "name": "mm8",
            "default_build": False,
            "html_path": "/gbdb/mm8/html/description.html",
            "affiliation": "UCSC",
            "species": 3,
            "description": "Feb. 2006 (NCBI36/mm8)"
        }
    }, {
        "pk": 6,
        "model": "annotation_server.genomebuild",
        "fields": {
            "available": True,
            "source_name": None,
            "name": "NCBI Build 36",
            "default_build": False,
            "html_path": None,
            "affiliation": "NCBI",
            "species": 3,
            "description": "Feb. 2006"
        }
    }, {
        "pk": 7,
        "model": "annotation_server.genomebuild",
        "fields": {
            "available": True,
            "source_name": "NCBI Build 37",
            "name": "mm9",
            "default_build": True,
            "html_path": "/gbdb/mm9/html/description.html",
            "affiliation": "UCSC",
            "species": 3,
            "description": "July 2007 (NCBI37/mm9)"
        }
    }, {
        "pk": 8,
        "model": "annotation_server.genomebuild",
        "fields": {
            "available": True,
            "source_name": None,
            "name": "NCBI Build 37",
            "default_build": False,
            "html_path": None,
            "affiliation": "NCBI",
            "species": 3,
            "description": "Jul. 2007"
        }
    }, {
        "pk": 9,
        "model": "annotation_server.genomebuild",
        "fields": {
            "available": True,
            "source_name":
            "Genome Reference Consortium Mouse Build 38 (GCA_000001635.2)",
            "name": "mm10",
            "default_build": False,
            "html_path": "/gbdb/mm10/html/description.html",
            "affiliation": "UCSC",
            "species": 3,
            "description": "Dec. 2011 (GRCm38/mm10)"
        }
    }, {
        "pk": 10,
        "model": "annotation_server.genomebuild",
        "fields": {
            "available": True,
            "source_name": None,
            "name": "Genome Reference Consortium GRCm38",
            "default_build": False,
            "html_path": None,
            "affiliation": "Genome Reference Consortium",
            "species": 3,
            "description": "Dec. 2011"
        }
    }, {
        "pk": 11,
        "model": "annotation_server.genomebuild",
        "fields": {
            "available": True,
            "source_name": "BDGP Release 5",
            "name": "dm3",
            "default_build": True,
            "html_path": "/gbdb/dm3/html/description.html",
            "affiliation": "UCSC",
            "species": 19,
            "description": "Apr. 2006 (BDGP R5/dm3)"
        }
    }, {
        "pk": 12,
        "model": "annotation_server.genomebuild",
        "fields": {
            "available": True,
            "source_name": None,
            "name": "BDGP Release 5",
            "default_build": False,
            "html_path": None,
            "affiliation": "BDGP",
            "species": 19,
            "description": "Apr. 2006"
        }
    }, {
        "pk": 13,
        "model": "annotation_server.genomebuild",
        "fields": {
            "available": True,
            "source_name": "BDGP v. 4 / DHGP v. 3.2",
            "name": "dm2",
            "default_build": False,
            "html_path": "/gbdb/dm2/html/description.html",
            "affiliation": "UCSC",
            "species": 19,
            "description": "Apr. 2004 (BDGP R4/dm2)"
        }
    }, {
        "pk": 14,
        "model": "annotation_server.genomebuild",
        "fields": {
            "available": True,
            "source_name": None,
            "name": "BDGP Release 4",
            "default_build": False,
            "html_path": None,
            "affiliation": "BDGP",
            "species": 19,
            "description": "Apr. 2004"
        }
    }, {
        "pk": 15,
        "model": "annotation_server.genomebuild",
        "fields": {
            "available": True,
            "source_name": "BDGP v. 3",
            "name": "dm1",
            "default_build": False,
            "html_path": "/gbdb/dm1/html/description.html",
            "affiliation": "UCSC",
            "species": 19,
            "description": "Jan. 2003 (BDGP R3/dm1)"
        }
    }, {
        "pk": 16,
        "model": "annotation_server.genomebuild",
        "fields": {
            "available": True,
            "source_name": None,
            "name": "BDGP Release 3",
            "default_build": False,
            "html_path": None,
            "affiliation": "BDGP",
            "species": 19,
            "description": "Jan. 2003"
        }
    }, {
        "pk": 17,
        "model": "annotation_server.genomebuild",
        "fields": {
            "available": True,
            "source_name":
            "Washington University School of Medicine GSC and Sanger Institute WS220",
            "name": "ce10",
            "default_build": False,
            "html_path": "/gbdb/ce10/html/description.html",
            "affiliation": "UCSC",
            "species": 12,
            "description": "Oct. 2010 (WS220/ce10)"
        }
    }, {
        "pk": 18,
        "model": "annotation_server.genomebuild",
        "fields": {
            "available": True,
            "source_name": None,
            "name": "WormBase v. WS220",
            "default_build": False,
            "html_path": None,
            "affiliation": "WormBase",
            "species": 12,
            "description": "Oct. 2010"
        }
    }, {
        "pk": 19,
        "model": "annotation_server.genomebuild",
        "fields": {
            "available": True,
            "source_name":
            "Washington University School of Medicine GSC and Sanger Institute WS190",
            "name": "ce6",
            "default_build": True,
            "html_path": "/gbdb/ce6/html/description.html",
            "affiliation": "UCSC",
            "species": 12,
            "description": "May 2008 (WS190/ce6)"
        }
    }, {
        "pk": 20,
        "model": "annotation_server.genomebuild",
        "fields": {
            "available": True,
            "source_name": None,
            "name": "WormBase v. WS190",
            "default_build": False,
            "html_path": None,
            "affiliation": "WormBase",
            "species": 12,
            "description": "May 2008"
        }
    }, {
        "pk": 21,
        "model": "annotation_server.genomebuild",
        "fields": {
            "available": True,
            "source_name": None,
            "name": "spombe_1.55",
            "default_build": True,
            "html_path": None,
            "affiliation": "",
            "species": 29,
            "description":
            "Added manually by Nils Gehlenborg on 15 March 2013."
        }
    }, {
        "pk": 22,
        "model": "annotation_server.genomebuild",
        "fields": {
            "available": False,
            "source_name": "Sanger Institute",
            "name": "danRer7",
            "default_build": True,
            "html_path": "",
            "affiliation": "UCSC",
            "species": 31,
            "description": "Jul. 2010 (Zv9/danRer7)"
        }
    }, {
        "pk": 23,
        "model": "annotation_server.genomebuild",
        "fields": {
            "available": True,
            "source_name": "",
            "name": "Zv9",
            "default_build": False,
            "html_path": "",
            "affiliation": "Sanger Institute",
            "species": 31,
            "description": "Jul. 2010"
        }
    }]

    data_as_json = json.dumps(data)
    objects = serializers.deserialize('json',
                                      data_as_json,
                                      ignorenonexistent=True)
    for obj in objects:
        obj.save()
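A minimal sketch of how such a forwards() function is typically wired into a data migration; the dependency name is an assumption.

from django.db import migrations

class Migration(migrations.Migration):
    dependencies = [
        ('annotation_server', '0001_initial'),  # assumed predecessor
    ]
    operations = [
        migrations.RunPython(forwards, migrations.RunPython.noop),
    ]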
Example 36
 def deserialize(cls, serialized_data):
     # Use .object to get the original model instance back; otherwise the
     # result is a DeserializedObject wrapper, not an ORM object.
     return list(serializers.deserialize('json', serialized_data))[0].object
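A short sketch of the wrapper distinction described above, assuming a configured Django project with a model like the Book sketched under Example 25:

from django.core import serializers

payload = serializers.serialize('json', Book.objects.all()[:1])
wrapper = list(serializers.deserialize('json', payload))[0]
assert not isinstance(wrapper, Book)     # a DeserializedObject wrapper
assert isinstance(wrapper.object, Book)  # .object is the ORM instance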
Example 37
def schedule_compete(e1_json, e2_json):
    e1 = next(deserialize('json', e1_json)).object
    e2 = next(deserialize('json', e2_json)).object
    fight(e1, e2)
Example 38
    def process_dir(self, fixture_dir, fixture_name, compression_formats,
                    serialization_formats):

        humanize = lambda dirname: "'%s'" % dirname if dirname else 'absolute path'

        if self.verbosity >= 2:
            self.stdout.write("Checking %s for fixtures..." % humanize(fixture_dir))

        label_found = False
        for combo in product([self.using, None], serialization_formats, compression_formats):
            database, format, compression_format = combo
            file_name = '.'.join(
                p for p in [
                    fixture_name, database, format, compression_format
                ]
                if p
            )

            if self.verbosity >= 3:
                self.stdout.write("Trying %s for %s fixture '%s'..." % \
                    (humanize(fixture_dir), file_name, fixture_name))
            full_path = os.path.join(fixture_dir, file_name)
            open_method = self.compression_types[compression_format]
            try:
                fixture = open_method(full_path, 'r')
            except IOError:
                if self.verbosity >= 2:
                    self.stdout.write("No %s fixture '%s' in %s." % \
                        (format, fixture_name, humanize(fixture_dir)))
            else:
                try:
                    if label_found:
                        raise CommandError("Multiple fixtures named '%s' in %s. Aborting." %
                            (fixture_name, humanize(fixture_dir)))

                    self.fixture_count += 1
                    objects_in_fixture = 0
                    loaded_objects_in_fixture = 0
                    if self.verbosity >= 2:
                        self.stdout.write("Installing %s fixture '%s' from %s." % \
                            (format, fixture_name, humanize(fixture_dir)))

                    objects = serializers.deserialize(format, fixture, using=self.using, ignorenonexistent=self.ignore)

                    for obj in objects:
                        objects_in_fixture += 1
                        if router.allow_syncdb(self.using, obj.object.__class__):
                            loaded_objects_in_fixture += 1
                            self.models.add(obj.object.__class__)
                            try:
                                obj.save(using=self.using)
                            except (DatabaseError, IntegrityError) as e:
                                e.args = ("Could not load %(app_label)s.%(object_name)s(pk=%(pk)s): %(error_msg)s" % {
                                        'app_label': obj.object._meta.app_label,
                                        'object_name': obj.object._meta.object_name,
                                        'pk': obj.object.pk,
                                        'error_msg': force_text(e)
                                    },)
                                raise

                    self.loaded_object_count += loaded_objects_in_fixture
                    self.fixture_object_count += objects_in_fixture
                    label_found = True
                except Exception as e:
                    if not isinstance(e, CommandError):
                        e.args = ("Problem installing fixture '%s': %s" % (full_path, e),)
                    raise
                finally:
                    fixture.close()

                # If the fixture we loaded contains 0 objects, assume that an
                # error was encountered during fixture loading.
                if objects_in_fixture == 0:
                    raise CommandError(
                        "No fixture data found for '%s'. (File format may be invalid.)" %
                            (fixture_name))
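
The product(...) loop above simply probes every file name that could match the label. Stripped of the command machinery, the name generation reduces to this standalone sketch:

from itertools import product

serialization_formats = ['json', 'xml']
compression_formats = [None, 'gz', 'zip']

for database, fmt, comp in product(['master', None],
                                   serialization_formats,
                                   compression_formats):
    # drop the empty parts and join the rest with dots, e.g.
    # 'mydata.master.json.gz', 'mydata.json', 'mydata.xml.zip', ...
    print('.'.join(p for p in ['mydata', database, fmt, comp] if p))
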
Esempio n. 39
0
    def loaddata(self, fixture_labels):
        connection = connections[self.using]

        # Keep a count of the installed objects and fixtures
        self.fixture_count = 0
        self.loaded_object_count = 0
        self.fixture_object_count = 0
        self.models = set()

        self.serialization_formats = serializers.get_public_serializer_formats()
        # Forcing binary mode may be revisited after dropping Python 2 support
        # (see #22399)
        self.compression_formats = {
            None: (open, 'rb'),
            'gz': (gzip.GzipFile, 'rb'),
            'zip': (SingleZipReader, 'r'),
        }
        if has_bz2:
            self.compression_formats['bz2'] = (bz2.BZ2File, 'r')

        # Mirrors the helper used by the sibling commands in this document.
        humanize = lambda dirname: "'%s'" % dirname if dirname else 'absolute path'

        skipped = []
        processed = []
        loaded_objects = []
        missing_model = []
        missing_fks = []

        with connection.constraint_checks_disabled():
            # for fixture_label in fixture_labels:
            #    self.load_label(fixture_label)
            objects_in_fixture = 0
            loaded_objects_in_fixture = 0
            # Iterate over a copy: entries are removed from fixture_labels below.
            for path in list(fixture_labels):
                if self.verbosity >= 2:
                    self.stdout.write("Installing %s fixture" %
                                      (humanize(path)))

                with open(path, 'r') as fixture_file:
                    self.fixture_count += 1
                    try:
                        data = fixture_file.read()
                        objects = serializers.deserialize('json', data,
                                                          using=self.using, ignorenonexistent=self.ignore)
                        # evaluate
                        objects = list(objects)

                    except DeserializationError as ex:
                        skipped.append(path)
                        fixture_labels.remove(path)
                        # classify the failure so callers can react to it
                        if 'Invalid model identifier' in str(ex):
                            missing_model.append(str(ex))
                        elif 'matching query does not exist' in str(ex):
                            missing_fks += objects
                        else:
                            self.stderr.write(
                                'DeserializationError(%s) raised while deserializing fixture %s.' % (ex, path))
                    except Exception as e:
                        fixture_labels.remove(path)
                        self.stderr.write(
                            'Exception %s (%s) raised while loading fixture %s.' % (str(e), e.__class__.__name__, path))
                    else:
                        # everything is OK
                        objects_in_fixture += len(objects)
                        loaded_objects_in_fixture += len(objects)
                        loaded_objects += objects
                        processed.append(path)
                        fixture_labels.remove(path)

            self.loaded_object_count += loaded_objects_in_fixture
            self.fixture_object_count += objects_in_fixture

        # Since we disabled constraint checks, we must manually check for
        # any invalid keys that might have been added
        table_names = [model._meta.db_table for model in self.models]
        try:
            connection.check_constraints(table_names=table_names)
        except Exception as e:
            e.args = ("Problem installing fixtures: %s" % e,)
            raise

        # If we found even one object in a fixture, we need to reset the
        # database sequences.
        if self.loaded_object_count > 0:
            sequence_sql = connection.ops.sequence_reset_sql(
                no_style(), self.models)
            if sequence_sql:
                if self.verbosity >= 2:
                    self.stdout.write("Resetting sequences\n")
                with connection.cursor() as cursor:
                    for line in sequence_sql:
                        cursor.execute(line)

        if self.verbosity >= 1:
            if self.fixture_count == 0 and self.hide_empty:
                pass
            elif self.fixture_object_count == self.loaded_object_count:
                self.stdout.write("Installed %d object(s) from %d fixture(s)" %
                                  (self.loaded_object_count, self.fixture_count))
            else:
                self.stdout.write("Installed %d object(s) (of %d) from %d fixture(s)" %
                                  (self.loaded_object_count, self.fixture_object_count, self.fixture_count))

        return fixture_labels, processed, skipped, loaded_objects, missing_fks
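
The disable-then-verify pattern above mirrors Django's own loaddata. In isolation it looks roughly like this sketch, where data is a JSON payload and models is the set of model classes that were loaded:

from django.core import serializers
from django.db import connections

connection = connections['default']

with connection.constraint_checks_disabled():
    # save in file order; forward references are not validated yet
    for obj in serializers.deserialize('json', data):
        obj.save(using='default')

# verify that nothing invalid slipped in while checks were off
connection.check_constraints(
    table_names=[model._meta.db_table for model in models])
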
Esempio n. 40
0
 def test_deserialize_json(self):
     stream = serializers.serialize('json', Post.objects.all())
     obj = list(serializers.deserialize('json', stream))[0]
     self.assertEqual(obj.object, self.mp)
Esempio n. 41
0
 def test_json_deserializer_exception(self):
     with self.assertRaises(DeserializationError):
         for obj in serializers.deserialize("json", """[{"pk":1}"""):
             pass
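
Because deserialization is lazy, the truncated JSON above only raises once the generator is iterated, which is why the test has to loop over it. A caller that wants to validate a payload up front can force evaluation, as in this sketch:

from django.core import serializers
from django.core.serializers.base import DeserializationError

try:
    objects = list(serializers.deserialize('json', payload))
except DeserializationError:
    objects = []  # reject the malformed payload
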
Esempio n. 42
0
from django.core import serializers

with open('fixtures/initialdata.json', 'r') as fopen:
    for obj in serializers.deserialize("json", fopen.read()):
        obj.save()
Esempio n. 43
0
    def test_set_field_serializes_and_deserializes(self):
        obj = IterableFieldModel(set_field=set(["foo", "bar"]))
        data = serializers.serialize("json", [obj])

        new_obj = next(serializers.deserialize("json", data)).object
        self.assertEqual(new_obj.set_field, set(["foo", "bar"]))
Esempio n. 44
0
    def load_label(self, fixture_label):
        """Load fixtures files for a given label."""
        show_progress = self.verbosity >= 3
        for fixture_file, fixture_dir, fixture_name in self.find_fixtures(
                fixture_label):
            _, ser_fmt, cmp_fmt = self.parse_name(
                os.path.basename(fixture_file))
            open_method, mode = self.compression_formats[cmp_fmt]
            fixture = open_method(fixture_file, mode)
            try:
                self.fixture_count += 1
                objects_in_fixture = 0
                loaded_objects_in_fixture = 0
                if self.verbosity >= 2:
                    self.stdout.write(
                        "Installing %s fixture '%s' from %s." %
                        (ser_fmt, fixture_name, humanize(fixture_dir)))

                objects = serializers.deserialize(
                    ser_fmt,
                    fixture,
                    using=self.using,
                    ignorenonexistent=self.ignore,
                    handle_forward_references=True,
                )

                for obj in objects:
                    objects_in_fixture += 1
                    if (obj.object._meta.app_config in self.excluded_apps
                            or type(obj.object) in self.excluded_models):
                        continue
                    if router.allow_migrate_model(self.using,
                                                  obj.object.__class__):
                        loaded_objects_in_fixture += 1
                        self.models.add(obj.object.__class__)
                        try:
                            obj.save(using=self.using)
                            if show_progress:
                                self.stdout.write('\rProcessed %i object(s).' %
                                                  loaded_objects_in_fixture,
                                                  ending='')
                        # psycopg2 raises ValueError if data contains NUL chars.
                        except (DatabaseError, IntegrityError,
                                ValueError) as e:
                            e.args = (
                                "Could not load %(app_label)s.%(object_name)s(pk=%(pk)s): %(error_msg)s"
                                % {
                                    'app_label': obj.object._meta.app_label,
                                    'object_name':
                                    obj.object._meta.object_name,
                                    'pk': obj.object.pk,
                                    'error_msg': e,
                                }, )
                            raise
                    if obj.deferred_fields:
                        self.objs_with_deferred_fields.append(obj)
                if objects and show_progress:
                    self.stdout.write('')  # Add a newline after the progress indicator.
                self.loaded_object_count += loaded_objects_in_fixture
                self.fixture_object_count += objects_in_fixture
            except Exception as e:
                if not isinstance(e, CommandError):
                    e.args = ("Problem installing fixture '%s': %s" %
                              (fixture_file, e), )
                raise
            finally:
                fixture.close()

            # Warn if the fixture we loaded contains 0 objects.
            if objects_in_fixture == 0:
                warnings.warn(
                    "No fixture data found for '%s'. (File format may be "
                    "invalid.)" % fixture_name, RuntimeWarning)
Esempio n. 45
0
 def __upload_fixtures(fixtures):
     with transaction.atomic():
         for fixture in fixtures:
             with open(str(fixture), "r") as file:
                 for obj in serializers.deserialize("json", file.read()):
                     obj.save()
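
A single transaction.atomic() around the whole loop, as above, rolls every fixture back if any one of them fails; a per-fixture variant along these lines keeps the fixtures that did load:

from django.core import serializers
from django.db import transaction

def upload_fixtures_individually(fixtures):
    for fixture in fixtures:
        try:
            # one atomic block per file: a failure only discards this fixture
            with transaction.atomic():
                with open(str(fixture), "r") as file:
                    for obj in serializers.deserialize("json", file.read()):
                        obj.save()
        except Exception:
            continue  # skip the broken fixture, keep the rest
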
Esempio n. 46
0
 def test_empty(self):
     instance = BigCharSetModel(field=set())
     data = serializers.serialize('json', [instance])
     objs = list(serializers.deserialize('json', data))
     instance = objs[0].object
     assert instance.field == set()
Esempio n. 47
0
    def reimport_scan(self, scan, scan_type, test, active=True, verified=True, tags=None, minimum_severity=None,
                    user=None, endpoints_to_add=None, scan_date=None, version=None, branch_tag=None, build_id=None,
                    commit_hash=None, push_to_jira=None, close_old_findings=True, group_by=None, api_scan_configuration=None,
                    service=None):

        logger.debug(f'REIMPORT_SCAN: parameters: {locals()}')

        user = user or get_current_user()

        now = timezone.now()

        if api_scan_configuration:
            if api_scan_configuration.product != test.engagement.product:
                raise ValidationError('API Scan Configuration has to be from same product as the Test')
            if test.api_scan_configuration != api_scan_configuration:
                test.api_scan_configuration = api_scan_configuration
                test.save()

        # check if the parser that handle the scan_type manage tests
        parser = get_parser(scan_type)
        if hasattr(parser, 'get_tests'):
            logger.debug('REIMPORT_SCAN parser v2: Create parse findings')
            tests = parser.get_tests(scan_type, scan)
            # for now we only consider the first test in the list and artificially aggregate all findings of all tests
            # this is the same as the old behavior as current import/reimporter implementation doesn't handle the case
            # when there is more than 1 test
            parsed_findings = []
            for test_raw in tests:
                parsed_findings.extend(test_raw.findings)
        else:
            logger.debug('REIMPORT_SCAN: Parse findings')
            parsed_findings = parser.get_findings(scan, test)

        logger.debug('REIMPORT_SCAN: Processing findings')
        new_findings = []
        reactivated_findings = []
        findings_to_mitigate = []
        untouched_findings = []
        if settings.ASYNC_FINDING_IMPORT:
            chunk_list = importer_utils.chunk_list(parsed_findings)
            results_list = []
            # First kick off all the workers
            for findings_list in chunk_list:
                result = self.process_parsed_findings(test, findings_list, scan_type, user, active, verified,
                                                      minimum_severity=minimum_severity, endpoints_to_add=endpoints_to_add,
                                                      push_to_jira=push_to_jira, group_by=group_by, now=now, service=service, scan_date=scan_date, sync=False)
                # Since we don't want to block on the task right now, save the id
                # so we can check on it later.
                results_list += [result]
            # After all tasks have been started, time to pull the results
            logger.debug('REIMPORT_SCAN: Collecting Findings')
            for results in results_list:
                serial_new_findings, serial_reactivated_findings, serial_findings_to_mitigate, serial_untouched_findings = results.get()
                new_findings += [next(serializers.deserialize("json", finding)).object for finding in serial_new_findings]
                reactivated_findings += [next(serializers.deserialize("json", finding)).object for finding in serial_reactivated_findings]
                findings_to_mitigate += [next(serializers.deserialize("json", finding)).object for finding in serial_findings_to_mitigate]
                untouched_findings += [next(serializers.deserialize("json", finding)).object for finding in serial_untouched_findings]
            logger.debug('REIMPORT_SCAN: All Findings Collected')
            # Indicate that the test is not complete yet as endpoints will still be rolling in.
            test.percent_complete = 50
            test.save()
            importer_utils.update_test_progress(test)
        else:
            new_findings, reactivated_findings, findings_to_mitigate, untouched_findings = \
                self.process_parsed_findings(test, parsed_findings, scan_type, user, active, verified,
                                             minimum_severity=minimum_severity, endpoints_to_add=endpoints_to_add,
                                             push_to_jira=push_to_jira, group_by=group_by, now=now, service=service, scan_date=scan_date, sync=True)

        closed_findings = []
        if close_old_findings:
            logger.debug('REIMPORT_SCAN: Closing findings no longer present in scan report')
            closed_findings = self.close_old_findings(test, findings_to_mitigate, scan_date, user=user, push_to_jira=push_to_jira)

        logger.debug('REIMPORT_SCAN: Updating test/engagement timestamps')
        importer_utils.update_timestamps(test, version, branch_tag, build_id, commit_hash, now, scan_date)

        if settings.TRACK_IMPORT_HISTORY:
            logger.debug('REIMPORT_SCAN: Updating Import History')
            importer_utils.update_import_history(Test_Import.REIMPORT_TYPE, active, verified, tags, minimum_severity, endpoints_to_add,
                                                 version, branch_tag, build_id, commit_hash, push_to_jira, close_old_findings,
                                                 test, new_findings, closed_findings, reactivated_findings)

        logger.debug('REIMPORT_SCAN: Generating notifications')

        updated_count = len(closed_findings) + len(reactivated_findings) + len(new_findings)
        if updated_count > 0:
            notifications_helper.notify_scan_added(test, updated_count, new_findings=new_findings, findings_mitigated=closed_findings,
                                                    findings_reactivated=reactivated_findings, findings_untouched=untouched_findings)

        logger.debug('REIMPORT_SCAN: Done')

        return test, updated_count, len(new_findings), len(closed_findings), len(reactivated_findings), len(untouched_findings)
Esempio n. 48
0
    def handle(self, *fixture_labels, **options):

        ignore = options.get('ignore')
        using = options.get('database')

        connection = connections[using]

        if not len(fixture_labels):
            raise CommandError(
                "No database fixture specified. Please provide the path of at "
                "least one fixture in the command line.")

        verbosity = int(options.get('verbosity'))
        show_traceback = options.get('traceback')

        # commit is a stealth option - it isn't really useful as
        # a command line option, but it can be useful when invoking
        # loaddata from within another script.
        # If commit=True, loaddata will use its own transaction;
        # if commit=False, the data load SQL will become part of
        # the transaction in place when loaddata was invoked.
        commit = options.get('commit', True)

        # Keep a count of the installed objects and fixtures
        fixture_count = 0
        loaded_object_count = 0
        fixture_object_count = 0
        models = set()

        humanize = lambda dirname: "'%s'" % dirname if dirname else 'absolute path'

        # Get a cursor (even though we don't need one yet). This has
        # the side effect of initializing the test database (if
        # it isn't already initialized).
        cursor = connection.cursor()

        # Start transaction management. All fixtures are installed in a
        # single transaction to ensure that all references are resolved.
        if commit:
            transaction.commit_unless_managed(using=using)
            transaction.enter_transaction_management(using=using)
            transaction.managed(True, using=using)

        class SingleZipReader(zipfile.ZipFile):
            def __init__(self, *args, **kwargs):
                zipfile.ZipFile.__init__(self, *args, **kwargs)
                if settings.DEBUG:
                    assert len(
                        self.namelist()
                    ) == 1, "Zip-compressed fixtures must contain only one file."

            def read(self):
                return zipfile.ZipFile.read(self, self.namelist()[0])

        compression_types = {
            None: open,
            'gz': gzip.GzipFile,
            'zip': SingleZipReader
        }
        if has_bz2:
            compression_types['bz2'] = bz2.BZ2File

        app_module_paths = []
        for app in get_apps():
            if hasattr(app, '__path__'):
                # It's a 'models/' subpackage
                for path in app.__path__:
                    app_module_paths.append(path)
            else:
                # It's a models.py module
                app_module_paths.append(app.__file__)

        app_fixtures = [
            os.path.join(os.path.dirname(path), 'fixtures')
            for path in app_module_paths
        ]

        try:
            with connection.constraint_checks_disabled():
                for fixture_label in fixture_labels:
                    parts = fixture_label.split('.')

                    if len(parts) > 1 and parts[-1] in compression_types:
                        compression_formats = [parts[-1]]
                        parts = parts[:-1]
                    else:
                        compression_formats = compression_types.keys()

                    if len(parts) == 1:
                        fixture_name = parts[0]
                        formats = serializers.get_public_serializer_formats()
                    else:
                        fixture_name, format = '.'.join(parts[:-1]), parts[-1]
                        if format in serializers.get_public_serializer_formats():
                            formats = [format]
                        else:
                            formats = []

                    if formats:
                        if verbosity >= 2:
                            self.stdout.write("Loading '%s' fixtures..." %
                                              fixture_name)
                    else:
                        raise CommandError(
                            "Problem installing fixture '%s': %s is not a known serialization format."
                            % (fixture_name, format))

                    if os.path.isabs(fixture_name):
                        fixture_dirs = [fixture_name]
                    else:
                        fixture_dirs = app_fixtures + list(
                            settings.FIXTURE_DIRS) + ['']

                    for fixture_dir in fixture_dirs:
                        if verbosity >= 2:
                            self.stdout.write("Checking %s for fixtures..." %
                                              humanize(fixture_dir))

                        label_found = False
                        for combo in product([using, None], formats,
                                             compression_formats):
                            database, format, compression_format = combo
                            file_name = '.'.join(p for p in [
                                fixture_name, database, format,
                                compression_format
                            ] if p)

                            if verbosity >= 3:
                                self.stdout.write("Trying %s for %s fixture '%s'..." % \
                                    (humanize(fixture_dir), file_name, fixture_name))
                            full_path = os.path.join(fixture_dir, file_name)
                            open_method = compression_types[compression_format]
                            try:
                                fixture = open_method(full_path, 'r')
                            except IOError:
                                if verbosity >= 2:
                                    self.stdout.write("No %s fixture '%s' in %s." % \
                                        (format, fixture_name, humanize(fixture_dir)))
                            else:
                                try:
                                    if label_found:
                                        raise CommandError(
                                            "Multiple fixtures named '%s' in %s. Aborting."
                                            % (fixture_name,
                                               humanize(fixture_dir)))

                                    fixture_count += 1
                                    objects_in_fixture = 0
                                    loaded_objects_in_fixture = 0
                                    if verbosity >= 2:
                                        self.stdout.write("Installing %s fixture '%s' from %s." % \
                                            (format, fixture_name, humanize(fixture_dir)))

                                    objects = serializers.deserialize(
                                        format,
                                        fixture,
                                        using=using,
                                        ignorenonexistent=ignore)

                                    for obj in objects:
                                        objects_in_fixture += 1
                                        if router.allow_syncdb(
                                                using, obj.object.__class__):
                                            loaded_objects_in_fixture += 1
                                            models.add(obj.object.__class__)
                                            try:
                                                obj.save(using=using)
                                            except (DatabaseError,
                                                    IntegrityError) as e:
                                                e.args = (
                                                    "Could not load %(app_label)s.%(object_name)s(pk=%(pk)s): %(error_msg)s"
                                                    % {
                                                        'app_label':
                                                        obj.object._meta.
                                                        app_label,
                                                        'object_name':
                                                        obj.object._meta.
                                                        object_name,
                                                        'pk':
                                                        obj.object.pk,
                                                        'error_msg':
                                                        force_text(e)
                                                    }, )
                                                raise

                                    loaded_object_count += loaded_objects_in_fixture
                                    fixture_object_count += objects_in_fixture
                                    label_found = True
                                except Exception as e:
                                    if not isinstance(e, CommandError):
                                        e.args = (
                                            "Problem installing fixture '%s': %s"
                                            % (full_path, e), )
                                    raise
                                finally:
                                    fixture.close()

                                # If the fixture we loaded contains 0 objects, assume that an
                                # error was encountered during fixture loading.
                                if objects_in_fixture == 0:
                                    raise CommandError(
                                        "No fixture data found for '%s'. (File format may be invalid.)"
                                        % (fixture_name))

            # Since we disabled constraint checks, we must manually check for
            # any invalid keys that might have been added
            table_names = [model._meta.db_table for model in models]
            try:
                connection.check_constraints(table_names=table_names)
            except Exception as e:
                e.args = ("Problem installing fixtures: %s" % e, )
                raise

        except (SystemExit, KeyboardInterrupt):
            raise
        except Exception as e:
            if commit:
                transaction.rollback(using=using)
                transaction.leave_transaction_management(using=using)
            raise

        # If we found even one object in a fixture, we need to reset the
        # database sequences.
        if loaded_object_count > 0:
            sequence_sql = connection.ops.sequence_reset_sql(
                no_style(), models)
            if sequence_sql:
                if verbosity >= 2:
                    self.stdout.write("Resetting sequences\n")
                for line in sequence_sql:
                    cursor.execute(line)

        if commit:
            transaction.commit(using=using)
            transaction.leave_transaction_management(using=using)

        if verbosity >= 1:
            if fixture_object_count == loaded_object_count:
                self.stdout.write("Installed %d object(s) from %d fixture(s)" %
                                  (loaded_object_count, fixture_count))
            else:
                self.stdout.write(
                    "Installed %d object(s) (of %d) from %d fixture(s)" %
                    (loaded_object_count, fixture_object_count, fixture_count))

        # Close the DB connection. This is required as a workaround for an
        # edge case in MySQL: if the same connection is used to
        # create tables, load data, and query, the query can return
        # incorrect results. See Django #7572, MySQL #37735.
        if commit:
            connection.close()
Esempio n. 49
0
 def test_loading(self):
     instance = list(serializers.deserialize('json',
                                             self.test_data))[0].object
     self.assertEqual(instance.field,
                      uuid.UUID('550e8400-e29b-41d4-a716-446655440000'))
Esempio n. 50
0
def deserialize_fixture():
    fixture_file = os.path.join(fixture_dir, fixture_filename)

    with open(fixture_file, 'rb') as fixture:
        return list(
            serializers.deserialize('json', fixture, ignorenonexistent=True))
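
Opening the file in binary mode is fine here: the JSON deserializer accepts both text and byte streams. The cached list can then be saved in one pass, as in this sketch:

for deserialized in deserialize_fixture():
    deserialized.save()  # persists the wrapped instance (and its m2m data)
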
Esempio n. 51
0
    def handle(self, *fixture_files, **options):

        using = options.get('database', DEFAULT_DB_ALIAS)
        mode = options.get('mode', 'append')
        items_into_tree = options.get('items_into_tree', None)

        if items_into_tree is not None:
            try:
                items_into_tree = MODEL_TREE_CLASS.objects.get(
                    alias=items_into_tree)
            except ObjectDoesNotExist:
                raise CommandError(
                    f'Target tree aliased `{items_into_tree}` does not exist. Please create it before import.'
                )
            else:
                mode = 'append'

        connection = connections[using]
        cursor = connection.cursor()

        self.style = no_style()

        loaded_object_count = 0

        if mode == 'replace':
            MODEL_TREE_CLASS.objects.all().delete()
            MODEL_TREE_ITEM_CLASS.objects.all().delete()

        for fixture_file in fixture_files:

            self.stdout.write(f'Loading fixture from `{fixture_file}` ...\n')

            fixture = open(fixture_file, 'r')

            try:
                objects = serializers.deserialize('json', fixture, using=using)
            except (SystemExit, KeyboardInterrupt):
                raise

            trees = []
            tree_items = defaultdict(list)
            tree_item_parents = defaultdict(list)
            tree_items_new_indexes = {}

            try:
                allow_migrate = router.allow_migrate
            except AttributeError:
                # Django < 1.7
                allow_migrate = router.allow_syncdb

            for obj in objects:
                if allow_migrate(using, obj.object.__class__):
                    if isinstance(obj.object,
                                  (MODEL_TREE_CLASS, MODEL_TREE_ITEM_CLASS)):
                        if isinstance(obj.object, MODEL_TREE_CLASS):
                            trees.append(obj.object)
                        else:
                            if items_into_tree is not None:
                                obj.object.tree_id = items_into_tree.id
                            tree_items[obj.object.tree_id].append(obj.object)
                            tree_item_parents[obj.object.parent_id].append(
                                obj.object.id)

            if items_into_tree is not None:
                trees = [
                    items_into_tree,
                ]

            try:

                for tree in trees:

                    self.stdout.write(f'\nImporting tree `{tree.alias}` ...\n')
                    orig_tree_id = tree.id

                    if items_into_tree is None:
                        if mode == 'append':
                            tree.pk = None
                            tree.id = None

                        tree.save(using=using)
                        loaded_object_count += 1

                    parents_ahead = []

                    # Parents go first: enough for simple cases.
                    tree_items[orig_tree_id].sort(
                        key=lambda item: item.id not in tree_item_parents)

                    for tree_item in tree_items[orig_tree_id]:
                        parent_ahead = False
                        self.stdout.write(
                            f'Importing item `{tree_item.title}` ...\n')
                        tree_item.tree_id = tree.id
                        orig_item_id = tree_item.id

                        if mode == 'append':
                            tree_item.pk = None
                            tree_item.id = None

                            if tree_item.id in tree_items_new_indexes:
                                tree_item.pk = tree_item.id = tree_items_new_indexes[
                                    tree_item.id]

                            if tree_item.parent_id is not None:
                                if tree_item.parent_id in tree_items_new_indexes:
                                    tree_item.parent_id = tree_items_new_indexes[
                                        tree_item.parent_id]
                                else:
                                    parent_ahead = True

                        tree_item.save(using=using)
                        loaded_object_count += 1

                        if mode == 'append':
                            tree_items_new_indexes[orig_item_id] = tree_item.id
                            if parent_ahead:
                                parents_ahead.append(tree_item)

                    # Second pass is necessary for tree items being imported before their parents.
                    for tree_item in parents_ahead:
                        tree_item.parent_id = tree_items_new_indexes[
                            tree_item.parent_id]
                        tree_item.save(using=using)

            except (SystemExit, KeyboardInterrupt):
                raise

            except Exception:
                import traceback
                fixture.close()

                self.stderr.write(
                    self.style.ERROR(
                        f"Fixture `{fixture_file}` import error: "
                        f"{''.join(traceback.format_exception(*sys.exc_info()))}\n"
                    ))

            fixture.close()

        # Reset DB sequences, for DBMS with sequences support.
        if loaded_object_count > 0:
            sequence_sql = connection.ops.sequence_reset_sql(
                self.style, [MODEL_TREE_CLASS, MODEL_TREE_ITEM_CLASS])
            if sequence_sql:
                self.stdout.write('Resetting DB sequences ...\n')
                for line in sequence_sql:
                    cursor.execute(line)

        connection.close()
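
The parents_ahead second pass above is a generic old-id-to-new-id remapping. Stripped of the ORM it reduces to this runnable sketch (the id values are stand-ins):

from itertools import count

_next_id = count(100)                 # stand-in for DB pk allocation

items = [(1, None), (3, 2), (2, 1)]   # (old_id, old_parent_id); 3 arrives before 2
new_ids, new_parents, deferred = {}, {}, []

for old_id, parent in items:
    new_ids[old_id] = next(_next_id)
    if parent is None or parent in new_ids:
        new_parents[new_ids[old_id]] = new_ids.get(parent)
    else:
        deferred.append((old_id, parent))      # parent not imported yet

# second pass: every parent now has a new id
for old_id, parent in deferred:
    new_parents[new_ids[old_id]] = new_ids[parent]
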
Esempio n. 52
0
 def test_roundtrip_with_null(self):
     instance = HStoreModel(field={'a': 'b', 'c': None})
     data = serializers.serialize('json', [instance])
     new_instance = list(serializers.deserialize('json', data))[0].object
     self.assertEqual(instance.field, new_instance.field)
Esempio n. 53
0
 def test_loading(self):
     instance = list(serializers.deserialize('json',
                                             self.test_data))[0].object
     self.assertEqual(instance.field, {'a': 'b'})
Esempio n. 54
0
 def test_yaml_deserializer_exception(self):
     with self.assertRaises(DeserializationError):
         for obj in serializers.deserialize("yaml", "{"):
             pass
Esempio n. 55
0
 def get_object_version(self):
     """Returns the stored version of the model."""
     data = self.serialized_data
     if isinstance(data, str):
         data = data.encode("utf8")
     return list(serializers.deserialize(self.format, data))[0]
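
Note that this returns the DeserializedObject wrapper rather than the bare instance, so a caller can either inspect .object or write the old state back with .save(). A sketch with hypothetical names:

version = book_version.get_object_version()   # book_version: an assumed Version row
old_title = version.object.title              # inspect a historical field value
version.save()                                # or restore the old state wholesale
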
Esempio n. 56
0
 def test_deserializer_pyyaml_error_message(self):
     """Using yaml deserializer without pyyaml raises ImportError"""
     with self.assertRaises(ImportError):
         serializers.deserialize("yaml", "")
Esempio n. 57
0
 def test_loading(self):
     for value, serialized in self.test_values:
         with self.subTest(value=value):
             instance = list(serializers.deserialize('json', self.test_data % serialized))[0].object
             self.assertEqual(instance.field, value)
Esempio n. 58
0
 def test_loading(self):
     instances = list(serializers.deserialize('json', self.test_data))
     instance = instances[0].object
     assert instance.attrs == {'a': 'b', 'c': None}
Esempio n. 59
0
            content = {"flag": "Success", "context": config}
        except Exception as e:
            content = {"flag": "Error", "context": str(e)}

    elif action == "import":
        try:
            post = json.loads(request.body)

            m_config = post['main_config']
            u_config = post['upstream_config']
            p_config = post['proxy_config']

            main_config_qc.delete()
            upstream_config_qc.delete()
            proxy_config_qc.delete()

            for obj in serializers.deserialize("json", m_config):
                obj.save()
            for obj in serializers.deserialize("json", u_config):
                obj.save()
            for obj in serializers.deserialize("json", p_config):
                obj.save()

            reload_config()

            content = {"flag": "Success"}
        except Exception as e:
            content = {"flag": "Error", "context": str(e)}

    return HttpResponse(json.dumps(content))
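
The delete-then-deserialize sequence above leaves the config tables empty if a later obj.save() fails; wrapping the import branch in a transaction, as in this sketch, closes that window:

from django.core import serializers
from django.db import transaction

with transaction.atomic():
    main_config_qc.delete()
    upstream_config_qc.delete()
    proxy_config_qc.delete()
    for blob in (m_config, u_config, p_config):
        for obj in serializers.deserialize("json", blob):
            obj.save()
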
Esempio n. 60
0
def config_check_state():
    from sdncon.controller.notification import do_modify_notification, do_delete_notification
    last_config_state = config_read_state()
    try:
        last_config_instances = last_config_state.get('instances')
    except Exception as _e:
        last_config_instances = {}

    current_config_instances = {}

    for config_model in config_models:
        try:
            serialized_old_instances = json.dumps(
                last_config_instances.get(config_model.__name__, []))
            old_instance_info = serializers.deserialize(
                'json', serialized_old_instances)
            old_instances = [info.object for info in old_instance_info]
        except Exception as _e:
            old_instances = []

        new_instances = config_model.objects.all()

        for new_instance in new_instances:
            for index, old_instance in enumerate(old_instances):
                if new_instance.pk == old_instance.pk:
                    if not model_instances_equal(new_instance, old_instance):
                        config_do_update(config_model, old_instance,
                                         new_instance)
                        do_modify_notification(config_model, new_instance)
                    del old_instances[index]
                    break
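
The model_instances_equal helper is not shown in this excerpt; a field-by-field comparison along these lines would fit (an assumption, not necessarily the project's actual implementation):

def model_instances_equal(a, b):
    # compare every concrete field value, foreign keys by their raw id
    for field in a._meta.fields:
        if getattr(a, field.attname) != getattr(b, field.attname):
            return False
    return True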