Example #1
def dump_data(request, appname):
    app_list = SortedDict()
    
    try:
        if request.POST:
            for appname in request.POST.getlist('apps'):
                app = get_app(appname)
                app_list[app] = None
            appname = 'choices'
        else:
            app = get_app(appname)
            app_list[app] = None
    except ImproperlyConfigured:
        if appname == 'all':
            for app in get_apps():
                app_list[app] = None

    if len(app_list) > 0:
        objects = []
        for model in sort_dependencies(app_list.items()):
            if not model._meta.proxy and router.allow_syncdb(DEFAULT_DB_ALIAS, model):
                objects.extend(model._default_manager.using(DEFAULT_DB_ALIAS).all())
        serializers.get_serializer('json')
        json = serializers.serialize('json', objects, indent=2, use_natural_keys=True)
        response = HttpResponse(json, mimetype='application/json')
        response['Content-Disposition'] = 'attachment; filename=%s_%s_fixture.json' % (date.today().__str__(),appname)
        return response

    return render_to_response('diagnostic/dumpdata.html',context_instance=RequestContext(request))
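For context, the (app, None) pairs built above follow the dumpdata convention where a None model list means "every model in that app". A minimal sketch of the same call outside the view (hypothetical 'campus' app label; the old get_app/dumpdata API used in this example — in later Django versions sort_dependencies lives in django.core.serializers):

from django.core.management.commands.dumpdata import sort_dependencies
from django.db.models import get_app

# None as the model list tells sort_dependencies to expand to every model in the app.
app_list = {get_app('campus'): None}
for model in sort_dependencies(app_list.items()):
    print model.__name__  # models come out in a dependency-safe serialization order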
Example #2
def get_dependencies(models):
    """
    Function to help find model dependencies.
    """

    all_models = set()

    to_process = models
    while len(to_process):
        new_to_process = []
        for model in to_process:
            if model in all_models:
                continue

            all_models.add(model)

            for field in model._meta.fields:
                if hasattr(field.rel, 'to'):
                    new_to_process.append(field.rel.to)
            for field in model._meta.many_to_many:
                new_to_process.append(field.rel.to)

        to_process = new_to_process

    return dumpdata.sort_dependencies([(None, (model,)) for model in all_models])
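A hedged usage sketch (Book is a hypothetical model): starting from one or more models, the helper above walks ForeignKey and many-to-many relations transitively and returns the whole reachable graph in serialization order.

# Hypothetical call: Book plus everything reachable through its relations,
# ordered so that related models precede the models that point at them.
ordered_models = get_dependencies([Book])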
Example #3
def get_dependencies(models):
    """
    Function to help find model dependencies.
    """

    all_models = set()

    to_process = models
    while len(to_process):
        new_to_process = []
        for model in to_process:
            if model in all_models:
                continue

            all_models.add(model)

            for field in model._meta.fields:
                if hasattr(field.rel, 'to'):
                    new_to_process.append(field.rel.to)
            for field in model._meta.many_to_many:
                new_to_process.append(field.rel.to)

        to_process = new_to_process

    return dumpdata.sort_dependencies([(None, (model, ))
                                       for model in all_models])
Example #4
 def get_objects():
     for model in sort_dependencies(app_list):
         if (not model._meta.proxy and model._meta.managed and
                 router.allow_migrate(self.connection.alias, model)):
             queryset = model._default_manager.using(self.connection.alias).order_by(model._meta.pk.name)
             for obj in queryset.iterator():
                 yield obj
Example #5
 def test_dependency_sorting_5(self):
     sorted_deps = sort_dependencies(
         [('fixtures_regress', [Person, Book, Store])]
     )
     self.assertEqual(
         sorted_deps,
         [Store, Person, Book]
     )
Example #6
 def test_dependency_sorting_dangling(self):
     sorted_deps = sort_dependencies(
         [('fixtures_regress', [Person, Circle1, Store, Book])]
     )
     self.assertEqual(
         sorted_deps,
         [Circle1, Store, Person, Book]
     )
Example #7
 def test_dependency_sorting_normal(self):
     sorted_deps = sort_dependencies(
         [('fixtures_regress', [Person, ExternalDependency, Book])]
     )
     self.assertEqual(
         sorted_deps,
         [Person, Book, ExternalDependency]
     )
Example #8
 def get_objects():
     for model in sort_dependencies(app_list):
         if not model._meta.proxy and model._meta.managed and router.allow_migrate(
                 self.connection.alias, model):
             queryset = model._default_manager.using(
                 self.connection.alias).order_by(model._meta.pk.name)
             for obj in queryset.iterator():
                 yield obj
Example #9
 def test_dependency_sorting_5(self):
     sorted_deps = sort_dependencies(
         [('fixtures_regress', [Person, Book, Store])]
     )
     self.assertEqual(
         sorted_deps,
         [Store, Person, Book]
     )
Example #10
 def test_dependency_sorting_dangling(self):
     sorted_deps = sort_dependencies(
         [('fixtures_regress', [Person, Circle1, Store, Book])]
     )
     self.assertEqual(
         sorted_deps,
         [Circle1, Store, Person, Book]
     )
Example #11
 def test_dependency_sorting_normal(self):
     sorted_deps = sort_dependencies(
         [('fixtures_regress', [Person, ExternalDependency, Book])]
     )
     self.assertEqual(
         sorted_deps,
         [Person, Book, ExternalDependency]
     )
Example #12
 def test_dependency_sorting(self):
     """
     Now let's check the dependency sorting explicitly.
     It doesn't matter in what order you mention the models:
     Store *must* be serialized before Person, and both
     must be serialized before Book.
     """
     sorted_deps = sort_dependencies([("fixtures_regress", [Book, Person, Store])])
     self.assertEqual(sorted_deps, [Store, Person, Book])
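The order asserted in these tests is driven by natural-key dependency hints on the models. A rough sketch of what the fixtures_regress models declare (field names are illustrative, not the exact test models):

class Store(models.Model):
    name = models.CharField(max_length=255, unique=True)

    def natural_key(self):
        return (self.name,)


class Person(models.Model):
    name = models.CharField(max_length=255, unique=True)

    def natural_key(self):
        return (self.name,)


class Book(models.Model):
    name = models.CharField(max_length=255)
    author = models.ForeignKey(Person)

    def natural_key(self):
        return (self.name,)
    # The hint sort_dependencies honours: serialize Store and Person before Book.
    natural_key.dependencies = ['fixtures_regress.store', 'fixtures_regress.person']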
Example #13
def sort_relation_dependencies(app_list):
    models = sort_dependencies(app_list.items())
    models.reverse()
    #print "(before) #models: %d" % len(models)
    model_list, visited = [], []
    while models:
        model = models.pop(0)
        _sort_relation_order(model, model_list, visited)
    #print "(after) #models: %d" % len(model_list)
    return model_list
Example #14
def sort_relation_dependencies(app_list):
    models = sort_dependencies(app_list.items())
    models.reverse()
    #print "(before) #models: %d" % len(models)
    model_list, visited = [], []
    while models:
        model = models.pop(0)
        _sort_relation_order(model, model_list, visited)
    #print "(after) #models: %d" % len(model_list)
    return model_list
Example #15
 def test_dependency_sorting(self):
     """
     Now let's check the dependency sorting explicitly.
     It doesn't matter in what order you mention the models:
     Store *must* be serialized before Person, and both
     must be serialized before Book.
     """
     sorted_deps = sort_dependencies([('fixtures_regress',
                                       [Book, Person, Store])])
     self.assertEqual(sorted_deps, [Store, Person, Book])
Example #16
    def test_dependency_sorting_m2m_simple(self):
        """
        M2M relations without explicit through models SHOULD count as dependencies

        Regression test for bugs that could be caused by flawed fixes to
        #14226, namely if M2M checks are removed from sort_dependencies
        altogether.
        """
        sorted_deps = sort_dependencies([('fixtures_regress',
                                          [M2MSimpleA, M2MSimpleB])])
        self.assertEqual(sorted_deps, [M2MSimpleB, M2MSimpleA])
Example #17
 def test_dependency_sorting_m2m_complex(self):
     """
     M2M relations with explicit through models should NOT count as
     dependencies.  The through model itself will have dependencies, though.
     """
     sorted_deps = sort_dependencies([
         ('fixtures_regress', [M2MComplexA, M2MComplexB, M2MThroughAB])
     ])
     # Order between M2MComplexA and M2MComplexB doesn't matter. The through
     # model has dependencies to them though, so it should come last.
     self.assertEqual(sorted_deps[-1], M2MThroughAB)
Example #18
 def test_dependency_sorting_m2m_complex(self):
     """
     M2M relations with explicit through models should NOT count as
     dependencies.  The through model itself will have dependencies, though.
     """
     sorted_deps = sort_dependencies(
         [('fixtures_regress', [M2MComplexA, M2MComplexB, M2MThroughAB])]
     )
     # Order between M2MComplexA and M2MComplexB doesn't matter. The through
     # model has dependencies to them though, so it should come last.
     self.assertEqual(sorted_deps[-1], M2MThroughAB)
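Roughly speaking, the explicit through model in these tests carries plain ForeignKeys back to both endpoints, which is what pushes it to the end of the sorted list (illustrative sketch, not the exact test models):

class M2MComplexA(models.Model):
    name = models.CharField(max_length=255, unique=True)
    b_set = models.ManyToManyField('M2MComplexB', through='M2MThroughAB')

    def natural_key(self):
        return (self.name,)


class M2MComplexB(models.Model):
    name = models.CharField(max_length=255, unique=True)

    def natural_key(self):
        return (self.name,)


class M2MThroughAB(models.Model):
    # The through table depends on both endpoints, so it sorts after them.
    a = models.ForeignKey(M2MComplexA)
    b = models.ForeignKey(M2MComplexB)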
Example #19
    def test_dependency_sorting_m2m_simple(self):
        """
        M2M relations without explicit through models SHOULD count as dependencies

        Regression test for bugs that could be caused by flawed fixes to
        #14226, namely if M2M checks are removed from sort_dependencies
        altogether.
        """
        sorted_deps = sort_dependencies(
            [('fixtures_regress', [M2MSimpleA, M2MSimpleB])]
        )
        self.assertEqual(sorted_deps, [M2MSimpleB, M2MSimpleA])
Example #20
    def handle(self, *args, **options):
        from django.db.models import get_app

        #TODO : Check export mode
        db.reset_queries()
        sourceDatabase = options.get('sourceDatabase')
        dataLimit = options.get('dataLimit')
        app = get_app(options.get('app'))

        con = connections[sourceDatabase]
        if con.vendor != 'oracle':
            print "Source database has to be oracle."
            return

        user = settings.DATABASES[sourceDatabase]['USER']
        passwd = settings.DATABASES[sourceDatabase]['PASSWORD']
        host = settings.DATABASES[sourceDatabase]['HOST']
        port = settings.DATABASES[sourceDatabase]['PORT']
        name = settings.DATABASES[sourceDatabase]['NAME']

        app_list = SortedDict((app, None) for app in [app])
        tables = []
        sorted = sort_dependencies(app_list.items())
        lastObjectName = sorted[-1].__name__
        filename = lastObjectName + ".postgresql_psycopg2.sql"
        chemblSQLPath  = os.path.join(os.path.dirname(app.__file__),'sql', filename)
        location = chemblSQLPath
        oracleHome = os.environ['ORACLE_HOME']

        if options.get('dumpfile'):
            if not options.get('dumpfile').endswith('.sql'):
                location = os.path.join(options.get('dumpfile'), filename)
            else:
                location = options.get('dumpfile')


        for model in reversed(sorted):
            if not model._meta.managed:
                continue
            tables.append(model._meta.db_table)

        print self.confTemplate % (oracleHome, host, name, port, user, passwd, user, " ".join(tables), dataLimit, location)

        if location != chemblSQLPath:
            print "different! location = " + location + ", chemblSQLPath = " + chemblSQLPath
            f = open(location, 'w')
            f.close()
            os.symlink(location, chemblSQLPath)


#-----------------------------------------------------------------------------------------------------------------------
Example #21
        def get_objects():
            # Collate the objects to be serialized.
            for model in sort_dependencies(app_list.items()):
                if model in excluded_models:
                    continue
                if not model._meta.proxy and router.allow_syncdb(using, model):
                    if use_base_manager:
                        objects = model._base_manager
                    else:
                        objects = QuerySet(model).all()

                    queryset = objects.using(using).order_by(model._meta.pk.name)
                    if primary_keys:
                        queryset = queryset.filter(pk__in=primary_keys)
                    for obj in queryset.iterator():
                        yield obj
Example #22
        def get_objects():
            """
            Collate the objects for serialization, taken from dumpdata command
            and adjusted to sanitize User password hashes
            """
            # Collate the objects to be serialized.
            for model in sort_dependencies(app_list.items()):
                if not model._meta.proxy and router.allow_migrate(DEFAULT_DB_ALIAS, model):
                    objects = model._base_manager

                    queryset = objects.using(DEFAULT_DB_ALIAS).order_by(model._meta.pk.name)
                    for obj in queryset.iterator():

                        # Sanitize user objects
                        if model._meta.model_name == 'user':
                            obj.set_unusable_password()
                        yield obj
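In dumpdata-style code this generator is streamed straight into the serializer; a minimal sketch of how it is typically consumed (assuming the surrounding command has already built app_list):

from django.core import serializers

# The iterator is consumed lazily, so large tables are never loaded into memory at once.
data = serializers.serialize('json', get_objects(), indent=2)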
Example #23
        def get_objects():
            # Collate the objects to be serialized.
            for model in sort_dependencies(app_list.items()):
                if model in excluded_models:
                    continue
                if not model._meta.proxy and router.allow_syncdb(using, model):
                    if use_base_manager:
                        objects = model._base_manager
                    else:
                        objects = QuerySet(model).all()

                    queryset = objects.using(using).order_by(
                        model._meta.pk.name)
                    if primary_keys:
                        queryset = queryset.filter(pk__in=primary_keys)
                    for obj in queryset.iterator():
                        yield obj
Example #24
 def test_dependency_sorting_m2m_complex_circular_2(self):
     """
      Circular M2M relations with explicit through models should be serializable.
      This test checks circularity with explicit natural_key.dependencies.
     """
     try:
         sorted_deps = sort_dependencies([('fixtures_regress', [
             M2MComplexCircular2A, M2MComplexCircular2B,
             M2MCircular2ThroughAB
         ])])
     except CommandError:
         self.fail("Serialization dependency solving algorithm isn't "
                   "capable of handling circular M2M setups with "
                   "intermediate models plus natural key dependency hints.")
     self.assertEqual(sorted_deps[:2],
                      [M2MComplexCircular2A, M2MComplexCircular2B])
     self.assertEqual(sorted_deps[2:], [M2MCircular2ThroughAB])
Example #25
 def test_dependency_sorting_m2m_complex_circular_2(self):
     """
      Circular M2M relations with explicit through models should be serializable.
      This test checks circularity with explicit natural_key.dependencies.
     """
     try:
         sorted_deps = sort_dependencies([
             ('fixtures_regress', [
                 M2MComplexCircular2A,
                 M2MComplexCircular2B,
                 M2MCircular2ThroughAB])
         ])
     except CommandError:
         self.fail("Serialization dependency solving algorithm isn't "
                   "capable of handling circular M2M setups with "
                   "intermediate models plus natural key dependency hints.")
     self.assertEqual(sorted_deps[:2], [M2MComplexCircular2A, M2MComplexCircular2B])
     self.assertEqual(sorted_deps[2:], [M2MCircular2ThroughAB])
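The circular case only resolves because the through model names its targets via natural_key.dependencies; a rough sketch of that hint (illustrative fields, not the exact test models):

class M2MCircular2ThroughAB(models.Model):
    a = models.ForeignKey('M2MComplexCircular2A')
    b = models.ForeignKey('M2MComplexCircular2B')

    def natural_key(self):
        return (self.a_id, self.b_id)
    # Explicit hint that lets sort_dependencies order an otherwise circular setup.
    natural_key.dependencies = [
        'fixtures_regress.m2mcomplexcircular2a',
        'fixtures_regress.m2mcomplexcircular2b',
    ]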
Example #26
    def handle(self, *app_labels, **options):
        self.options = options
        self.app_labels = app_labels
        self.relateds = []

        format = self.options.get('format')
        indent = self.options.get('indent')
        show_traceback = self.options.get('traceback')
        use_natural_keys = self.options.get('use_natural_keys')
        self.excluded = self.options.get('excluded').split(',')

        # Check that the serialization format exists; this is a shortcut to
        # avoid collating all the objects and _then_ failing.
        if format not in serializers.get_public_serializer_formats():
            raise CommandError("Unknown serialization format: %s" % format)

        try:
            serializers.get_serializer(format)
        except KeyError:
            raise CommandError("Unknown serialization format: %s" % format)

        # Now collate the objects to be serialized.
        self.dump_list = InstancesList()
        for qs in self.get_querysets():
            for item in qs:
                self.extract_fields(item)
        app_list = self.dump_list.model_list()
        obj_list = []
        ordered = sort_dependencies(app_list.items())
        explored = []
        for item in ordered:
            if item not in explored:
                app_list = self.dump_list.get_items_by_model_path(item)
                obj_list.extend(app_list)
                explored.append(item)
        try:
            return serializers.serialize(format,
                                         obj_list,
                                         indent=indent,
                                         use_natural_keys=use_natural_keys)
        except Exception, e:
            if show_traceback:
                raise
            raise CommandError("Unable to serialize database: %s" % e)
Example #27
        def get_objects():
            """
            Collate the objects for serialization, taken from dumpdata command
            and adjusted to sanitize User password hashes
            """
            # Collate the objects to be serialized.
            for model in sort_dependencies(app_list.items()):
                if not model._meta.proxy and router.allow_migrate(
                        DEFAULT_DB_ALIAS, model):
                    objects = model._base_manager

                    queryset = objects.using(DEFAULT_DB_ALIAS).order_by(
                        model._meta.pk.name)
                    for obj in queryset.iterator():

                        # Sanitize user objects
                        if model._meta.model_name == 'user':
                            obj.set_unusable_password()
                        yield obj
Example #28
    def handle(self, *app_labels, **options):
        self.options = options
        self.app_labels = app_labels
        self.relateds = []

        format = self.options.get('format')
        indent = self.options.get('indent')
        show_traceback = self.options.get('traceback')
        use_natural_keys = self.options.get('use_natural_keys')
        self.excluded = self.options.get('excluded').split(',')

        # Check that the serialization format exists; this is a shortcut to
        # avoid collating all the objects and _then_ failing.
        if format not in serializers.get_public_serializer_formats():
            raise CommandError("Unknown serialization format: %s" % format)

        try:
            serializers.get_serializer(format)
        except KeyError:
            raise CommandError("Unknown serialization format: %s" % format)

        # Now collate the objects to be serialized.
        self.dump_list = InstancesList()
        for qs in self.get_querysets():
            for item in qs:
                self.extract_fields(item)
        app_list = self.dump_list.model_list()
        obj_list = []
        ordered = sort_dependencies(app_list.items())
        explored = []
        for item in ordered:
            if item not in explored:
                app_list = self.dump_list.get_items_by_model_path(item)
                obj_list.extend(app_list)
                explored.append(item)
        try:
            return serializers.serialize(format, obj_list,
                                         indent=indent,
                                         use_natural_keys=use_natural_keys)
        except Exception, e:
            if show_traceback:
                raise
            raise CommandError("Unable to serialize database: %s" % e)
Example #29
    def test_dependency_sorting_m2m_complex_circular_1(self):
        """
        Circular M2M relations with explicit through models should be serializable
        """
        A, B, C, AtoB, BtoC, CtoA = (M2MComplexCircular1A, M2MComplexCircular1B,
                                     M2MComplexCircular1C, M2MCircular1ThroughAB,
                                     M2MCircular1ThroughBC, M2MCircular1ThroughCA)
        try:
            sorted_deps = sort_dependencies(
                [('fixtures_regress', [A, B, C, AtoB, BtoC, CtoA])]
            )
        except CommandError:
            self.fail("Serialization dependency solving algorithm isn't "
                      "capable of handling circular M2M setups with "
                      "intermediate models.")

        # The dependency sorting should not result in an error, and the
        # through model should have dependencies to the other models and as
        # such come last in the list.
        self.assertEqual(sorted_deps[:3], [A, B, C])
        self.assertEqual(sorted_deps[3:], [AtoB, BtoC, CtoA])
Example #30
def data_dump(request):
    if not request.user.is_authenticated() or not request.user.is_superuser:
        response = HttpResponse(json.dumps({"Error": "Not Authorized"}))
        response['Content-type'] = 'application/json'
        return response

    # if we wanted all apps we would use the next line, but we only want campus
    #app_list = SortedDict([(app, None) for app in get_apps()])
    app_list = SortedDict([(get_app('campus'), None)])

    # Now collate the objects to be serialized.
    objects = []

    # Needed because SQLite doesn't use
    def ordering(self):
        if hasattr(self, 'name'):
            return str(self.name).lower()
        elif hasattr(self, 'id'):
            return self.id
        else:
            return self.pk

    for model in sort_dependencies(app_list.items()):
        # skip groupedlocation model (not needed since Group uses natural keys)
        if model == GroupedLocation:
            continue
        # - make ordering case insensitive
        # - must also make special case for MapObj else the leaf class will be
        #   serialized, not the actual MapObj itself
        if model == MapObj:
            objects.extend(sorted(model.objects.mob_filter(), key=ordering))
        else:
            objects.extend(sorted(model.objects.all(), key=ordering))
    try:
        data = serializers.serialize('json',
                                     objects,
                                     indent=4,
                                     use_natural_keys=True)
    except Exception, e:
        data = serializers.serialize('json', "ERORR!")
Example #31
    def _update_models(self):
        if (self.process_before() == False):
            # pre-processing
            return

        # Get every model class from INSTALLED_APPS
        self._using = DEFAULT_DB_ALIAS  # TODO: support multiple databases, not just the default
        models = sort_dependencies(self._app_list.items())
        ofn = "/tmp/django_s3_backup_%s_" % (
            datetime.now().strftime("%Y%m%d%H%M%S"))
        # print "Writing JSON to %s..." % ofn
        print "get all models Done."
        # Dump JSON for each model in the app -> save to S3
        stream_fp = None
        for model in models:
            # dbg start
            #if ( model.__name__ != "Customer" ):
            #    continue
            # dbg end
            if ((not model._meta.proxy)
                    and (router.allow_syncdb(self._using, model))):
                try:
                    # Write all records out to the file ofn
                    fsize = self.create_json_file(ofn, model, self._using)
                    if (fsize == None):
                        # Skip when no records exist
                        continue
                    # Upload the file contents to S3
                    self.update_S3(model, ofn, fsize)
                except:
                    if (stream_fp):
                        stream_fp.close()
                        os.remove(ofn)
                    raise

        if (self.process_after() == False):
            # post-processing
            return
Example #32
    def test_dependency_sorting_m2m_complex_circular_1(self):
        """
        Circular M2M relations with explicit through models should be serializable
        """
        A, B, C, AtoB, BtoC, CtoA = (M2MComplexCircular1A,
                                     M2MComplexCircular1B,
                                     M2MComplexCircular1C,
                                     M2MCircular1ThroughAB,
                                     M2MCircular1ThroughBC,
                                     M2MCircular1ThroughCA)
        try:
            sorted_deps = sort_dependencies([('fixtures_regress',
                                              [A, B, C, AtoB, BtoC, CtoA])])
        except CommandError:
            self.fail("Serialization dependency solving algorithm isn't "
                      "capable of handling circular M2M setups with "
                      "intermediate models.")

        # The dependency sorting should not result in an error, and the
        # through model should have dependencies to the other models and as
        # such come last in the list.
        self.assertEqual(sorted_deps[:3], [A, B, C])
        self.assertEqual(sorted_deps[3:], [AtoB, BtoC, CtoA])
Example #33
def data_dump(request):
    if not request.user.is_authenticated() or not request.user.is_superuser:
        response = HttpResponse(json.dumps({"Error": "Not Authorized"}))
        response['Content-type'] = 'application/json'
        return response

    # if we wanted all apps we would use the next line, but we only want campus
    #app_list = SortedDict([(app, None) for app in get_apps()])
    app_list = SortedDict([(get_app('campus'), None)])

    # Now collate the objects to be serialized.
    objects = []

    # Needed because SQLite doesn't use
    def ordering(self):
        if hasattr(self, 'name'):
            return str(self.name).lower()
        elif hasattr(self, 'id'):
            return self.id
        else:
            return self.pk

    for model in sort_dependencies(app_list.items()):
        # skip groupedlocation model (not needed since Group uses natural keys)
        if model == GroupedLocation:
            continue
        # - make ordering case insensitive
        # - must also make special case for MapObj else the leaf class will be
        #   serialized, not the actual MapObj itself
        if model == MapObj:
            objects.extend( sorted(model.objects.mob_filter(), key=ordering) )
        else:
            objects.extend( sorted(model.objects.all(), key=ordering) )
    try:
        data = serializers.serialize('json', objects, indent=4, use_natural_keys=True)
    except Exception, e:
        data = serializers.serialize('json', "ERORR!")
Example #34
def dump_data(request, appname):
    app_list = SortedDict()

    try:
        if request.method == 'POST':
            for appname in request.POST.getlist('apps'):
                app = get_app(appname)
                app_list[app] = None
            appname = 'choices'
        else:
            app = get_app(appname)
            app_list[app] = None
    except ImproperlyConfigured:
        if appname == 'all':
            for app in get_apps():
                app_list[app] = None

    if (len(app_list) > 0):
        objects = []
        for model in sort_dependencies(app_list.items()):
            if not model._meta.proxy and router.allow_syncdb(
                    DEFAULT_DB_ALIAS, model):
                objects.extend(
                    model._default_manager.using(DEFAULT_DB_ALIAS).all())
        serializers.get_serializer('json')
        json = serializers.serialize('json',
                                     objects,
                                     indent=2,
                                     use_natural_keys=True)
        response = HttpResponse(json, mimetype='application/json')
        response[
            'Content-Disposition'] = 'attachment; filename=%s_%s_fixture.json' % (
                date.today().__str__(), appname)
        return response

    return render_to_response('diagnostic/dumpdata.html',
                              context_instance=RequestContext(request))
Example #35
    def _update_models(self):
        if ( self.process_before() == False ):
            # pre-processing
            return
        
        # Get every model class from INSTALLED_APPS
        self._using = DEFAULT_DB_ALIAS        # TODO: support multiple databases, not just the default
        models = sort_dependencies(self._app_list.items())
        ofn = "/tmp/django_s3_backup_%s_" % ( datetime.now().strftime("%Y%m%d%H%M%S") )
        # print "Writing JSON to %s..." % ofn
        print "get all models Done."
        # Dump JSON for each model in the app -> save to S3
        stream_fp = None
        for model in models:
            # dbg start
            #if ( model.__name__ != "Customer" ):
            #    continue
            # dbg end
            if ( (not model._meta.proxy) and (router.allow_syncdb(self._using, model)) ):
                try:
                    # Write all records out to the file ofn
                    fsize = self.create_json_file(ofn, model, self._using)
                    if ( fsize == None ):
                        # Skip when no records exist
                        continue
                    # Upload the file contents to S3
                    self.update_S3(model, ofn, fsize)
                except:
                    if ( stream_fp ):
                        stream_fp.close()
                        os.remove(ofn)
                    raise

        if ( self.process_after() == False ):
            # post-processing
            return
Example #36
 def test_dependency_sorting_3(self):
     sorted_deps = sort_dependencies([("fixtures_regress", [Store, Book, Person])])
     self.assertEqual(sorted_deps, [Store, Person, Book])
Example #37
    def handle(self, *args, **options):
        """ Dump the objects, Luke. """

        def empty_parameters(processor):
            """ Remove processors parameters if they come from disk. """

            if bool(processor.source_uri):
                processor.parameters = None

            return processor

        limiting_usernames = os.environ.get('SELECTIVE_DUMP_1FLOW_USERS',
                                            u'admin').split(',')

        limiting_categories = os.environ.get('SELECTIVE_DUMP_1FLOW_CATEGORIES',
                                             u'1flow-stock').split(',')

        fixture_filename = os.path.join(
            settings.PROJECT_ROOT, 'core', 'fixtures',
            u'core_processors_pack__{0}__{1}.json'.format(
                u','.join(limiting_usernames),
                u','.join(limiting_categories),
            )
        )

        limiting_query1 = (
            Q(slug__in=limiting_categories)
            | Q(user__username__in=limiting_usernames)
        )

        limiting_query2 = (
            Q(categories__slug__in=limiting_categories)
            | Q(user__username__in=limiting_usernames)
        )

        # Use OrderedDict because sort_dependencies() doesn't
        # work as expected. See original SO answer for details.
        app_list = OrderedDict()

        total_count = 0

        app_list['core.ProcessorCategory'] = \
            ProcessorCategory.objects.filter(limiting_query1).order_by('id')

        total_count += app_list['core.ProcessorCategory'].count()

        app_list['core.Processor'] = [
            empty_parameters(p)
            for p in Processor.objects.filter(limiting_query2).order_by('id')
        ]

        total_count += len(app_list['core.Processor'])

        app_list['core.ProcessChain'] = \
            ProcessingChain.objects.filter(limiting_query2).order_by('id')

        total_count += app_list['core.ProcessChain'].count()

        app_list['core.ChainedItem'] = \
            ChainedItem.objects.filter(
                chain__in=app_list['core.ProcessChain']).order_by(
                    'chain', 'position').order_by('id')

        total_count += app_list['core.ChainedItem'].count()

        # ——————————————————————————————————————————————————————— Serialization

        data = serializers.serialize('json',
                                     sort_dependencies(app_list.items()),
                                     indent=2, use_natural_keys=True)

        with open(fixture_filename, 'w') as f:
            f.write(data)

        self.stdout.write(u'Exported %s items in %s.'
                          % (total_count, fixture_filename))