Example #1
def test_iter(broker):
    broker.purge_queue()
    broker.cache.clear()
    it = [i for i in range(10)]
    it2 = [(1, -1), (2, -1), (3, -4), (5, 6)]
    it3 = (1, 2, 3, 4, 5)
    t = async_iter('math.floor', it, sync=True)
    t2 = async_iter('math.copysign', it2, sync=True)
    t3 = async_iter('math.floor', it3, sync=True)
    t4 = async_iter('math.floor', (1,), sync=True)
    result_t = result(t)
    assert result_t is not None
    task_t = fetch(t)
    assert task_t.result == result_t
    assert result(t2) is not None
    assert result(t3) is not None
    assert result(t4)[0] == 1
    # test iter class
    i = Iter('math.copysign', sync=True, cached=True)
    i.append(1, -1)
    i.append(2, -1)
    i.append(3, -4)
    i.append(5, 6)
    assert i.started is False
    assert i.length() == 4
    assert i.run() is not None
    assert len(i.result()) == 4
    assert len(i.fetch().result) == 4
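    # appending after a run marks the set as not started, so result() is None until run() is called again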
    i.append(1, -7)
    assert i.result() is None
    i.run()
    assert len(i.result()) == 5
Example #2
def test_cached(broker):
    broker.purge_queue()
    broker.cache.clear()
    group = 'cache_test'
    # queue the tests
    task_id = async_task('math.copysign', 1, -1, cached=True, broker=broker)
    async_task('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    async_task('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    async_task('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    async_task('math.copysign', 1, -1, cached=True, broker=broker, group=group)
    async_task('math.copysign', 1, -1, cached=True, broker=broker, group=group)
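    # deliberately misspelled function name; it registers as the single failed task counted below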
    async_task('math.popysign', 1, -1, cached=True, broker=broker, group=group)
    iter_id = async_iter('math.floor', [i for i in range(10)], cached=True)
    # test wait on cache
    # test wait timeout
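    # nothing has run yet, so these short waits time out and return None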
    assert result(task_id, wait=10, cached=True) is None
    assert fetch(task_id, wait=10, cached=True) is None
    assert result_group(group, wait=10, cached=True) is None
    assert result_group(group, count=2, wait=10, cached=True) is None
    assert fetch_group(group, wait=10, cached=True) is None
    assert fetch_group(group, count=2, wait=10, cached=True) is None
    # run a single inline cluster
    task_count = 17
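    # 1 single task + 6 group tasks + 10 tasks queued by async_iter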
    assert broker.queue_size() == task_count
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    for i in range(task_count):
        pusher(task_queue, stop_event, broker=broker)
    assert broker.queue_size() == 0
    assert task_queue.qsize() == task_count
    task_queue.put('STOP')
    result_queue = Queue()
    worker(task_queue, result_queue, Value('f', -1))
    assert result_queue.qsize() == task_count
    result_queue.put('STOP')
    monitor(result_queue)
    assert result_queue.qsize() == 0
    # assert results
    assert result(task_id, wait=500, cached=True) == -1
    assert fetch(task_id, wait=500, cached=True).result == -1
    # make sure it's not in the db backend
    assert fetch(task_id) is None
    # assert group
    assert count_group(group, cached=True) == 6
    assert count_group(group, cached=True, failures=True) == 1
    assert result_group(group, cached=True) == [-1, -1, -1, -1, -1]
    assert len(result_group(group, cached=True, failures=True)) == 6
    assert len(fetch_group(group, cached=True)) == 6
    assert len(fetch_group(group, cached=True, failures=False)) == 5
    delete_group(group, cached=True)
    assert count_group(group, cached=True) is None
    delete_cached(task_id)
    assert result(task_id, cached=True) is None
    assert fetch(task_id, cached=True) is None
    # iter cached
    assert result(iter_id) is None
    assert result(iter_id, cached=True) is not None
    broker.cache.clear()
Example #3
def test_cluster(broker):
    broker.list_key = 'cluster_test:q'
    broker.delete_queue()
    task = async_task('django_q.tests.tasks.count_letters',
                      DEFAULT_WORDLIST,
                      broker=broker)['id']
    assert broker.queue_size() == 1
    task_queue = Queue()
    assert task_queue.qsize() == 0
    result_queue = Queue()
    assert result_queue.qsize() == 0
    event = Event()
    event.set()
    # Test push
    pusher(task_queue, event, broker=broker)
    assert task_queue.qsize() == 1
    assert queue_size(broker=broker) == 0
    # Test work
    task_queue.put('STOP')
    worker(task_queue, result_queue, Value('f', -1))
    assert task_queue.qsize() == 0
    assert result_queue.qsize() == 1
    # Test monitor
    result_queue.put('STOP')
    monitor(result_queue)
    assert result_queue.qsize() == 0
    # check result
    assert result(task) == 1506
    broker.delete_queue()
Example #4
    def test_unhandled_exception_running_code(self):

        with patch('grading_module.ag.grade',
                   side_effect=Exception('Drat!')) as patched_ag:
            batch = self.batch.id

            task_id = grading_queue.queue_grading_task(
                batch, self.asgt.id, self.std.id, self.pc.id,
                testing=True)  #asgt 1, student 1, class 1
            # wait...
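            # the second positional argument of result() is the wait timeout in milliseconds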
            task_result = result(task_id, 2000)

            # Hook should have saved Grade
            grade = Grade.objects.get(batch=batch)
            grade_dict = model_to_dict(grade)
            grade_dict.pop('student_github_url')
            grade_dict.pop('id')

            expected_grade = {
                'ag_error': 'Drat!',
                'batch': batch,
                'assignment': self.asgt.id,
                'student': self.std.id,
                'programming_class': self.pc.id,
                'score': Decimal(0),
                'generated_report': None,
                'github_commit_hash': None,
                'instructor_comments': None
            }

            self.assertDictEqual(expected_grade, grade_dict)
            self.batch.refresh_from_db()
            self.assertEqual(1, self.batch.processed)
Example #5
File: views.py Project: eaudeweb/gemet
    def get_context_data(self, **kwargs):
        context = super(DownloadView, self).get_context_data(**kwargs)
        show_message = False

        if self.version == 'latest':
            current_version = Version.objects.get(is_current=True)
            try:
                async_task = current_version.asynctask
                if async_task.status == AsyncTask.QUEUED:
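                    # result() returns None while the task has not finished yet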
                    if result(async_task.task):
                        async_task.status = AsyncTask.FINISHED
                        async_task.save()
                    else:
                        show_message = True
            except ObjectDoesNotExist:
                pass

        is_latest = False
        current_version = Version.objects.get(is_current=True)
        if self.version == 'latest' or self.version == current_version.identifier:
            is_latest = True

        if self.version == 'latest':
            version_identifier = current_version.identifier
        else:
            version_identifier = self.version

        context.update({
            'version': self.version,
            'show_message': show_message,
            'is_latest': is_latest,
            'version_identifier': version_identifier
        })
        return context
Example #6
    def get_part_processed_multiple_batch(self, request, **kwargs):
        """
        Get the part processed data from elasticsearch and the stats about the
        multiple batch
        """
        # TODO: Uncached for now. Invalidation that works for everyone may be
        #       impossible.
        bundle = self.build_bundle(request=request)
        session_key = request.COOKIES[settings.SESSION_COOKIE_NAME]
        # self.authorized_create_detail(self.get_object_list(bundle.request), bundle)
        if kwargs.get("multi_batch", None):
            mb = kwargs.get("multi_batch")
            id = mb.id
        else:
            id = request.GET.get("current_batch")
            mb = CBHCompoundMultipleBatch.objects.get(pk=id)

        task_id = request.session.get("mb_inprogress_%d" % mb.id, None)

        if task_id:
            res = result(task_id, wait=10)
            if isinstance(res, basestring):
                raise Exception(res)
        if not mb.uploaded_data:
            #The uploaded data field will be set once the data is fully processed
            return self.create_response(request, {}, response_class=http.HttpAccepted)


        to_be_serialized = mb.uploaded_data
        to_be_serialized = self.get_cached_temporary_batch_data(
            id, request.GET, session_key, bundledata=to_be_serialized)
        index_name = elasticsearch_client.get_temp_index_name(session_key, id)
        elasticsearch_client.get_action_totals(index_name, to_be_serialized)
        return self.create_response(request, to_be_serialized)
Example #7
File: tasks.py Project: biozz/django-q
def parzen_async():
    mu_vec = numpy.array([0, 0])
    cov_mat = numpy.array([[1, 0], [0, 1]])
    sample = numpy.random.multivariate_normal(mu_vec, cov_mat, 10000)
    widths = numpy.linspace(1.0, 1.2, 100)
    x = numpy.array([[0], [0]])
    # queue them all at once with async_iter
    args = [(sample, x, w) for w in widths]
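    # async_iter returns an id for the cached group result of all queued items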
    result_id = async_iter(parzen_estimation, args, cached=True)
    # return the cached result or timeout after 10 seconds
    return result(result_id, wait=10000, cached=True)
Example #8
def test_azure_sync_resource_create(get_subscription_and_session,
                                    get_resource_by_id, get_resources_list,
                                    mock_response_class, json_file,
                                    subscription, resource_group, broker,
                                    require_resource_types):

    #resource_group = mce_app_azure_resource_group

    data_resource_list = json_file("resource-list.json")
    data_resource = json_file("resource-vm.json")
    """
    resource_id = data_resource['id']
    group_name = resource_id.split('/')[4]
    group_id = f"/subscriptions/{subscription.pk}/resourceGroups/{group_name}"
    group = ResourceGroupAzure.objects.get(id__iexact=group_id)
    """

    count = len(data_resource_list['value'])
    get_subscription_and_session.return_value = (subscription,
                                                 requests.Session())
    get_resources_list.return_value = data_resource_list['value']
    get_resource_by_id.return_value = data_resource

    task_id = async_task('mce_tasks_djq.azure.sync_resource',
                         subscription.pk,
                         task_name='test.azure.sync.resource',
                         broker=broker,
                         sync=True)

    task = fetch(task_id)
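    # if the task failed, its result is shown as the assertion message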
    assert task.success is True, result(task_id)
    assert result(task_id) == dict(errors=0,
                                   created=count,
                                   updated=0,
                                   deleted=0)

    assert ResourceAzure.objects.count() == count

    assert ResourceEventChange.objects.filter(
        action=constants.EventChangeType.CREATE).count(
        ) == count + 1  # ResourceGroup
Example #9
    def multi_batch_save(self, request, **kwargs):
        """Save the data which has been cached in Elasticsearch"""

        deserialized = self.deserialize(request,
                                        request.body,
                                        format=request.META.get(
                                            'CONTENT_TYPE',
                                            'application/json'))
        session_key = request.COOKIES[settings.SESSION_COOKIE_NAME]
        deserialized = self.alter_deserialized_detail_data(
            request, deserialized)

        bundle = self.build_bundle(data=dict_strip_unicode_keys(deserialized),
                                   request=request)
        if bundle.obj.pk:
            self.authorized_update_detail(self.get_object_list(bundle.request),
                                          bundle)
        else:
            self.authorized_create_detail(self.get_object_list(bundle.request),
                                          bundle)

        id = bundle.data["multiplebatch"]
        mb = CBHCompoundMultipleBatch.objects.get(pk=id)
        creator_user = request.user
        try:
            if mb.batch_count < 100:
                # if it's a small batch, just do it synchronously
                res = save_multiple_batch(mb, creator_user, session_key)
            else:

                if not bundle.data.get("task_id_for_save", None):

                    mb.created_by = creator_user.username

                    bundle.data["task_id_for_save"] = async (
                        'cbh_chem_api.tasks.save_multiple_batch', mb,
                        creator_user, session_key)

                res = result(bundle.data["task_id_for_save"], wait=10)
            if res is True:
                return self.create_response(request,
                                            bundle,
                                            response_class=http.HttpCreated)
            if (isinstance(res, basestring)):
                raise Exception(res)
            return self.create_response(request,
                                        bundle,
                                        response_class=http.HttpAccepted)
        except:
            print "cleaning up due error during save transaction"
            clean_up_multi_batch(mb, session_key)
            raise
Example #11
    def test_success(self):

        with patch('grading_module.ag.grade',
                   return_value={
                       'success': True,
                       'report': 'blah blah',
                       'sha': 'abc123',
                       'score': 12
                   }) as patched_ag:

            batch = self.batch.id

            task_id = grading_queue.queue_grading_task(
                batch, self.asgt.id, self.std.id, self.pc.id,
                testing=True)  #asgt 1, student 1, class 1
            # wait...
            task_result = result(task_id, 2000)

            # Hook should have saved Grade
            grade = Grade.objects.get(batch=batch)
            grade_dict = model_to_dict(grade)
            grade_dict.pop('student_github_url')
            grade_dict.pop('id')

            expected_grade = {
                'ag_error': None,
                'batch': batch,
                'assignment': self.asgt.id,
                'student': self.std.id,
                'programming_class': self.pc.id,
                'score': Decimal(12),
                'generated_report': 'blah blah',
                'github_commit_hash': 'abc123',
                'instructor_comments': None
            }

            self.assertDictEqual(expected_grade, grade_dict)
            self.batch.refresh_from_db()
            self.assertEqual(1, self.batch.processed)
Example #12
def test_sync(broker):
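    # sync=True runs the task in-process, so the result is available immediately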
    task = async_task('django_q.tests.tasks.count_letters', DEFAULT_WORDLIST, broker=broker, sync=True)
    assert result(task) == 1506
Example #13
def test_enqueue(broker, admin_user):
    broker.list_key = 'cluster_test:q'
    broker.delete_queue()
    a = async_task('django_q.tests.tasks.count_letters', DEFAULT_WORDLIST, hook='django_q.tests.test_cluster.assert_result',
                   broker=broker)
    b = async_task('django_q.tests.tasks.count_letters2', WordClass(), hook='django_q.tests.test_cluster.assert_result',
                   broker=broker)
    # unknown argument
    c = async_task('django_q.tests.tasks.count_letters', DEFAULT_WORDLIST, 'oneargumentoomany',
                   hook='django_q.tests.test_cluster.assert_bad_result', broker=broker)
    # unknown function
    d = async_task('django_q.tests.tasks.does_not_exist', WordClass(), hook='django_q.tests.test_cluster.assert_bad_result',
                   broker=broker)
    # function without result
    e = async_task('django_q.tests.tasks.countdown', 100000, broker=broker)
    # function as instance
    f = async_task(multiply, 753, 2, hook=assert_result, broker=broker)
    # model as argument
    g = async_task('django_q.tests.tasks.get_task_name', Task(name='John'), broker=broker)
    # args,kwargs, group and broken hook
    h = async_task('django_q.tests.tasks.word_multiply', 2, word='django', hook='fail.me', broker=broker)
    # args unpickle test
    j = async_task('django_q.tests.tasks.get_user_id', admin_user, broker=broker, group='test_j')
    # q_options and save opt_out test
    k = async_task('django_q.tests.tasks.get_user_id', admin_user,
                   q_options={'broker': broker, 'group': 'test_k', 'save': False, 'timeout': 90})
    # test unicode
    assert Task(name='Amalia').__str__() == 'Amalia'
    # check if everything has a task id
    assert isinstance(a, str)
    assert isinstance(b, str)
    assert isinstance(c, str)
    assert isinstance(d, str)
    assert isinstance(e, str)
    assert isinstance(f, str)
    assert isinstance(g, str)
    assert isinstance(h, str)
    assert isinstance(j, str)
    assert isinstance(k, str)
    # run the cluster to execute the tasks
    task_count = 10
    assert broker.queue_size() == task_count
    task_queue = Queue()
    stop_event = Event()
    stop_event.set()
    # push the tasks
    for i in range(task_count):
        pusher(task_queue, stop_event, broker=broker)
    assert broker.queue_size() == 0
    assert task_queue.qsize() == task_count
    task_queue.put('STOP')
    # test wait timeout
    assert result(j, wait=10) is None
    assert fetch(j, wait=10) is None
    assert result_group('test_j', wait=10) is None
    assert result_group('test_j', count=2, wait=10) is None
    assert fetch_group('test_j', wait=10) is None
    assert fetch_group('test_j', count=2, wait=10) is None
    # let a worker handle them
    result_queue = Queue()
    worker(task_queue, result_queue, Value('f', -1))
    assert result_queue.qsize() == task_count
    result_queue.put('STOP')
    # store the results
    monitor(result_queue)
    assert result_queue.qsize() == 0
    # Check the results
    # task a
    result_a = fetch(a)
    assert result_a is not None
    assert result_a.success is True
    assert result(a) == 1506
    # task b
    result_b = fetch(b)
    assert result_b is not None
    assert result_b.success is True
    assert result(b) == 1506
    # task c
    result_c = fetch(c)
    assert result_c is not None
    assert result_c.success is False
    # task d
    result_d = fetch(d)
    assert result_d is not None
    assert result_d.success is False
    # task e
    result_e = fetch(e)
    assert result_e is not None
    assert result_e.success is True
    assert result(e) is None
    # task f
    result_f = fetch(f)
    assert result_f is not None
    assert result_f.success is True
    assert result(f) == 1506
    # task g
    result_g = fetch(g)
    assert result_g is not None
    assert result_g.success is True
    assert result(g) == 'John'
    # task h
    result_h = fetch(h)
    assert result_h is not None
    assert result_h.success is True
    assert result(h) == 12
    # task j
    result_j = fetch(j)
    assert result_j is not None
    assert result_j.success is True
    assert result_j.result == result_j.args[0].id
    # check fetch, result by name
    assert fetch(result_j.name) == result_j
    assert result(result_j.name) == result_j.result
    # groups
    assert result_group('test_j')[0] == result_j.result
    assert result_j.group_result()[0] == result_j.result
    assert result_group('test_j', failures=True)[0] == result_j.result
    assert result_j.group_result(failures=True)[0] == result_j.result
    assert fetch_group('test_j')[0].id == [result_j][0].id
    assert fetch_group('test_j', failures=False)[0].id == [result_j][0].id
    assert count_group('test_j') == 1
    assert result_j.group_count() == 1
    assert count_group('test_j', failures=True) == 0
    assert result_j.group_count(failures=True) == 0
    assert delete_group('test_j') == 1
    assert result_j.group_delete() == 0
    deleted_group = delete_group('test_j', tasks=True)
    assert deleted_group is None or deleted_group[0] == 0  # Django 1.9
    deleted_group = result_j.group_delete(tasks=True)
    assert deleted_group is None or deleted_group[0] == 0  # Django 1.9
    # task k should not have been saved
    assert fetch(k) is None
    assert fetch(k, 100) is None
    assert result(k, 100) is None
    broker.delete_queue()
Example #14
def get_result(task_id, wait=0, cached=Conf.CACHED):
    # Wrapper to get the result of a task with awareness of the schema
    schema_name = connection.schema_name
    with schema_context(schema_name):
        return result(task_id, wait, cached)
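A minimal usage sketch for the wrapper above, assuming django_q's async_task and the schema-aware get_result just shown; the task path and wait value are illustrative:

from django_q.tasks import async_task

def letter_count_for_tenant(wordlist):
    # queue the task, then block up to 500 ms for its result inside the current tenant schema
    task_id = async_task('django_q.tests.tasks.count_letters', wordlist)
    return get_result(task_id, wait=500)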
Example #15
    def get_list(self, request, **kwargs):
        """
        Returns a serialized list of resources.
        Calls ``obj_get_list`` to provide the data, then handles that result
        set and serializes it.
        Should return a HttpResponse (200 OK).
        Get a list of CBHCompoundbatches from elasticsearch by running a query
        """

        base_bundle = self.build_bundle(request=request)
        pids = request.GET.get("pids", "")
        project_ids = []
        if pids:

            project_ids = [int(pid) for pid in pids.split(",")]

        allowed_pids = set(self._meta.authorization.project_ids(request))

        for requested_pid in project_ids:
            if requested_pid not in allowed_pids:
                raise Unauthorized("No permissions for requested project")

        if len(project_ids) == 0:
            project_ids = allowed_pids

        queries = json.loads(
            b64decode(request.GET.get("encoded_query", EMPTY_ARRAY_B64)))

        extra_queries = kwargs.get("extra_queries", False)
        if extra_queries:
            #extra queries can be added via kwargs
            queries += extra_queries

        #Search for whether this item is archived or not ("archived is indexed as a string")
        archived = request.GET.get("archived", "false")
        queries.append({
            "query_type": "phrase",
            "field_path": "properties.archived",
            "phrase": archived
        })

        sorts = json.loads(
            b64decode(request.GET.get("encoded_sorts", EMPTY_ARRAY_B64)))
        if len(sorts) == 0:
            sorts = [{"field_path": "id", "sort_direction": "desc"}]
        textsearch = b64decode(request.GET.get("textsearch", ""))
        limit = request.GET.get("limit", 10)
        offset = request.GET.get("offset", 0)
        autocomplete = request.GET.get("autocomplete", "")
        autocomplete_field_path = request.GET.get("autocomplete_field_path",
                                                  "")
        autocomplete_size = request.GET.get("autocomplete_size",
                                            settings.MAX_AUTOCOMPLETE_SIZE)

        pr = ChemregProjectResource()
        resp = pr.get_list(request, do_cache=True)
        project_content = json.loads(resp.content)
        restricted_fieldnames = project_content["user_restricted_fieldnames"]

        # The project ids list needs to be reduced down, because we don't support OR queries:
        # every time you query a project for a field, only the projects that have that field
        # (and are unrestricted) need to be shown
        for q in queries:
            project_ids = self._meta.authorization.check_if_field_restricted(
                q["field_path"], project_ids, restricted_fieldnames)
        if autocomplete_field_path:
            project_ids = self._meta.authorization.check_if_field_restricted(
                autocomplete_field_path, project_ids, restricted_fieldnames)

        concatenated_indices = elasticsearch_client.get_list_of_indicies(
            project_ids)

        chemical_search_id = request.GET.get("chemical_search_id", False)
        batch_ids_by_project = None
        if chemical_search_id:
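            # block for up to 20 seconds while the structure search task completes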
            batch_result = result(chemical_search_id, wait=20000)
            if not batch_result:
                return HttpResponse(
                    '{"error": "Unable to process structure search"}',
                    status=503)
            else:
                batch_ids_by_project = batch_result[0]
        if request.GET.get("format", None) != "sdf" and request.GET.get(
                "format", None) != "xlsx":
            data = elasticsearch_client.get_list_data_elasticsearch(
                queries,
                concatenated_indices,
                sorts=sorts,
                offset=offset,
                limit=limit,
                textsearch=textsearch,
                autocomplete=autocomplete,
                autocomplete_field_path=autocomplete_field_path,
                autocomplete_size=autocomplete_size,
                batch_ids_by_project=batch_ids_by_project)

            if autocomplete_field_path:

                bucks = data["aggregations"]["filtered_field_path"][
                    "field_path_terms"]["buckets"]

                bundledata = {
                    "items":
                    bucks,
                    "autocomplete":
                    autocomplete,
                    "unique_count":
                    data["aggregations"]["filtered_field_path"]["unique_count"]
                    ["value"]
                }

            else:

                bundledata = self.prepare_es_hits(data)
                bundledata[
                    "objects"] = self._meta.authorization.removed_restricted_fields_if_present(
                        bundledata["objects"], restricted_fieldnames)
        else:
            #Limit , offset and autocomplete have no effect for a project export
            data = self.get_project_specific_data(request, queries,
                                                  project_ids, sorts,
                                                  textsearch,
                                                  batch_ids_by_project)

            return self.create_response(request, data)

        return self.create_response(request, bundledata)
Example #16
    def check_status(self, task_id):
        return result(task_id, -1)
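Since result(task_id, -1) blocks until a result arrives, a non-blocking variant is sketched below for comparison using django_q's fetch(); the function name is illustrative:

from django_q.tasks import fetch

def check_status_nonblocking(task_id):
    # fetch() returns the saved Task record, or None while the task is still pending
    task = fetch(task_id)
    if task is None:
        return 'pending'
    return 'success' if task.success else 'failed'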