    def test_edit_updated_geopoint_cache(self):
        query_args = {
            'xform': self.xform,
            'query': '{}',
            'fields': '[]',
            'count': True
        }
        xml_submission_file_path = os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            "..", "fixtures", "tutorial", "instances",
            "tutorial_2012-06-27_11-27-53_w_uuid.xml"
        )

        self._make_submission(xml_submission_file_path)
        self.assertEqual(self.response.status_code, 201)
        # query mongo for the _geopoint field
        query_args['count'] = False
        records = ParsedInstance.query_data(**query_args)
        self.assertEqual(len(records), 1)
        # submit the edited instance
        xml_submission_file_path = os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            "..", "fixtures", "tutorial", "instances",
            "tutorial_2012-06-27_11-27-53_w_uuid_edited.xml"
        )
        self._make_submission(xml_submission_file_path)
        self.assertEqual(self.response.status_code, 201)
        records = ParsedInstance.query_data(**query_args)
        self.assertEqual(len(records), 1)
        cached_geopoint = records[0][GEOLOCATION]
        # the cached geopoint should equal the gps field
        gps = records[0]['gps'].split(" ")
        self.assertEqual(float(gps[0]), float(cached_geopoint[0]))
        self.assertEqual(float(gps[1]), float(cached_geopoint[1]))
    def test_edited_submission(self):
        """
        Test submissions that have been edited
        """
        xml_submission_file_path = os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            "..", "fixtures", "tutorial", "instances",
            "tutorial_2012-06-27_11-27-53_w_uuid.xml"
        )
        num_instances_history = InstanceHistory.objects.count()
        num_instances = Instance.objects.count()
        query_args = {
            'xform': self.xform,
            'query': '{}',
            'fields': '[]',
            'count': True
        }

        cursor = [r for r in ParsedInstance.query_data(**query_args)]
        num_data_instances = cursor[0]['count']
        # make first submission
        self._make_submission(xml_submission_file_path)
        self.assertEqual(self.response.status_code, 201)
        self.assertEqual(Instance.objects.count(), num_instances + 1)
        # no new record in instances history
        self.assertEqual(
            InstanceHistory.objects.count(), num_instances_history)
        # check count of mongo instances after first submission
        cursor = ParsedInstance.query_data(**query_args)
        self.assertEqual(cursor[0]['count'], num_data_instances + 1)
        # edited submission
        xml_submission_file_path = os.path.join(
            os.path.dirname(os.path.abspath(__file__)),
            "..", "fixtures", "tutorial", "instances",
            "tutorial_2012-06-27_11-27-53_w_uuid_edited.xml"
        )
        client = DigestClient()
        client.set_authorization('bob', 'bob', 'Digest')
        self._make_submission(xml_submission_file_path, client=client)
        self.assertEqual(self.response.status_code, 201)
        # we must have the same number of instances
        self.assertEqual(Instance.objects.count(), num_instances + 1)
        # should be a new record in instances history
        self.assertEqual(
            InstanceHistory.objects.count(), num_instances_history + 1)
        cursor = ParsedInstance.query_data(**query_args)
        self.assertEqual(cursor[0]['count'], num_data_instances + 1)
        # make sure we edited the mongo db record and did NOT add a new row
        query_args['count'] = False
        cursor = ParsedInstance.query_data(**query_args)
        record = cursor[0]
        with open(xml_submission_file_path, "r") as f:
            xml_str = f.read()
        xml_str = clean_and_parse_xml(xml_str).toxml()
        edited_name = re.match(ur"^.+?<name>(.+?)</name>", xml_str).groups()[0]
        self.assertEqual(record['name'], edited_name)
    def _get_data(self, query, fields, sort, start, limit, is_public_request):
        try:
            where, where_params = ParsedInstance._get_where_clause(query)
        except ValueError as e:
            raise ParseError(unicode(e))

        if where:
            self.object_list = self.object_list.extra(where=where,
                                                      params=where_params)
        if (sort or limit or start or fields) and not is_public_request:
            if self.object_list.count():
                xform = self.object_list[0].xform
                self.object_list = \
                    ParsedInstance.query_data(
                        xform, query=query, sort=sort,
                        start_index=start, limit=limit,
                        fields=fields)

        if not isinstance(self.object_list, types.GeneratorType):
            page = self.paginate_queryset(self.object_list)
            serializer = self.get_pagination_serializer(page)
        else:
            serializer = self.get_serializer(self.object_list, many=True)
            page = None

        return Response(serializer.data)
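
A hedged usage sketch (not from the original source) of how the _get_data helper above might be called once the owning viewset has pulled query, fields, sort and paging values out of the request; the filter, field names and limit are illustrative only.

def example_filtered_page(view):
    # `view` is assumed to be the viewset instance that defines _get_data above
    return view._get_data(
        query='{"name": "Bob"}',    # Mongo-style JSON filter (illustrative)
        fields='["name", "gps"]',   # JSON list of fields to project
        sort='{}',                  # sort spec forwarded to query_data
        start=0,
        limit=100,                  # paging window (illustrative)
        is_public_request=False,
    )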
 def _query_data(self, query='{}', start=0,
                 limit=ParsedInstance.DEFAULT_LIMIT,
                 fields='[]', count=False):
     # ParsedInstance.query_data takes its query and fields params as JSON
     # strings, so they are passed straight through below
     count_args = {
         'xform': self.xform,
         'query': query,
         'start': self.start,
         'end': self.end,
         'fields': '[]',
         'sort': '{}',
         'count': True
     }
     count_object = list(ParsedInstance.query_data(**count_args))
     record_count = count_object[0]["count"]
     if record_count < 1:
         raise NoRecordsFoundError("No records found for your query")
     # if count was requested, return the count
     if count:
         return record_count
     else:
         query_args = {
             'xform': self.xform,
             'query': query,
             'fields': fields,
             'start': self.start,
             'end': self.end,
             # TODO: we might want to add this in for the user
             # to specify a sort order
             'sort': 'id',
             'start_index': start,
             'limit': limit,
             'count': False
         }
         # run the query with ParsedInstance.query_data
         cursor = ParsedInstance.query_data(**query_args)
         return cursor
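
A minimal sketch (assumption, not part of the original source) of calling ParsedInstance.query_data directly with the same JSON-string query/fields arguments that _query_data builds above; the xform and filter values are illustrative, and ParsedInstance is assumed to be imported as in the surrounding snippets.

def example_query_records(xform):
    query_args = {
        'xform': xform,                 # the XForm whose submissions are queried
        'query': '{"name": "Bob"}',     # Mongo-style JSON filter (illustrative)
        'fields': '["name", "gps"]',    # JSON list of fields to return
        'sort': '{}',
        'start_index': 0,
        'limit': 100,
        'count': False,
    }
    return list(ParsedInstance.query_data(**query_args))
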
 def test_owner_can_delete(self):
     # Test that the form owner can delete a submission:
     # check the record exists before the delete and is marked deleted after
     count = Instance.objects.filter(deleted_at=None).count()
     instance = Instance.objects.filter(
         xform=self.xform).latest('date_created')
     self.assertEqual(instance.deleted_at, None)
     # delete
     params = {'id': instance.id}
     response = self.client.post(self.delete_url, params)
     self.assertEqual(response.status_code, 200)
     self.assertEqual(
         Instance.objects.filter(deleted_at=None).count(), count - 1)
     instance = Instance.objects.get(id=instance.id)
     self.assertTrue(isinstance(instance.deleted_at, datetime))
     self.assertNotEqual(instance.deleted_at, None)
     query = '{"_id": %s}' % instance.id
     self.data_args.update({"query": query})
     after = [r for r in ParsedInstance.query_data(**self.data_args)]
     self.assertEqual(len(after), count - 1)
    def test_edited_submission_require_auth(self):
        """
        Test submissions that have been edited
        """
        xml_submission_file_path = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), "..", "fixtures",
            "tutorial", "instances", "tutorial_2012-06-27_11-27-53_w_uuid.xml")
        # require authentication
        self.user.profile.require_auth = True
        self.user.profile.save()

        num_instances_history = InstanceHistory.objects.count()
        num_instances = Instance.objects.count()
        query_args = {
            'xform': self.xform,
            'query': '{}',
            'fields': '[]',
            'count': True
        }
        cursor = ParsedInstance.query_data(**query_args)
        num_data_instances = cursor[0]['count']
        # make first submission
        self._make_submission(xml_submission_file_path)

        self.assertEqual(self.response.status_code, 201)
        self.assertEqual(Instance.objects.count(), num_instances + 1)
        # no new record in instances history
        self.assertEqual(InstanceHistory.objects.count(),
                         num_instances_history)
        # check count of mongo instances after first submission
        cursor = ParsedInstance.query_data(**query_args)
        self.assertEqual(cursor[0]['count'], num_data_instances + 1)

        # create a new user
        alice = self._create_user('alice', 'alice')
        UserProfile.objects.create(user=alice)
        auth = DigestAuth('alice', 'alice')

        # edited submission
        xml_submission_file_path = os.path.join(
            os.path.dirname(os.path.abspath(__file__)), "..", "fixtures",
            "tutorial", "instances",
            "tutorial_2012-06-27_11-27-53_w_uuid_edited.xml")
        self._make_submission(xml_submission_file_path, auth=auth)
        self.assertEqual(self.response.status_code, 403)

        # assign report and change_xform perms to alice
        assign_perm('report_xform', alice, self.xform)
        assign_perm('logger.change_xform', alice, self.xform)

        self._make_submission(xml_submission_file_path, auth=auth)
        self.assertEqual(self.response.status_code, 201)
        # we must have the same number of instances
        self.assertEqual(Instance.objects.count(), num_instances + 1)
        # should be a new record in instances history
        self.assertEqual(InstanceHistory.objects.count(),
                         num_instances_history + 1)
        cursor = ParsedInstance.query_data(**query_args)
        self.assertEqual(cursor[0]['count'], num_data_instances + 1)
        # make sure we edited the mongo db record and did NOT add a new row
        query_args['count'] = False
        cursor = ParsedInstance.query_data(**query_args)
        record = cursor[0]
        with open(xml_submission_file_path, "r") as f:
            xml_str = f.read()
        xml_str = clean_and_parse_xml(xml_str).toxml()
        edited_name = re.match(ur"^.+?<name>(.+?)</name>", xml_str).groups()[0]
        self.assertEqual(record['name'], edited_name)
def generate_export(export_type,
                    extension,
                    username,
                    id_string,
                    export_id=None,
                    filter_query=None,
                    group_delimiter='/',
                    split_select_multiples=True,
                    binary_select_multiples=False,
                    start=None,
                    end=None,
                    remove_group_name=False):
    """
    Create appropriate export object given the export type
    """
    # TODO resolve circular import
    from onadata.apps.viewer.models.export import Export
    export_type_func_map = {
        Export.XLS_EXPORT: 'to_xls_export',
        Export.CSV_EXPORT: 'to_flat_csv_export',
        Export.CSV_ZIP_EXPORT: 'to_zipped_csv',
        Export.SAV_ZIP_EXPORT: 'to_zipped_sav',
    }

    xform = XForm.objects.get(user__username__iexact=username,
                              id_string__iexact=id_string)

    records = ParsedInstance.query_data(xform,
                                        query=filter_query,
                                        start=start,
                                        end=end)

    export_builder = ExportBuilder()

    export_builder.TRUNCATE_GROUP_TITLE = remove_group_name
    export_builder.GROUP_DELIMITER = group_delimiter
    export_builder.SPLIT_SELECT_MULTIPLES = split_select_multiples
    export_builder.BINARY_SELECT_MULTIPLES = binary_select_multiples
    export_builder.set_survey(xform.data_dictionary().survey)

    temp_file = NamedTemporaryFile(suffix=("." + extension))

    # get the export function by export type
    func = getattr(export_builder, export_type_func_map[export_type])
    try:
        func.__call__(temp_file.name,
                      records,
                      username,
                      id_string,
                      filter_query,
                      start=start,
                      end=end)
    except NoRecordsFoundError:
        pass

    # generate filename
    basename = "%s_%s" % (id_string,
                          datetime.now().strftime("%Y_%m_%d_%H_%M_%S"))
    filename = basename + "." + extension

    # check filename is unique
    while not Export.is_filename_unique(xform, filename):
        filename = increment_index_in_filename(filename)

    file_path = os.path.join(username, 'exports', id_string, export_type,
                             filename)

    # TODO: if s3 storage, make private - how will we protect local storage??
    storage = get_storage_class()()
    # seek to the beginning as required by storage classes
    temp_file.seek(0)
    export_filename = storage.save(file_path, File(temp_file, file_path))
    temp_file.close()

    dir_name, basename = os.path.split(export_filename)

    # get or create export object
    if export_id:
        export = Export.objects.get(id=export_id)
    else:
        export = Export(xform=xform, export_type=export_type)
    export.filedir = dir_name
    export.filename = basename
    export.internal_status = Export.SUCCESSFUL
    # don't persist exports that have a filter
    if filter_query is None and start is None and end is None:
        export.save()
    return export
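
A hedged usage sketch (not part of the original source): requesting a CSV export of a form through generate_export as defined above. Export.CSV_EXPORT comes from the same onadata.apps.viewer.models.export import the function itself uses; the username, id_string and filter value are illustrative.

from onadata.apps.viewer.models.export import Export

csv_export = generate_export(
    Export.CSV_EXPORT, 'csv', 'bob', 'tutorial',
    filter_query='{"name": "Bob"}',   # optional Mongo-style filter (illustrative)
    group_delimiter='/',
    split_select_multiples=True,
)
# csv_export.filedir / csv_export.filename point at the stored file; note that
# filtered exports are not persisted (see the final `if` in generate_export).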
def generate_external_export(export_type,
                             username,
                             id_string,
                             export_id=None,
                             token=None,
                             filter_query=None,
                             meta=None,
                             data_id=None):

    xform = XForm.objects.get(user__username__iexact=username,
                              id_string__iexact=id_string)
    user = User.objects.get(username=username)

    server, name = _get_server_from_metadata(xform, meta, token)

    # dissect the url
    parsed_url = urlparse(server)

    token = parsed_url.path[5:]

    ser = parsed_url.scheme + '://' + parsed_url.netloc

    # Get single submission data
    if data_id:
        inst = Instance.objects.filter(xform__user=user,
                                       xform__id_string=id_string,
                                       deleted_at=None,
                                       pk=data_id)

        instances = [inst[0].get_dict() if inst else {}]
    else:
        instances = ParsedInstance.query_data(xform, query=filter_query)

    records = _get_records(instances)

    status_code = 0
    if records and server:

        try:

            client = Client(ser)
            response = client.xls.create(token, json.dumps(records))

            if hasattr(client.xls.conn, 'last_response'):
                status_code = client.xls.conn.last_response.status_code
        except Exception as e:
            raise J2XException(
                u"J2X client could not generate report. Server -> {0},"
                u" Error-> {1}".format(server, e))
    else:
        if not server:
            raise J2XException(u"External server not set")
        elif not records:
            raise J2XException(
                u"No record to export. Form -> {0}".format(id_string))

    # get or create export object
    if export_id:
        export = Export.objects.get(id=export_id)
    else:
        export = Export.objects.create(xform=xform, export_type=export_type)

    export.export_url = response
    if status_code == 201:
        export.internal_status = Export.SUCCESSFUL
        export.filename = name + '-' + response[5:] if name else response[5:]
        export.export_url = ser + response
    else:
        export.internal_status = Export.FAILED

    export.save()

    return export
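
A hedged usage sketch (not part of the original source): pushing a form's records to the external XLS-report service via generate_external_export above. Export.EXTERNAL_EXPORT is an assumed constant name for this export type; username, id_string and the filter are illustrative, and token/meta are simply forwarded to _get_server_from_metadata to resolve the target server.

external = generate_external_export(
    Export.EXTERNAL_EXPORT,           # assumed constant name for this export type
    'bob', 'tutorial',
    token=None, meta=None,            # forwarded to _get_server_from_metadata
    filter_query='{"name": "Bob"}',   # optional Mongo-style filter (illustrative)
)
# On a 201 from the external server, external.export_url holds the report URL
# and internal_status is Export.SUCCESSFUL; otherwise it is Export.FAILED.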
 def _get_data(self):
     cursor = ParsedInstance.query_data(**self.data_args)
     records = list(record for record in cursor)
     return records
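
A minimal sketch (assumption, not from the original source) of the data_args dict that _get_data above expects, modelled on the query_args dicts used in the tests earlier in this listing.

 def _example_data_args(self):
     # hedged helper; self.xform is assumed to exist, as in the snippet above
     return {
         'xform': self.xform,
         'query': '{}',      # Mongo-style JSON filter; '{}' matches everything
         'fields': '[]',     # JSON list of fields to project; '[]' means all
         'sort': '{}',
         'count': False,
     }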