Example #1
    def test_exporter_sends_second_email_after_2_hours(self):

        message_count = len(TEST_MESSAGES)

        log = DataTransferLog(last_updated=datetime.datetime.now() -
                              datetime.timedelta(minutes=30),
                              total_records=2,
                              type="export")
        db.session.add(log)
        db.session.commit()
        ExportService.send_alert_if_exports_not_running()
        self.assertGreater(len(TEST_MESSAGES), message_count)
        self.assertEqual(
            "Star Drive: Error - 30 minutes since last successful export",
            self.decode(TEST_MESSAGES[-1]['subject']))

        log.last_updated = datetime.datetime.now() - datetime.timedelta(
            minutes=120)
        db.session.add(log)
        db.session.commit()
        ExportService.send_alert_if_exports_not_running()
        self.assertGreater(len(TEST_MESSAGES), message_count + 1,
                           "another email should have gone out")
        self.assertEqual(
            "Star Drive: Error - 2 hours since last successful export",
            self.decode(TEST_MESSAGES[-1]['subject']))
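These tests read outgoing mail from a TEST_MESSAGES list. A minimal sketch of how such a capture hook could be wired up in a test configuration (hypothetical; the project's actual mailer and hook may differ):

# Hypothetical test capture hook; names and wiring are assumptions.
from email.mime.text import MIMEText

TEST_MESSAGES = []  # the alert tests inspect subjects and recipients on this list

def send_email(subject, recipients, body):
    msg = MIMEText(body)
    msg['subject'] = subject
    msg['To'] = ', '.join(recipients)
    if app.config.get('TESTING'):
        TEST_MESSAGES.append(msg)  # capture the message instead of sending it
    else:
        pass  # real delivery (SMTP, mail API, etc.) would happen here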
Example #2
 def get(self, name, participant_id):
     class_ref = ExportService.get_class(name)
     schema = ExportService.get_schema(name, many=True)
     questionnaires = db.session.query(class_ref)\
         .filter(class_ref.participant_id == participant_id)\
         .all()
     return schema.dump(questionnaires)
Example #3
 def add_step(self, questionnaireName):
     if not self.has_step(questionnaireName):
         class_name = ExportService.camel_case_it(questionnaireName)
         cls = ExportService.get_class(class_name)
         q = cls()
         step = Step(questionnaireName, q.__question_type__, q.__label__)
         self.steps.append(step)
Example #4
 def get(self, name):
     name = ExportService.camel_case_it(name)
     if self.request_wants_json():
         schema = ExportService.get_schema(name, many=True)
         return schema.dump(ExportService().get_data(name))
     else:
         return ExportXlsService.export_xls(name=name, app=app)
Example #5
    def get(self, name):
        if name == "admin":
            return self.get_admin()

        name = ExportService.camel_case_it(name)
        schema = ExportService.get_schema(name, many=True)
        return schema.dump(ExportService().get_data(name, last_updated=get_date_arg()))
Example #6
    def put(self, name, id):
        """
        Modifies an existing questionnaire record.

        Parameters:
            name (str):
                Snake-cased name of the questionnaire class (should also match the table name),
                found in app.model.questionnaires.
                E.g., clinical_diagnoses_questionnaire -> ClinicalDiagnosesQuestionnaire

            id (int): ID of the questionnaire record to retrieve

        Returns: The updated questionnaire record.
        """
        name = ExportService.camel_case_it(name)
        class_ref = ExportService.get_class(name)
        instance = db.session.query(class_ref).filter(
            class_ref.id == id).first()
        schema = ExportService.get_schema(name, session=db.session)
        request_data = request.get_json()
        if "_links" in request_data:
            request_data.pop("_links")

        try:
            updated = schema.load(request_data, instance=instance)
        except Exception as errors:
            raise RestException(RestException.INVALID_OBJECT, details=errors)

        updated.last_updated = datetime.datetime.utcnow()
        db.session.add(updated)
        db.session.commit()
        return schema.dump(updated)
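The docstring above notes that the snake-cased URL name maps to a Pascal-cased class name (clinical_diagnoses_questionnaire -> ClinicalDiagnosesQuestionnaire). ExportService.camel_case_it presumably does something along these lines (a sketch, not the project's code):

def camel_case_it(snake_name):
    # "clinical_diagnoses_questionnaire" -> "ClinicalDiagnosesQuestionnaire"
    return ''.join(part.title() for part in snake_name.split('_'))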
Example #7
    def post(self, flow, questionnaire_name):
        flow = Flows.get_flow_by_name(flow)
        if flow is None:
            raise RestException(RestException.NOT_FOUND)
        if not flow.has_step(questionnaire_name):
            raise RestException(RestException.NOT_IN_THE_FLOW)
        request_data = request.get_json()
        request_data["user_id"] = g.user.id
        if "_links" in request_data:
            request_data.pop("_links")
        schema = ExportService.get_schema(
            ExportService.camel_case_it(questionnaire_name))
        new_quest, errors = schema.load(request_data, session=db.session)

        if errors:
            raise RestException(RestException.INVALID_OBJECT, details=errors)
        if new_quest.participant_id is None:
            raise RestException(RestException.INVALID_OBJECT,
                                details="You must supply a participant id.")
        if not g.user.related_to_participant(new_quest.participant_id):
            raise RestException(RestException.UNRELATED_PARTICIPANT)
        db.session.add(new_quest)
        db.session.commit()
        self.log_progress(flow, questionnaire_name, new_quest)
        return schema.dump(new_quest)
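Per the checks above, the posted JSON must include a participant_id the current user is related to; user_id is filled in server-side and any _links block is discarded. A hypothetical request through the test client used elsewhere on this page (the route and payload are assumptions):

import json

# Hypothetical route and payload, for illustration only.
rv = self.app.post('/api/flow/intake/clinical_diagnoses_questionnaire',
                   data=json.dumps({'participant_id': 1}),
                   content_type="application/json",
                   headers=self.logged_in_headers())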
Example #8
    def get(self, name):
        """
        Retrieves metadata about the given questionnaire name. Includes JSON Formly field definition.
        Used for data export to get meta without specifying flow and relationship.

        Returns:
            A dict object containing the metadata about the questionnaire. Example:
            {
                table: {
                    question_type: "sensitive",
                    label: "Clinical Diagnosis"
                },
                fields: [
                    {
                    name: "id",
                    key: "id",
                    display_order: 0
                    },
                    ...
                ]
            }
        """
        name = ExportService.camel_case_it(name)
        class_ref = ExportService.get_class(name)
        questionnaire = class_ref()
        meta = {"table": {}}
        try:
            meta["table"]['question_type'] = questionnaire.__question_type__
            meta["table"]["label"] = questionnaire.__label__
        except:
            pass  # If these fields don't exist, just keep going.
        meta["fields"] = []

        # This will move fields referenced by the field groups into the group, but will otherwise add them
        # to the base meta object if they are not contained within a group.
        for c in questionnaire.__table__.columns:
            if c.info:
                c.info['name'] = c.name
                c.info['key'] = c.name
                meta['fields'].append(c.info)
            elif c.type.python_type == datetime.datetime:
                meta['fields'].append({
                    'name': c.name,
                    'key': c.name,
                    'display_order': 0,
                    'type': 'DATETIME'
                })
            else:
                meta['fields'].append({
                    'name': c.name,
                    'key': c.name,
                    'display_order': 0
                })

        # Sort the fields
        meta['fields'] = sorted(meta['fields'],
                                key=lambda field: field['display_order'])

        return meta
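The loop above pulls each column's info dict straight into meta['fields'], adding name and key. A hypothetical model fragment showing where such an info dict would come from (the model, column names, and options are made up):

from sqlalchemy import Column, Integer, String

class ExampleQuestionnaire(db.Model):  # hypothetical model
    id = Column(Integer, primary_key=True, info={'display_order': 0})
    self_description = Column(String, info={'display_order': 1,
                                            'type': 'textarea',
                                            'template_options': {'label': 'Describe'}})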
Example #9
 def get(self, flow, questionnaire_name):
     questionnaire_name = ExportService.camel_case_it(questionnaire_name)
     flow = Flows.get_flow_by_name(flow)
     if flow is None:
         raise RestException(RestException.NOT_FOUND)
     class_ref = ExportService.get_class(questionnaire_name)
     questionnaire = class_ref()
     return ExportService.get_meta(questionnaire, flow.relationship)
Example #10
 def test_exporter_notifies_PI_after_24_hours(self):
     message_count = len(TEST_MESSAGES)
     log = DataTransferLog(last_updated=datetime.datetime.utcnow() - datetime.timedelta(hours=24), total_records=2,
                           type="export")
     db.session.add(log)
     db.session.commit()
     ExportService.send_alert_if_exports_not_running()
     self.assertTrue("*****@*****.**" in TEST_MESSAGES[-1]['To'])
Example #11
 def get(self, name, id):
     name = ExportService.camel_case_it(name)
     class_ref = ExportService.get_class(name)
     instance = db.session.query(class_ref).filter(
         class_ref.id == id).first()
     if instance is None:
         raise RestException(RestException.NOT_FOUND)
     schema = ExportService.get_schema(name)
     return schema.dump(instance)
Example #12
 def test_exporter_sends_20_emails_over_first_48_hours(self):
     message_count = len(TEST_MESSAGES)
     log = DataTransferLog(last_updated=datetime.datetime.utcnow() - datetime.timedelta(days=2), total_records=2,
                           type="export")
     db.session.add(log)
     db.session.commit()
     for i in range(20):
         ExportService.send_alert_if_exports_not_running()
     self.assertEqual(message_count + 20, len(TEST_MESSAGES), msg="20 emails should have gone out.")
Example #13
    def load_data(self, export_info, log):
        if len(export_info.json_data) < 1:
            return  # Nothing to do here.
        schema = ExportService.get_schema(export_info.class_name,
                                          many=False,
                                          is_import=True)
        model_class = ExportService.get_class(export_info.class_name)
        log_detail = DataTransferLogDetail(class_name=export_info.class_name,
                                           date_started=log.date_started,
                                           successful=True,
                                           success_count=0,
                                           failure_count=0)
        log.details.append(log_detail)
        for item in export_info.json_data:
            item_copy = dict(item)
            if "_links" in item_copy:
                links = item_copy.pop("_links")
            existing_model = self.db.session.query(model_class).filter_by(
                id=item['id']).first()
            try:
                if existing_model:
                    model = schema.load(data=item_copy,
                                        session=self.db.session,
                                        instance=existing_model)
                else:
                    model = schema.load(data=item_copy,
                                        session=self.db.session)

                try:
                    self.db.session.add(model)
                    self.db.session.commit()
                    log_detail.handle_success()
                    self.db.session.add(log_detail)
                    if hasattr(model, '__question_type__') and \
                            model.__question_type__ == ExportService.TYPE_SENSITIVE:
                        print("Sensitive Data.  Calling Delete.")
                        self.delete_record(item)
                except Exception as e:
                    self.db.session.rollback()
                    self.logger.error("Error processing " +
                                      export_info.class_name + " with id of " +
                                      str(item["id"]) + ".  Error: " + str(e))
                    log_detail.handle_failure(e)
                    self.db.session.add(log)
                    self.db.session.add(log_detail)
                    raise e

            except Exception as e:
                e = Exception("Failed to parse model " +
                              export_info.class_name + ". " + str(e))
                log_detail.handle_failure(e)
                self.db.session.add(log)
                self.db.session.add(log_detail)
                raise e

        self.db.session.commit()
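load_data only touches two attributes of export_info (class_name and json_data) plus the passed-in log. A hypothetical shape and call, for orientation only; the real ExportInfo and import service objects may differ:

class ExportInfo:  # hypothetical stand-in
    def __init__(self, class_name, json_data):
        self.class_name = class_name
        self.json_data = json_data

log = DataTransferLog(type="import", date_started=datetime.datetime.utcnow())
importer.load_data(ExportInfo("ClinicalDiagnosesQuestionnaire",
                              [{"id": 1, "participant_id": 1}]), log)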
Example #14
    def test_exporter_sends_no_email_alert_if_less_than_30_minutes_pass_without_export(self):

        message_count = len(TEST_MESSAGES)

        log = DataTransferLog(last_updated=datetime.datetime.utcnow() - datetime.timedelta(minutes=28), total_records=2,
                              type="export")
        db.session.add(log)
        db.session.commit()
        ExportService.send_alert_if_exports_not_running()
        self.assertEqual(len(TEST_MESSAGES), message_count)
Example #15
 def delete(self, name, id):
     try:
         name = ExportService.camel_case_it(name)
         class_ref = ExportService.get_class(name)
         instance = db.session.query(class_ref).filter(
             class_ref.id == id).first()
         db.session.delete(instance)
         #            db.session.query(class_ref).filter(class_ref.id == id).delete()
         db.session.commit()
     except IntegrityError as error:
         raise RestException(RestException.CAN_NOT_DELETE)
     return
Example #16
 def test_exporter_sends_12_emails_over_first_24_hours(self):
     """
     If more than 24 hours pass without an export from the Public Mirror to the Private Mirror, an email will be
      sent to an administrative email address at the 30-minute mark and then every 2 hours after that.
     """
     message_count = len(TEST_MESSAGES)
     date = datetime.datetime.utcnow() - datetime.timedelta(hours=22)
     log = DataTransferLog(last_updated=date,
                           total_records=2, type="export")
     db.session.add(log)
     db.session.commit()
     for i in range(20):
         ExportService.send_alert_if_exports_not_running()
     self.assertEqual(message_count + 12, len(TEST_MESSAGES), msg="12 emails should have gone out.")
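Taken together, the alert tests on this page are consistent with one alert at the 30-minute mark and then one every 2 hours, with the principal investigator added after 24 hours. The sketch below only enumerates those inferred thresholds; it is not the project's implementation:

# Inferred alert thresholds, in minutes since the last successful export (assumption).
ALERT_THRESHOLDS = [30] + [h * 60 for h in range(2, 48, 2)]
# 22 hours elapsed  -> 12 thresholds passed (matches the 12-email assertion above)
# 24+ hours elapsed -> the PI address is also notified (see Example #10)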
Example #17
    def get(self):

        date_started = datetime.datetime.now()
        info_list = ExportService.get_table_info(get_date_arg())

        # Remove items that are not exportable, or that are identifying
        info_list = [item for item in info_list if item.exportable]
        info_list = [item for item in info_list if item.question_type != ExportService.TYPE_IDENTIFYING]

        # Get a count of the records, and log it.
        log = DataTransferLog(type="export")
        total_records_for_export = 0
        for item in info_list:
            total_records_for_export += item.size
            if item.size > 0:
                log_detail = DataTransferLogDetail(date_started=date_started, class_name=item.class_name,
                                                   successful=True, success_count=item.size)
                log.details.append(log_detail)
        log.total_records = total_records_for_export

        # If we find we aren't exporting anything, don't create a new log, just update the last one.
        if total_records_for_export == 0:
            log = db.session.query(DataTransferLog).filter(DataTransferLog.type == 'export')\
                .order_by(desc(DataTransferLog.last_updated)).limit(1).first()
            if log is None: log = DataTransferLog(type="export", total_records=0)
            log.last_updated = datetime.datetime.now()
        db.session.add(log)
        db.session.commit()

        return self.schema.dump(info_list)
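get_date_arg presumably just parses the optional ?after= query parameter that the export endpoints honor (see the test further down that builds "?after=" with ExportService.DATE_FORMAT). A sketch under that assumption:

def get_date_arg():
    # Hypothetical helper: parse the optional ?after= query parameter, if present.
    date_arg = request.args.get('after')
    if date_arg:
        return datetime.datetime.strptime(date_arg, ExportService.DATE_FORMAT)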
Example #18
    def put(self, name, id):
        name = ExportService.camel_case_it(name)
        class_ref = ExportService.get_class(name)
        instance = db.session.query(class_ref).filter(
            class_ref.id == id).first()
        schema = ExportService.get_schema(name, session=db.session)
        request_data = request.get_json()
        if "_links" in request_data:
            request_data.pop("_links")
        updated, errors = schema.load(request_data, instance=instance)

        if errors:
            raise RestException(RestException.INVALID_OBJECT, details=errors)
        updated.last_updated = datetime.datetime.utcnow()
        db.session.add(updated)
        db.session.commit()
        return schema.dump(updated)
Example #19
 def test_retrieve_records_later_than(self):
     self.construct_everything()
     date = datetime.datetime.utcnow() + datetime.timedelta(seconds=1)  # One second in the future
     exports = ExportService.get_table_info()
     params = "?after=" + date.strftime(ExportService.DATE_FORMAT)
     for export in exports:
         rv = self.app.get(export.url + params, follow_redirects=True, content_type="application/json",
                           headers=self.logged_in_headers())
         data = json.loads(rv.get_data(as_text=True))
         self.assertEqual(0, len(data), msg=export.url + " does not respect 'after' param in get request.")
Example #20
 def test_sensitive_records_returned_can_be_deleted(self):
     self.construct_all_questionnaires()
     exports = ExportService.get_table_info()
     for export in exports:
         rv = self.app.get(export.url, follow_redirects=True, content_type="application/json",
                           headers=self.logged_in_headers())
         data = json.loads(rv.get_data(as_text=True))
         for d in data:
             if export.question_type == ExportService.TYPE_SENSITIVE:
                 del_rv = self.app.delete(d['_links']['self'], headers=self.logged_in_headers())
                 self.assert_success(del_rv)
Example #21
 def get_questionnaire_names(app):
     all_file_names = os.listdir(
         os.path.dirname(app.instance_path) + '/app/model/questionnaires')
     non_questionnaires = ['mixin', '__']
     questionnaire_file_names = []
     for index, file_name in enumerate(all_file_names):
         if any(string in file_name for string in non_questionnaires):
             pass
         else:
             f = file_name.replace(".py", "")
             questionnaire_file_names.append(ExportService.camel_case_it(f))
     return sorted(questionnaire_file_names)
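For orientation, with a hypothetical directory listing the helper above would behave like this:

# Hypothetical input/output:
#   ['clinical_diagnoses_questionnaire.py', 'questionnaire_mixin.py', '__init__.py']
#   -> ['ClinicalDiagnosesQuestionnaire']
# ('mixin' and '__' entries are skipped, '.py' is stripped, names are camel-cased and sorted)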
Example #22
    def test_exporter_sends_second_email_after_2_hours(self):
        """
        If more than 2 hours pass without an export from the Public Mirror to the Private Mirror, an email will be
        sent to an administrative email address at the 30-minute and 2-hour marks.
        """
        message_count = len(TEST_MESSAGES)

        log = DataTransferLog(last_updated=datetime.datetime.utcnow() - datetime.timedelta(minutes=30), total_records=2, type="export")
        db.session.add(log)
        db.session.commit()
        ExportService.send_alert_if_exports_not_running()
        print('@ 30 minutes:', len(TEST_MESSAGES), 'messages')
        self.assertGreater(len(TEST_MESSAGES), message_count)
        self.assertEqual("Autism DRIVE: Error - 30 minutes since last successful export", self.decode(TEST_MESSAGES[-1]['subject']))

        log.last_updated = datetime.datetime.utcnow() - datetime.timedelta(minutes=120)
        db.session.add(log)
        db.session.commit()
        ExportService.send_alert_if_exports_not_running()
        print('@ 2 hours:', len(TEST_MESSAGES), 'messages')
        self.assertGreater(len(TEST_MESSAGES), message_count + 1, "another email should have gone out")
        self.assertEqual("Autism DRIVE: Error - 2 hours since last successful export", self.decode(TEST_MESSAGES[-1]['subject']))
Example #23
    def get(self, name, id):
        """
        Returns a single questionnaire record.

        Parameters:
            name (str):
                Snake-cased name of the questionnaire class (should also match the table name),
                found in app.model.questionnaires.
                E.g., clinical_diagnoses_questionnaire -> ClinicalDiagnosesQuestionnaire

            id (int): ID of the questionnaire record to retrieve

        Returns: A single questionnaire record.
        """
        name = ExportService.camel_case_it(name)
        class_ref = ExportService.get_class(name)
        instance = db.session.query(class_ref).filter(
            class_ref.id == id).first()
        if instance is None:
            raise RestException(RestException.NOT_FOUND)
        schema = ExportService.get_schema(name)
        return schema.dump(instance)
Example #24
 def test_all_sensitive_exports_have_links_to_self(self):
     self.construct_everything()
     exports = ExportService.get_table_info()
     for export in exports:
         if export.question_type != ExportService.TYPE_SENSITIVE:
             continue
         rv = self.app.get(export.url, follow_redirects=True, content_type="application/json",
                           headers=self.logged_in_headers())
         data = json.loads(rv.get_data(as_text=True))
         for d in data:
             self.assertTrue('_links' in d, msg="%s should have links in json." % export.class_name)
             self.assertTrue('self' in d['_links'])
             self.assert_success(self.app.get(d['_links']['self'], headers=self.logged_in_headers()))
Example #25
    def get(self, name):
        name = ExportService.camel_case_it(name)
        class_ref = ExportService.get_class(name)
        questionnaire = class_ref()
        meta = {"table": {}}
        try:
            meta["table"]['question_type'] = questionnaire.__question_type__
            meta["table"]["label"] = questionnaire.__label__
        except:
            pass  # If these fields don't exist, just keep going.
        meta["fields"] = []

        # This will move fields referenced by the field groups into the group, but will otherwise add them
        # to the base meta object if they are not contained within a group.
        for c in questionnaire.__table__.columns:
            if c.info:
                c.info['name'] = c.name
                c.info['key'] = c.name
                meta['fields'].append(c.info)
            elif c.type.python_type == datetime.datetime:
                meta['fields'].append({
                    'name': c.name,
                    'key': c.name,
                    'display_order': 0,
                    'type': 'DATETIME'
                })
            else:
                meta['fields'].append({
                    'name': c.name,
                    'key': c.name,
                    'display_order': 0
                })

        # Sort the fields
        meta['fields'] = sorted(meta['fields'],
                                key=lambda field: field['display_order'])

        return meta
Example #26
    def delete(self, name, id):
        """
        Deletes a single questionnaire record.

        Parameters:
            name (str):
                Snake-cased name of the questionnaire class (should also match the table name),
                found in app.model.questionnaires.
                E.g., clinical_diagnoses_questionnaire -> ClinicalDiagnosesQuestionnaire

            id (int): ID of the questionnaire record to delete
        """
        try:
            name = ExportService.camel_case_it(name)
            class_ref = ExportService.get_class(name)
            instance = db.session.query(class_ref).filter(
                class_ref.id == id).first()
            db.session.delete(instance)
            #            db.session.query(class_ref).filter(class_ref.id == id).delete()
            db.session.commit()
        except IntegrityError as error:
            raise RestException(RestException.CAN_NOT_DELETE)
        return
Example #27
    def get(self):
        """
        Lists available questionnaires. Used for data export to get meta without specifying flow and relationship.

        Returns:
            list[ExportInfoSchema] - A list of dict objects, including the following info for each questionnaire:
                table_name (str): Snake-case database table name. E.g., "chain_session_questionnaire",
                class_name (str): Pascal-case class name for Model class. E.g., "ChainSession",
                display_name (str): Questionnaire title. E.g., "Chain Session Assessment",
                size (int): Number of questionnaire records in the database,
                url (str): Export endpoint. E.g., "/api/export/chain_session_questionnaire",
                question_type (str): 'sensitive' | 'identifying' | 'unrestricted' | 'sub-table'
                sub_tables (list[ExportInfoSchema]): A list of sub-tables within this table, if applicable.
        """
        info_list = ExportService.get_table_info()
        info_list = [item for item in info_list if item.question_type]
        info_list = sorted(info_list, key=lambda item: item.table_name)
        return ExportInfoSchema(many=True).dump(info_list)
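Using the example values from the docstring above, one element of the returned list would look roughly like this (size and question_type are made-up values chosen from the documented options):

{
    "table_name": "chain_session_questionnaire",
    "class_name": "ChainSession",
    "display_name": "Chain Session Assessment",
    "size": 4,
    "url": "/api/export/chain_session_questionnaire",
    "question_type": "sensitive",
    "sub_tables": []
}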
Example #28
    def test_exporter_sends_email_alert_if_30_minutes_pass_without_export(self):
        """
        If more than 30 minutes pass without an export from the Public Mirror to the Private Mirror, an email should be
        sent to an administrative email address.
        """
        message_count = len(TEST_MESSAGES)

        log = DataTransferLog(last_updated=datetime.datetime.utcnow() - datetime.timedelta(minutes=45), total_records=2,
                              type="export")
        db.session.add(log)
        db.session.commit()

        ExportService.send_alert_if_exports_not_running()
        self.assertGreater(len(TEST_MESSAGES), message_count)
        self.assertEqual("Autism DRIVE: Error - 45 minutes since last successful export",
                         self.decode(TEST_MESSAGES[-1]['subject']))
        ExportService.send_alert_if_exports_not_running()
        ExportService.send_alert_if_exports_not_running()
        ExportService.send_alert_if_exports_not_running()
        self.assertEqual(message_count + 1, len(TEST_MESSAGES), msg="No more messages should be sent.")
        self.assertEqual("*****@*****.**", TEST_MESSAGES[-1]['To'])
Example #29
    def test_exporter_sends_email_alert_if_30_minutes_pass_without_export(
            self):

        message_count = len(TEST_MESSAGES)

        log = DataTransferLog(last_updated=datetime.datetime.now() -
                              datetime.timedelta(minutes=45),
                              total_records=2,
                              type="export")
        db.session.add(log)
        db.session.commit()

        ExportService.send_alert_if_exports_not_running()
        self.assertGreater(len(TEST_MESSAGES), message_count)
        self.assertEqual(
            "Star Drive: Error - 45 minutes since last successful export",
            self.decode(TEST_MESSAGES[-1]['subject']))
        ExportService.send_alert_if_exports_not_running()
        ExportService.send_alert_if_exports_not_running()
        ExportService.send_alert_if_exports_not_running()
        self.assertEqual(message_count + 1,
                         len(TEST_MESSAGES),
                         msg="No more messages should be sent.")
        self.assertEqual("*****@*****.**", TEST_MESSAGES[-1]['To'])
Example #30
    def export_xls(name, app, user_id=None):
        # Flask response
        response = Response()
        response.status_code = 200

        # Create an in-memory output file for the new workbook.
        output = io.BytesIO()

        if name.lower() == 'all':
            # Get Questionnaire Names
            questionnaire_names = ExportXlsService.get_questionnaire_names(app)
        else:
            cl = ExportService.get_class(name)
            info = ExportService.get_single_table_info(cl, None)
            questionnaire_names = [name]
            for sub_table in info.sub_tables:
                questionnaire_names.append(sub_table.class_name)

        # Create workbook
        workbook = xlsxwriter.Workbook(output, {'in_memory': True})

        # Add a bold format to use to highlight cells.
        bold = workbook.add_format({'bold': True})

        for qname in questionnaire_names:
            worksheet = workbook.add_worksheet(
                ExportXlsService.pretty_title_from_snakecase(qname))
            # Some data we want to write to the worksheet.
            # Get header fields from the schema in case the first record is missing fields
            schema = ExportService.get_schema(qname, many=True)
            header_fields = schema.fields
            if user_id:
                questionnaires = schema.dump(ExportService().get_data(
                    name=qname, user_id=user_id),
                                             many=True)
            else:
                questionnaires = schema.dump(
                    ExportService().get_data(name=qname), many=True)

            # Start from the first cell. Rows and columns are zero indexed.
            row = 0
            col = 0

            # Write the column headers.
            for (key, value) in header_fields.items():
                if key != "_links":
                    worksheet.write(row, col, key, bold)
                    col += 1
            row += 1

            # Iterate over the data and write it out row by row.
            for questionnaire in questionnaires:
                # Start from the first cell. Rows and columns are zero indexed.
                col = 0
                for (key, value) in questionnaire.items():
                    if key == "_links":
                        continue  # Don't export _links
                    if isinstance(value, dict):
                        continue
                    if isinstance(value, list) and len(value) > 0 and isinstance(value[0], dict):
                        continue  # Don't try to represent sub-table data.
                    if isinstance(value, list):
                        list_string = ''
                        for list_value in value:
                            list_string = list_string + str(list_value) + ', '
                        worksheet.write(row, col, list_string)
                    else:
                        worksheet.write(row, col, value)
                    col += 1
                row += 1

        # Close the workbook before streaming the data.
        workbook.close()

        # Rewind the buffer.
        output.seek(0)

        # Add output to response
        response.data = output.read()

        # Set filename
        file_name = 'export_{}_{}.xlsx'.format(name, datetime.utcnow())

        # HTTP headers for forcing file download
        response_headers = Headers({
            'Pragma': "public",  # required
            'Expires': '0',
            'Cache-Control': 'must-revalidate, private',  # required for certain browsers
            'Content-Type': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet',
            'Content-Disposition': 'attachment; filename="%s";' % file_name,
            'Content-Transfer-Encoding': 'binary',
            'Access-Control-Expose-Headers': 'x-filename',
            'x-filename': file_name,
            'Content-Length': len(response.data)
        })

        # Add headers
        response.headers = response_headers

        # jquery.fileDownload.js requirements
        response.set_cookie('fileDownload', 'true', path='/')

        # Return the response
        return response
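A hypothetical direct call to the method above, writing the streamed workbook to disk (outside the Flask view that normally returns this response):

response = ExportXlsService.export_xls(name='all', app=app)
with open('export_all.xlsx', 'wb') as f:
    f.write(response.data)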