class IImportSchema(form.Schema):
    """Define fields used on the form."""

    # TODO: need to get rid of "Keep existing file" etc. It's confusing;
    # suspect this is the wrong field type.
    import_file = NamedFile(
        title=_(
            "import_field_import_file_title",  # nopep8
            default=u"CSV metadata to import"),
        description=_(
            "import_field_import_file_description",  # nopep8
            default=u"CSV file containing a row for each content item to "
                    u"create or update"),
        required=False)
    #    form.widget('header_mapping', NamedFileFieldWidget)
    header_mapping = schema.List(
        title=_(u'Header Mapping'),
        description=_(
            u"For import, any matching headers in your CSV will be mapped to "
            u"these fields. If a header is not found then its data will be "
            u"ignored. For export, the internal fields will be exported "
            u"under the header names listed."),
        value_type=DictRow(title=u"tablerow", schema=IMappingRow),
        missing_value=[],
        required=False)

    primary_key = schema.Choice(
        title=_(
            "import_field_primary_key_title",  # nopep8
            default=u"Test if content exists using"),
        description=_(
            "import_field_primary_key_description",
            default=u"Field with a unique id used to check whether content "
                    u"already exists. Ignored for export. "
                    u"Normally 'Short Name' or 'Path'."),
        source=fields_list,  # TODO: should be index, not fieldname
        required=True,
        default=u"id")
    object_type = schema.Choice(
        title=_(
            "import_field_object_type_title",  # nopep8
            default=u"If not found"),
        description=_(
            "import_field_object_type_description",
            default=u"If content can't be found then Create, Skip or Stop at "
                    u"that row. For rich media such as Videos, upload first. "
                    u"Ignored for export."),
        source=if_not_found_list,
        required=True)


class ImportForm(form.SchemaForm):
    """Import data to dexterity-types."""

    # Which plone.directives.form.Schema subclass is used to define
    # fields for this form
    schema = IImportSchema
    ignoreContext = False

    # Form label
    label = _(
        "import_form_label",  # nopep8
        default=u"CSV Import/Export")
    description = _(
        "import_form_description",  # nopep8
        default=u"Create or Update content from a CSV. "
        u"For images, files, videos or html documents, use Upload first and use "
        u"CSV import to set the metadata of the uploaded files.")

    def getContent(self):
        """Return a temporary object holding previously saved form settings."""

        # Create a temporary object holding the settings values taken from
        # the context annotations

        class TemporarySettingsContext(object):
            implements(IImportSchema)

        obj = TemporarySettingsContext()

        annotations = IAnnotations(self.context)
        settings = annotations.get(KEY)
        if settings:
            obj.primary_key = settings['primary_key']
            obj.object_type = settings['object_type']

        obj.header_mapping = self.headersFromRequest()
        return obj

    def headersFromRequest(self):
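        """Build the initial rows for the header mapping grid.

        Headers come from the submitted 'csv_header' request value when
        present, otherwise from previously saved settings; each header is
        matched to a field via the saved mapping, the field title or the
        field name.
        """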
        rows = []
        request = self.request
        context = self.context

        # try and load it from settings
        settings = IAnnotations(getContext(context)).get(KEY, {})
        if not settings:
            # TODO: should look in the parent?
            pass
        header_list = settings.get('header_list', [])
        matching_fields = settings.get('matching_fields', {})
        if request.get('csv_header'):
            reader = csv.DictReader(request.get('csv_header').splitlines(),
                                    delimiter=",",
                                    dialect="excel",
                                    quotechar='"')
            header_list = reader.fieldnames

        fields = fields_list(None)
        field_names = {}
        for field in fields:
            field_names[field.title.lower()] = field.value
            field_names[field.value.lower()] = field.value

        for col in header_list:
            col = unicode(col.strip())
            if not col:
                continue
            if col in matching_fields:
                matched_field = matching_fields[col]
            elif col.lower() in field_names:
                matched_field = field_names[col.lower()]
            else:
                matched_field = ""
            rows.append(dict(header=col, field=matched_field))
        return rows

    def updateWidgets(self):
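        """Use the DataGridField widget for the header_mapping field."""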
        self.fields['header_mapping'].widgetFactory = DataGridFieldFactory
        # Work around a bug, most likely in the datagrid field, by binding
        # the field to the context explicitly.
        self.fields['header_mapping'].field.bind(self.context)
        super(ImportForm, self).updateWidgets()

    def getObjectsToExport(self):
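        """Return the active header mapping and the catalog results to export.

        Only the immediate children of the current container are included.
        """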
        # Extract form field values and errors from HTTP request
        data, errors = self.extractData()
        if errors:
            return False
        container = self.context
        container_path = "/".join(container.getPhysicalPath())
        # TODO: should we allow more criteria? or at least filter by type?
        query = dict(
            path={"query": container_path, "depth": 1},
            # portal_type=object_type,
        )
        # query[primary_key] = key_arg[primary_key]

        # blank header or field means we don't want it
        header_mapping = [
            d for d in data['header_mapping'] if d['field'] and d['header']
        ]

        catalog = api.portal.get_tool("portal_catalog")
        results = catalog(**query)

        return header_mapping, results

    @button.buttonAndHandler(
        _(
            "import_button_save_import",  # nopep8
            default=u"CSV Import"))
    def handleSaveImport(self, action):
        """Create and handle form button "Save and Import"."""

        # Extract form field values and errors from HTTP request
        data, errors = self.extractData()
        if errors:
            self.status = self.formErrorsMessage
            return False

        import_file = data["import_file"]

        if not import_file:
            raise WidgetActionExecutionError(
                'import_file',
                Invalid(_(u"Please provide a CSV file to import")))

        # File upload is not saved in settings
        file_resource = import_file.data
        file_name = import_file.filename

        # Excel is checked first so users get the more specific message.
        if import_file.contentType.startswith("application/vnd.ms-excel"):
            raise WidgetActionExecutionError(
                'import_file',
                Invalid(_(u"Please convert your Excel file to CSV first")))
        if not (import_file.contentType.startswith("text/")
                or import_file.contentType.startswith("application/csv")):
            raise WidgetActionExecutionError(
                'import_file',
                Invalid(_(u"Please provide a file of type CSV")))

        if data["object_type"] in ['__ignore__', '__stop__']:
            create_new = False
            object_type = None
        else:
            create_new = True
            object_type = data["object_type"]

        # list all the dexterity types
        #dx_types = get_portal_types(self.request)
        #log.debug(dx_types)

        # based from the types, display all the fields
        # fields = get_schema_info(CREATION_TYPE)
        # log.debug(fields)

        # blank header or field means we don't want it
        header_mapping = [
            d for d in data['header_mapping'] if d['field'] and d['header']
        ]

        matching_headers = {d['field']: d['header'] for d in header_mapping}

        if create_new and not (matching_headers.get('id')
                               or matching_headers.get('title')):
            raise WidgetActionExecutionError(
                'header_mapping',
                Invalid(
                    _(u"If creating new content you need either 'Short Name' "
                      u"or 'Title' in your data.")))

        if not matching_headers:
            raise WidgetActionExecutionError(
                'header_mapping',
                Invalid(
                    _(u"You must pick which fields should contain your data")))

        primary_key = data["primary_key"]
        if primary_key and not matching_headers.get(primary_key):
            raise WidgetActionExecutionError(
                'primary_key',
                Invalid(_(u"Must be a field selected in Header Mapping")))

        # Invert the mapping so it is keyed by CSV header -> field name.
        matching_fields = {d['header']: d['field'] for d in header_mapping}
        import_metadata = dexterity_import(self.context, file_resource,
                                           matching_fields, object_type,
                                           create_new, primary_key)

        existing_count = import_metadata["existing_count"]
        new_count = import_metadata["new_count"]
        ignore_count = import_metadata["ignore_count"]

        api.portal.show_message(
            message=_(
                "import_message_csv_info",  # nopep8
                default=u"""${new_num} items added,
                    ${existing_num} items updated and
                    ${ignore_num} items skipped
                    from ${filename}""",
                mapping={
                    "new_num": new_count,
                    "existing_num": existing_count,
                    "ignore_num": ignore_count,
                    "filename": file_name
                }),
            request=self.request,
            type="info")

        self.import_metadata = import_metadata

        # Save our successful settings to save time on the next import
        annotations = IAnnotations(self.context)
        settings = annotations.setdefault(KEY, {})
        settings['header_list'] = [d['header'] for d in header_mapping]
        # Keep growing this mapping in case they switch between several CSVs
        settings.setdefault("matching_fields", {}).update(matching_fields)
        settings['primary_key'] = primary_key
        settings['object_type'] = object_type

        return True

    # TODO: replace with a report element on import that gives an extra button
    # to "Download CSV of changes". Requires a hidden field of all changed
    # UIDs, which are then looked up again.
    # The report element could also display a list of the top 20 creations,
    # top 20 updates etc., replacing the info message.
    @button.buttonAndHandler(
        _(
            "import___button_import_export",  # nopep8
            default=u"Import and Export Changes"))
    def handleImportExport(self, action):
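        """Run the import, then stream back a CSV report of the changes."""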

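        # handleSaveImport was replaced by @button.buttonAndHandler with a
        # z3c.form Handler, which is invoked as handler(form, action), so the
        # form instance is passed explicitly here.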
        if not self.handleSaveImport(self, action):
            return False

        data, errors = self.extractData()
        if errors:
            return False

        # blank header or field means we don't want it
        header_mapping = [
            d for d in data['header_mapping'] if d['field'] and d['header']
        ]

        # export to csv file
        if self.import_metadata["report"]:
            normalizer = getUtility(IIDNormalizer)
            random_id = normalizer.normalize(time.time())
            filename = "export_{0}.{1}".format(random_id, 'csv')
            dataset = export_file(self.import_metadata["report"],
                                  header_mapping, self.request)
            output = StringIO.StringIO()
            output.write(dataset.csv)
            attachment = output.getvalue()
            self.request.response.setHeader('content-type', 'text/csv')
            self.request.response.setHeader(
                'Content-Disposition', 'attachment; filename="%s"' % filename)
            self.request.response.setBody(attachment, lock=True)
            output.close()

        #self.request.response.redirect(self.context.absolute_url())

    @button.buttonAndHandler(
        _(
            "import___button_export_csv",  # nopep8
            default=u"CSV Export"))
    def handleExportCSV(self, action):
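        """Export the container's immediate children as a CSV attachment."""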
        exported = self.getObjectsToExport()
        if not exported:
            return False
        header_mapping, results = exported
        normalizer = getUtility(IIDNormalizer)
        random_id = normalizer.normalize(time.time())
        filename = "export_{0}.{1}".format(random_id, 'csv')
        dataset = export_file(results, header_mapping, self.request)
        #log.debug(filename)
        #log.debug(attachment)
        self.request.response.setHeader('content-type', 'text/csv')
        self.request.response.setHeader('Content-Disposition',
                                        'attachment; filename="%s"' % filename)
        self.request.response.setBody(dataset.csv, lock=True)
        return True

    @button.buttonAndHandler(
        _(
            "import___button_export_xlsx",  # nopep8
            default=u"XLSX Export"))
    def handleExportXLSX(self, action):
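        """Export the container's immediate children as an XLSX attachment."""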
        exported = self.getObjectsToExport()
        if not exported:
            return False
        header_mapping, results = exported
        normalizer = getUtility(IIDNormalizer)
        random_id = normalizer.normalize(time.time())
        filename = "export_{0}.{1}".format(random_id, 'xlsx')
        dataset = export_file(results, header_mapping, self.request)
        #log.debug(filename)
        #log.debug(attachment)
        self.request.response.setHeader(
            'content-type',
            'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'
        )
        self.request.response.setHeader('Content-Disposition',
                                        'attachment; filename="%s"' % filename)
        self.request.response.setBody(dataset.xlsx, lock=True)
        return True

    @button.buttonAndHandler(u"Cancel")
    def handleCancel(self, action):
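        """Abort the import and return to the container."""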
        api.portal.show_message(
            message=_(
                "import_message_cancel",  # nopep8
                default="Import canceled."),
            request=self.request,
            type="info")
        self.request.response.redirect(self.context.absolute_url())