def export_xls(directory, filehandle, language, as_template, fieldmap=None):
    """Export the contents of *directory* to an XLS workbook.

    :param directory: container object whose children are exported; its
        ``title`` becomes the sheet name.
    :param filehandle: file-like object the finished workbook is written to.
    :param language: language passed through to ``write_title``.
    :param as_template: when true, only the title row is written and the
        item rows are skipped (producing an empty template sheet).
    :param fieldmap: optional pre-built fieldmap; looked up via
        ``get_map(directory)`` when not supplied.
    """
    wb = xlwt.Workbook(encoding='utf-8')
    # XLS sheet names are limited to 31 characters; stay safely below.
    ws = wb.add_sheet(directory.title[:30])
    # Use an explicit `is None` test so a caller-supplied fieldmap is
    # honoured even if it happens to be falsy (`fieldmap or get_map(...)`
    # would silently discard it).
    if fieldmap is None:
        fieldmap = get_map(directory)
    write_title(fieldmap, ws, language, directory)
    if not as_template:
        items = catalog.children(directory, fieldmap.typename)
        # Data rows start at row index 1, right below the title row.
        write_objects(items, fieldmap, ws, 1)
    wb.save(filehandle)
def export_xls(directory, filehandle, language, as_template, fieldmap=None):
    """Export the contents of *directory* to an XLS workbook.

    :param directory: container object whose children are exported; its
        ``title`` becomes the sheet name.
    :param filehandle: file-like object the finished workbook is written to.
    :param language: language passed through to ``write_title``.
    :param as_template: when true, only the title row is written and the
        item rows are skipped (producing an empty template sheet).
    :param fieldmap: optional pre-built fieldmap; looked up via
        ``get_map(directory)`` when not supplied.  Added with a default
        for consistency with the sibling ``export_xls`` variant, so all
        existing callers keep working unchanged.
    """
    wb = xlwt.Workbook(encoding='utf-8')
    # XLS sheet names are limited to 31 characters; stay safely below.
    ws = wb.add_sheet(directory.title[:30])
    if fieldmap is None:
        fieldmap = get_map(directory)
    write_title(fieldmap, ws, language, directory)
    if not as_template:
        items = catalog.children(directory, fieldmap.typename)
        # Data rows start at row index 1, right below the title row.
        write_objects(items, fieldmap, ws, 1)
    wb.save(filehandle)
def import_xls(directory, workbook, error=lambda e: None):
    """Import objects into *directory* from an XLS *workbook*.

    :param directory: target container; its fieldmap (hierarchy and
        cell-to-field mapping) is obtained via ``get_map``.
    :param workbook: the opened XLS workbook to read values from.
    :param error: callback invoked with a translated message when the
        workbook cannot be parsed; defaults to a no-op.
    :returns: the mapping produced by ``generate_objects``, or an empty
        dict when parsing failed or the workbook held no values.
    """
    fieldmap = get_map(directory)
    # A malformed sheet surfaces as an IndexError while reading cells;
    # report it through the callback and bail out with an empty result.
    try:
        values = get_values(workbook, fieldmap)
    except IndexError:
        error(_(u'Invalid XLS file'))
        return dict()
    # Nothing to import — keep the return type uniform.
    if not values:
        return dict()
    return generate_objects(directory, fieldmap, values, error)
def import_csv(self, action):
    """Form action: replace every event in the context with the rows of
    the uploaded CSV file.

    Destructive: all existing children of ``self.context`` are deleted
    before the import starts.  Each CSV row becomes one event which is
    immediately submitted and published through the workflow tool.

    NOTE(review): assumes the CSV uses ';' as delimiter, '"' as quote
    character, has one header line, and carries an image URL in the
    third-to-last column and a PDF URL in the second-to-last column —
    confirm against the export format.
    """
    # Extract form field values and errors from HTTP request
    data, errors = self.extractData()
    if errors:
        self.status = self.formErrorsMessage
        return
    # Delete all existing events (destructive — see docstring)
    self.context.manage_delObjects(self.context.objectIds())
    workflow_tool = getToolByName(self.context, 'portal_workflow')
    fieldmap = get_map(self.context)
    io = StringIO(data['csv_file'].data)
    reader = unicode_csv_reader(io, delimiter=';', quotechar='"')
    # Skip header line
    reader.next()
    counter = 0
    for row in reader:
        attributes = dict()
        # Map each writable field from its CSV column index, running the
        # fieldmap's unwrapper to convert the raw cell text.
        for attr, ix in fieldmap.fieldmap.items():
            if not attr in fieldmap.readonly:
                value = row[ix]
                unwrapped = fieldmap.get_unwrapper(ix=ix)(value)
                attributes[attr] = unwrapped
        # Unmapped fields are filled with defaults
        add_defaults(attributes, fieldmap)
        # Adjust coordinates: the CSV value uses single quotes, so it is
        # normalised to valid JSON first; then the two components of the
        # point (presumably lat/lon order — TODO confirm against the
        # consumer of coordinates_json) are swapped.
        coordinates = attributes['coordinates_json']
        if coordinates:
            coordinates = coordinates.replace("'", '"')
            cords = json.loads(coordinates)
            latitude = cords[1][0]
            longitude = cords[1][1]
            cords[1][0] = longitude
            cords[1][1] = latitude
            attributes['coordinates_json'] = json.dumps(cords)
        # "What" category: translate each non-empty entry through the
        # categories1 lookup table (KeyError on unknown values).
        cats1 = []
        if attributes['cat1']:
            for cat in attributes['cat1']:
                if cat:
                    cats1.append(categories1[cat])
        attributes['cat1'] = cats1
        # "Where" category: normalise the town name via categories2
        # (falling back to the raw value) and mirror it into cat2.
        if attributes['town']:
            attributes['town'] = categories2.get(
                attributes['town'],
                attributes['town']
            )
            attributes['cat2'] = [attributes['town']]
        # Manipulate some attributes
        attributes['timezone'] = default_timezone()
        attributes['long_description'] = self.readable_html(
            attributes['long_description']
        )
        # Fetch image from URL (third-to-last column)
        image_url = row[-3]
        if image_url:
            response = urllib2.urlopen(image_url)
            image = response.read()
            attributes['image'] = NamedImage(image)
        # Fetch PDF from URL (second-to-last column)
        pdf_url = row[-2]
        if pdf_url:
            response = urllib2.urlopen(pdf_url)
            pdf_file = response.read()
            attributes['attachment_1'] = NamedFile(pdf_file)
        # Create event
        event = createContentInContainer(
            self.context,
            fieldmap.typename,
            **attributes
        )
        # Log the events which span multiple days; they need to be
        # manually adjusted by the client afterwards.
        if attributes['start'].date() != attributes['end'].date():
            log.info(
                '"%s" spans multiple days' % event.absolute_url()
            )
        # Publish event
        workflow_tool.doActionFor(event, 'submit')
        workflow_tool.doActionFor(event, 'publish')
        counter += 1
    self.status = u'Imported %s events' % counter