def export(self):
    """Export the current context as a zip archive over HTTP POST.

    Reads ``exportFormat`` and ``exportFields`` from the request,
    serializes the context, and streams back an in-memory zip.  Any
    serialization errors are written to ``errorLog.txt`` inside the zip.

    :returns: the raw bytes of the generated zip file
    :raises ImportExportError: if the request is not a POST
    """
    global MUST_INCLUDED_ATTRIBUTES
    # create zip in memory
    self.zip = utils.InMemoryZip()
    # defines Pipeline
    self.conversion = utils.Pipeline()
    if self.request and self.request.method == 'POST':
        # strip the leading '/' so the path can serve as the filename
        id_ = self.context.absolute_url_path()[1:]
        exportType = self.request.get('exportFormat', None)
        if self.request.get('exportFields', None) and \
                (exportType == 'csv' or exportType == 'combined'):
            # fields/keys to include
            headers = self.request.get('exportFields', None)
            # BUG in html checkbox input, which sends the value as a
            # plain string if only one box has been checked
            if isinstance(headers, str):
                headers = [headers]
        else:
            # No check provided, thus exporting whole content
            headers = self.getheaders()
        # FIX: the mandatory attributes were previously merged via
        # set() AND re-inserted with insert(0, ...), which produced
        # duplicate columns in the CSV.  Strip them first, then
        # prepend them exactly once so they always come first.
        headers = list(set(headers) - set(MUST_INCLUDED_ATTRIBUTES))
        headers = MUST_INCLUDED_ATTRIBUTES + headers
        # results is a list of dicts; the final element is the error log
        objData = self.serialize(self.context)
        results = objData[:-1]
        if objData[-1] != '':
            errorLog = objData[-1]
            self.zip.append('errorLog.txt', errorLog)
        self.conversion.convertjson(self, results, headers)
        self.request.RESPONSE.setHeader('content-type', 'application/zip')
        cd = 'attachment; filename=%s.zip' % (id_)
        self.request.RESPONSE.setHeader('Content-Disposition', cd)
        return self.zip.read()
    else:
        raise ImportExportError('Invalid Request')
def export(self):
    """Export the current context as a zip archive over HTTP POST.

    The requested ``exportFormat`` / ``exportFields`` drive which
    columns end up in the CSV; mandatory attributes are always placed
    first.  Serialization errors are shipped back inside the zip as
    ``errorLog.txt``.

    :returns: the raw bytes of the generated zip file
    :raises ImportExportError: if the request is not a POST
    """
    global MUST_INCLUDED_ATTRIBUTES
    # in-memory zip buffer and the conversion pipeline
    self.zip = utils.InMemoryZip()
    self.conversion = utils.Pipeline()
    # guard clause: only POST requests are served
    if not (self.request and self.request.method == 'POST'):
        raise ImportExportError('Invalid Request')
    # drop the leading '/' so the path doubles as the download name
    archive_name = self.context.absolute_url_path()[1:]
    export_format = self.request.get('exportFormat', None)
    selected = self.request.get('exportFields', None)
    if selected and export_format in ('csv', 'combined'):
        # BUG in html checkbox input, which sends the value as a
        # plain string if only one box has been checked
        columns = [selected] if isinstance(selected, str) else selected
    else:
        # no selection supplied -- export every available field
        columns = self.getheaders()
    # make sure the mandatory attributes appear exactly once, and first
    columns = list(set(columns) - set(MUST_INCLUDED_ATTRIBUTES))
    columns = MUST_INCLUDED_ATTRIBUTES + columns
    # serialize() yields a list of record dicts plus a trailing error log
    serialized = self.serialize(self.context)
    records = serialized[:-1]
    log_text = serialized[-1]
    if log_text != '':
        self.zip.append('errorLog.txt', log_text)
    self.conversion.convertjson(self, records, columns)
    self.request.RESPONSE.setHeader('content-type', 'application/zip')
    disposition = 'attachment; filename={arg}.zip'.format(
        arg=str(archive_name))
    self.request.RESPONSE.setHeader('Content-Disposition', disposition)
    return self.zip.read()
def setUp(self):
    """Shared test fixture: wire up the import/export browser view.

    Builds the sample data, pulls portal and request out of the test
    layer, fakes a POST file upload, and instantiates the helpers
    (zip buffer, file analyser, pipeline, UID mapping) used by the
    individual tests.
    """
    self.data = TestData()
    self.context = self.layer['portal']
    self.request = self.layer['request']
    # simulate a POST upload of the sample zip
    # NOTE: getzip() is called again below -- presumably it returns a
    # fresh stream each time, since the first one may be consumed
    self.request['file'] = self.data.getzip()
    self.request['method'] = 'POST'
    self.view = getMultiAdapter((self.context, self.request),
                                name="import-export")
    self.zip = utils.InMemoryZip()
    # feed the zip into the view, then analyse its file structure
    self.view.requestFile(self.data.getzip())
    self.fileAnalyse = utils.fileAnalyse(self.view.files)
    self.pipeline = utils.Pipeline()
    self.mapping = utils.mapping(self)
def test_export(self):
    """Exporting the portal should yield a zip containing plone.csv."""
    # export() returns the zip archive as a raw string
    export = self.view.export()
    # wrap it in a file-like object so the zip helper can inspect it
    export = StringIO(export)
    f = utils.InMemoryZip()
    # FIX: dropped the former `try/except Exception: self.fail(e)`
    # wrapper -- letting an unexpected exception propagate preserves
    # the full traceback in the test report instead of flattening it
    # into a one-line failure message.
    self.assertIn('plone.csv', f.getfiles(export))
def test_convertjson(self):
    """convertjson should embed the expected entry for each format."""
    with api.env.adopt_roles(['Manager']):
        # run an import first so there is content to export
        # (return value -- the import log -- is not asserted here)
        self.view.imports()
        # maps requested exportFormat -> marker expected inside the zip
        exportFormat = {
            'csv': 'plone.csvPK',
            'combined': 'plone.csvPK',
            'files': 'plone/14-ist.webm/14 IST.webmPK'}
        data_list = self.view.serialize(self.context)[:-1]
        csv_headers = self.pipeline.getcsvheaders(self.data.getData())
        # iterate items() directly instead of .keys() + lookup
        for formats, marker in exportFormat.items():
            self.request['exportFormat'] = formats
            self.zip = utils.InMemoryZip()
            self.pipeline.convertjson(obj=self, data_list=data_list,
                                      csv_headers=csv_headers)
            self.assertIn(marker, self.zip.read())
def setUp(self):
    """Minimal fixture: an in-memory zip helper and the sample data."""
    self.InMemoryZip = utils.InMemoryZip()
    self.data = TestData()
def imports(self):
    """Import content from an uploaded zip (POST request).

    Reads the zip from the request, converts its CSV to JSON, creates
    any non-existent content, maps old UIDs to new ones, and finally
    deserializes each record onto its target object.

    :returns: a plain-text error log describing any failures
    :raises ImportExportError: on a bad csv or a non-POST request
    """
    global MUST_EXCLUDED_ATTRIBUTES
    global MUST_INCLUDED_ATTRIBUTES
    if self.request.method == 'POST':
        # uploaded file from the request
        file_ = self.request.get('file')
        # extracted members are collected at self.files
        self.files = {}
        self.requestFile(file_)
        # file structure and analyser
        self.files = utils.fileAnalyse(self.files)
        if not self.files.getCsv():
            raise ImportExportError('Provide a good csv file')
        # create zip in memory
        self.zip = utils.InMemoryZip()
        # defines Pipeline
        self.conversion = utils.Pipeline()
        # defines mapping for UID
        self.mapping = utils.mapping(self)
        # get list of existingPath
        self.getExistingpath()
        error_log = ''
        temp_log = ''
        # check for include attributes in advanced tab
        if self.request.get('importFields', None):
            # fields/keys to include
            include = self.request.get('importFields', None)
            # BUG in html checkbox input, which sends the value as a
            # plain string if only one box has been checked
            if isinstance(include, str):
                include = [include]
            include = list(set(MUST_INCLUDED_ATTRIBUTES + include))
        else:
            # no selection provided, thus importing whole content
            include = None
        # convert csv to json
        data = self.conversion.converttojson(data=self.files.getCsv(),
                                             header=include)
        # invoke non-existent content, if any
        error_log += self.createcontent(data)
        # map old and new UID in memory
        self.mapping.mapNewUID(data)
        # deserialize each record onto its object
        for obj_data in data:
            path_ = obj_data.get('path', None)
            if not path_:
                # FIX: the former message formatted obj_data['path'],
                # which raised KeyError when the key was absent
                # entirely; use the already-fetched value instead.
                error_log += 'pathError in {arg} \n'.format(arg=path_)
                continue
            # get blob content into json data
            obj_data, temp_log = self.conversion.fillblobintojson(
                obj_data, self.files.getFiles(), self.mapping)
            error_log += temp_log
            # os.sep is preferable to support multiple filesystems;
            # return context of object
            object_context = self.getobjcontext(path_.split(os.sep))
            # all import errors are logged back to the caller
            if object_context:
                error_log += self.deserialize(object_context, obj_data)
            else:
                error_log += 'pathError for {arg}\n'.format(arg=path_)
        self.request.RESPONSE.setHeader('content-type',
                                        'application/text; charset=utf-8')
        return error_log
    else:
        raise ImportExportError('Invalid Request Method')
def imports(self):
    """Import content from an uploaded zip (POST request).

    Honours the advanced import settings (primary key, actions for
    new / matching / non-matching content), converts the CSV to JSON,
    creates or deletes content as configured, maps old UIDs to new
    ones, and deserializes each matched record onto its object.

    :returns: a plain-text import log, served as a file download
    :raises ImportExportError: on a bad csv or a non-POST request
    """
    global MUST_EXCLUDED_ATTRIBUTES
    global MUST_INCLUDED_ATTRIBUTES
    if self.request.method == 'POST':
        # uploaded file from the request
        file_ = self.request.get('file')
        # get the defined import key
        self.primary_key = \
            self.request.get('import_key', 'path')
        # match related self.settings, based on defined key
        self.new_content_action = \
            self.request.get('new_content', 'add')
        self.matching_content_action = \
            self.request.get('matching_content', 'update')
        self.existing_content_no_match_action = \
            self.request.get('existing_content_no_match', 'keep')
        # extracted members are collected at self.files
        self.files = {}
        self.requestFile(file_)
        # file structure and analyser
        self.files = utils.fileAnalyse(self.files)
        if not self.files.getCsv():
            raise ImportExportError('Provide a good csv file')
        # create zip in memory
        self.zip = utils.InMemoryZip()
        # defines Pipeline
        self.conversion = utils.Pipeline()
        # defines mapping for UID
        self.mapping = utils.mapping(self)
        # get list of existingPath
        self.getExistingpath()
        error_log = ''
        temp_log = ''
        # check for include attributes in advanced tab
        if self.request.get('importFields', None):
            # fields/keys to include
            include = self.request.get('importFields', None)
            # BUG in html checkbox input, which sends the value as a
            # plain string if only one box has been checked
            if isinstance(include, str):
                include = [include]
            include = list(set(MUST_INCLUDED_ATTRIBUTES + include))
        else:
            # no selection provided, thus importing whole content
            include = None
        # convert csv to json
        data = self.conversion.converttojson(data=self.files.getCsv(),
                                             header=include)
        error_log += self.processContentCreation(data=data)
        # map old and new UID in memory
        self.mapping.mapNewUID(data)
        # FIX: this reindex/delete pair was accidentally written twice
        # in sequence; deleting non-matching content is done once.
        self.reindexMatchedTraversalPaths()
        error_log += self.deleteNoMatchingContent()
        # deserialize each matched record onto its object
        for obj_data in data:
            path_ = obj_data.get('path', None)
            if not path_:
                # FIX: the former message formatted obj_data['path'],
                # which raised KeyError when the key was absent
                # entirely; also fixed the "deseralizing" typo.
                error_log += ('pathError upon deserializing the content '
                              'for {arg} \n'.format(arg=path_))
                continue
            obj_absolute_path = "/".join(
                self.getobjpath(path_.split(os.sep)))
            if obj_absolute_path not in self.matchedTraversalPaths:
                continue
            # NOTE(review): this second membership test on the raw csv
            # path looks redundant with the absolute-path test above --
            # confirm matchedTraversalPaths really stores both forms,
            # otherwise this skips every record.
            if path_ not in self.matchedTraversalPaths:
                continue
            # get blob content into json data
            obj_data, temp_log = self.conversion.fillblobintojson(
                obj_data, self.files.getFiles(), self.mapping)
            error_log += temp_log
            # FIX: removed a stray Python-2 debug statement
            # (`print obj_data`) left over from development; it is a
            # syntax error under Python 3.
            # os.sep is preferable to support multiple filesystems;
            # return context of object
            object_context = self.getobjcontext(path_.split(os.sep))
            # all import errors are logged back to the caller
            if object_context:
                error_log += self.deserialize(object_context, obj_data)
            else:
                error_log += 'Error while attempting to update {arg}\n'.format(
                    arg=path_)
        self.request.RESPONSE.setHeader('content-type',
                                        'application/text; charset=utf-8')
        cd = 'attachment; filename=import-log.txt'
        self.request.RESPONSE.setHeader('Content-Disposition', cd)
        return error_log
    else:
        raise ImportExportError('Invalid Request Method')