def getCommancontent(self):
    """Return the common parent path of all 'path' entries in the uploaded csv.

    Reads the uploaded file from the request, analyses its structure and
    computes the shared path prefix of the csv's 'path' column.  When no
    csv is present, a json-encoded {'Error': ...} mapping is returned
    instead (the JS client expects json).
    """
    # pull the uploaded file out of the request; files land in self.files
    uploaded = self.request.get('file')
    self.files = {}
    self.requestFile(uploaded)
    # analyse the uploaded file structure
    self.files = utils.fileAnalyse(self.files)

    if not self.files.getCsv():
        # JS requires json dump
        return json.dumps({'Error': 'No csv Provided'})

    # extract the 'path' column from the csv rows
    pipeline = utils.Pipeline()
    rows = pipeline.converttojson(data=self.files.getCsv(), header=['path'])
    paths = [row.get('path', None) for row in rows]
    return self.getCommonpath(paths)
def setUp(self):
    """Prepare a POST request carrying the sample zip and the import-export view."""
    self.data = TestData()
    self.context = self.layer['portal']
    self.request = self.layer['request']
    # simulate an upload of the sample zip archive via POST
    self.request['file'] = self.data.getzip()
    self.request['method'] = 'POST'
    self.view = getMultiAdapter(
        (self.context, self.request), name="import-export")
    # feed the zip to the view and analyse the resulting file structure
    self.view.requestFile(self.data.getzip())
    self.fileAnalyse = utils.fileAnalyse(self.view.files)
def setUp(self):
    """Prepare the view plus the zip/pipeline/mapping helpers for the tests."""
    self.data = TestData()
    self.context = self.layer['portal']
    self.request = self.layer['request']
    # simulate an upload of the sample zip archive via POST
    self.request['file'] = self.data.getzip()
    self.request['method'] = 'POST'
    self.view = getMultiAdapter(
        (self.context, self.request), name='import-export')
    # helpers under test: in-memory zip, file analyser, pipeline, UID mapping
    self.zip = utils.InMemoryZip()
    self.view.requestFile(self.data.getzip())
    self.fileAnalyse = utils.fileAnalyse(self.view.files)
    self.pipeline = utils.Pipeline()
    self.mapping = utils.mapping(self)
def test_fillblobintojson(self):
    """fillblobintojson should produce no '*Error*' log entry for the sample data."""
    # pull the uploaded file out of the request and analyse its structure
    uploaded = self.request.get('file')
    self.view.files = {}
    self.view.requestFile(uploaded)
    self.files = utils.fileAnalyse(self.view.files)
    # convert csv to json, then inject the blob data row by row
    data = self.pipeline.converttojson(data=self.files.getCsv(), header=[])
    for obj_data in data:
        obj_data, temp_log = self.pipeline.fillblobintojson(
            obj_data, self.files.getFiles(), self.mapping)
        # any log line mentioning 'Error' fails the test
        if fnmatch.fnmatch(temp_log, '*' + 'Error' + '*'):
            self.fail()
def getImportfields(self):
    """Return a json matrix of csv headers selectable for import.

    Mandatory attributes (MUST_INCLUDED_ATTRIBUTES) are always imported,
    so they are filtered out of the selectable headers.  Any failure is
    reported back to the JS client as a json mapping {'Error': message}.
    """
    global MUST_INCLUDED_ATTRIBUTES
    try:
        self.files = {}
        # pull the uploaded file out of the request; files land in self.files
        file_ = self.request.get('file')
        self.requestFile(file_)
        # analyse the uploaded file structure
        self.files = utils.fileAnalyse(self.files)
        if not self.files.getCsv():
            raise ImportExportError('Provide a good csv file')
        csvData = self.files.getCsv()
        # convert csv to json and pull the headers out of it
        conversion = utils.Pipeline()
        jsonData = conversion.converttojson(data=csvData)
        headers = conversion.getcsvheaders(jsonData)
        # mandatory attributes are always imported; do not offer them
        # (lambda arg renamed: it used to shadow `headers` itself)
        headers = filter(
            lambda header: header not in MUST_INCLUDED_ATTRIBUTES,
            headers)
        # lay the remaining headers out as a 4-column matrix
        matrix = self.getmatrix(headers=headers, columns=4)
    except Exception as e:
        # str(e) instead of the deprecated e.message, which is empty or
        # missing for many exception types
        matrix = {'Error': str(e)}
    # JS requires json dump
    matrix = json.dumps(matrix)
    return matrix
def imports(self):
    """Import content from an uploaded zip/csv (POST only).

    Converts the uploaded csv to json rows (optionally limited to the
    fields checked in the advanced tab), creates any non-existent content,
    remaps old UIDs to the new ones, then deserializes every row onto the
    object found at its 'path'.  Returns the accumulated error log as
    plain text.

    Raises:
        ImportExportError: when the request is not a POST or no usable
            csv is found in the upload.
    """
    global MUST_EXCLUDED_ATTRIBUTES
    global MUST_INCLUDED_ATTRIBUTES
    # global files
    # try:
    if self.request.method == 'POST':
        # request files
        file_ = self.request.get('file')
        # files are at self.files
        self.files = {}
        self.requestFile(file_)
        # file structure and analyser
        self.files = utils.fileAnalyse(self.files)
        if not self.files.getCsv():
            raise ImportExportError('Provide a good csv file')
        # create zip in memory
        self.zip = utils.InMemoryZip()
        # defines Pipeline
        self.conversion = utils.Pipeline()
        # defines mapping for UID
        self.mapping = utils.mapping(self)
        # get list of existingPath
        self.getExistingpath()
        error_log = ''
        temp_log = ''
        # check for include attributes in advanced tab
        if self.request.get('importFields', None):
            # fields/keys to include
            include = self.request.get('importFields', None)
            # BUG in html checkbox input, which send value as a
            # string if only one value have been checked
            if isinstance(include, str):
                include = [include]
            # mandatory attributes are always imported
            include = list(set(MUST_INCLUDED_ATTRIBUTES + include))
        else:
            # 'No check provided. Thus exporting whole content'
            include = None
        # convert csv to json
        data = self.conversion.converttojson(data=self.files.getCsv(),
                                             header=include)
        # invoke non-existent content, if any
        error_log += self.createcontent(data)
        # map old and new UID in memory
        self.mapping.mapNewUID(data)
        # deserialize
        for index in range(len(data)):
            obj_data = data[index]
            if not obj_data.get('path', None):
                # NOTE(review): 'path' is falsy or missing here, so this
                # format arg is empty at best and raises KeyError when the
                # key is absent entirely — confirm intent
                error_log += 'pathError in {arg} \n'.format(
                    arg=obj_data['path'])
                continue
            # get blob content into json data
            obj_data, temp_log = self.conversion.fillblobintojson(
                obj_data, self.files.getFiles(), self.mapping)
            error_log += temp_log
            # os.sep is preferrable to support multiple filesystem
            # return context of object
            object_context = self.getobjcontext(obj_data['path'].split(
                os.sep))
            # all import error will be logged back
            if object_context:
                error_log += self.deserialize(object_context, obj_data)
            else:
                error_log += 'pathError for {arg}\n'.format(
                    arg=obj_data['path'])
        self.request.RESPONSE.setHeader('content-type',
                                        'application/text; charset=utf-8')
        return error_log
    else:
        raise ImportExportError('Invalid Request Method')
def imports(self):
    """Import content from an uploaded zip/csv (POST only).

    The import key (default 'path') and the actions for new, matching and
    unmatched existing content are read from the request.  Every csv row
    whose traversal path was matched is deserialized onto its object; all
    problems are appended to a plain-text log that is returned as an
    'import-log.txt' attachment.

    Raises:
        ImportExportError: when the request is not a POST or no usable
            csv is found in the upload.
    """
    global MUST_EXCLUDED_ATTRIBUTES
    global MUST_INCLUDED_ATTRIBUTES
    if self.request.method == 'POST':
        # request files
        file_ = self.request.get('file')
        # get the defined import key
        self.primary_key = \
            self.request.get('import_key', 'path')
        # match related self.settings, based on defined key
        self.new_content_action = \
            self.request.get('new_content', 'add')
        self.matching_content_action = \
            self.request.get('matching_content', 'update')
        self.existing_content_no_match_action = \
            self.request.get('existing_content_no_match', 'keep')
        # files are at self.files
        self.files = {}
        self.requestFile(file_)
        # file structure and analyser
        self.files = utils.fileAnalyse(self.files)
        if not self.files.getCsv():
            raise ImportExportError('Provide a good csv file')
        # create zip in memory
        self.zip = utils.InMemoryZip()
        # defines Pipeline
        self.conversion = utils.Pipeline()
        # defines mapping for UID
        self.mapping = utils.mapping(self)
        # get list of existingPath
        self.getExistingpath()
        error_log = ''
        temp_log = ''
        # check for include attributes in advanced tab
        if self.request.get('importFields', None):
            # fields/keys to include
            include = self.request.get('importFields', None)
            # BUG in html checkbox input, which send value as a
            # string if only one value have been checked
            if isinstance(include, str):
                include = [include]
            # mandatory attributes are always imported
            include = list(set(MUST_INCLUDED_ATTRIBUTES + include))
        else:
            # no check provided, thus importing whole content
            include = None
        # convert csv to json
        data = self.conversion.converttojson(data=self.files.getCsv(),
                                             header=include)
        error_log += self.processContentCreation(data=data)
        # map old and new UID in memory
        self.mapping.mapNewUID(data)
        # reindex matched paths and drop unmatched existing content
        # (was accidentally invoked twice back to back; once suffices)
        self.reindexMatchedTraversalPaths()
        error_log += self.deleteNoMatchingContent()
        # deserialize
        for obj_data in data:
            path_ = obj_data.get('path', None)
            if not path_:
                # log path_ itself: indexing obj_data['path'] here raised
                # KeyError whenever the key was missing entirely
                error_log += ('pathError upon deserializing the content '
                              'for {arg} \n'.format(arg=path_))
                continue
            obj_absolute_path = "/".join(
                self.getobjpath(path_.split(os.sep)))
            # NOTE(review): both the absolute path and the raw csv path are
            # checked against matchedTraversalPaths — confirm which form the
            # set actually stores; both checks kept to preserve behavior
            if obj_absolute_path not in self.matchedTraversalPaths:
                continue
            if path_ not in self.matchedTraversalPaths:
                continue
            # get blob content into json data
            obj_data, temp_log = self.conversion.fillblobintojson(
                obj_data, self.files.getFiles(), self.mapping)
            error_log += temp_log
            # os.sep is preferrable to support multiple filesystem
            # return context of object (debug print statement removed)
            object_context = self.getobjcontext(obj_data['path'].split(
                os.sep))
            # all import error will be logged back
            if object_context:
                error_log += self.deserialize(object_context, obj_data)
            else:
                error_log += 'Error while attempting to update {arg}\n'.format(
                    arg=obj_data['path'])
        self.request.RESPONSE.setHeader('content-type',
                                        'application/text; charset=utf-8')
        cd = 'attachment; filename=import-log.txt'
        self.request.RESPONSE.setHeader('Content-Disposition', cd)
        return error_log
    else:
        raise ImportExportError('Invalid Request Method')