def add_records_to_sync(self, folder_path):
    """Load sync records from ``data.json`` inside *folder_path*, store each
    one in the sync DB, and best-effort notify the distribution list.

    :param folder_path: folder holding the unzipped import (must contain data.json)
    :raises ValueError: when a record in the JSON payload is ``None``
    """
    toolJsonData = FileUtils.returnJsonFromFiles(folder_path, "data.json")
    SyncHelperService.validate_sync_data_from_json(toolJsonData)
    sync_id = None
    for rec in toolJsonData:
        if rec is None:
            # BUG FIX: the original raised ``ValueError(" No data found for :")``
            # and then tried ``+ folder_path`` AFTER the raise (dead code), so the
            # folder name never reached the message. Also, the original checked
            # ``rec is not None`` only after calling rec.get(), which would have
            # raised AttributeError first — the guard must come before any access.
            raise ValueError(" No data found for :" + folder_path)
        sync_id = rec.get("sync_id")
        # THIS IS USED IN CLEANER SERVICES
        rec["stored_folder_name"] = folder_path
        idd = self.syncDb.add_sync(rec)
        print(" New record was added in Sync with _id :" + str(idd)
              + " and type :" + rec.get("type")
              + " and sync_id :" + rec.get("sync_id"))
    try:
        # Distribution_list can be empty; full_sync_flag is mandatory.
        full_sync_flag, distribution_list = \
            SyncHelperService.get_distribution_list_and_status(self.result)
        # NOTE(review): ``mailer`` is not defined in this scope — presumably a
        # module-level object; confirm it is in scope where this method lives.
        SyncHelperService.add_notify(sync_id, distribution_list, mailer)
    except Exception as e_value:  # catch *all* exceptions — notification is best-effort
        print('checkPendingImports: An email will not be sent due to error :' + str(e_value))
def checkPendingImports(self):
    """Checking Pending Import

    Scans ``self.current_import_path`` for manifest zips not yet processed
    (name contains ``DPM_tools_manifest`` and does not end in ``_done.zip``),
    unzips each one, replaces the current distributions with its records, and
    renames the zip to ``*_done.zip`` — or ``*_failed_as_..._done.zip`` when a
    single file fails, so processing continues with the next file.
    """
    try:
        # NOTE: the original guarded ``onlyfiles is not None`` — a list
        # comprehension is never None, so the guard was dead and is dropped.
        onlyfiles = [
            f for f in listdir(self.current_import_path)
            if isfile(join(self.current_import_path, f))
            and "DPM_tools_manifest" in str(f)
            and not str(f).endswith("_done.zip")
        ]
        if not onlyfiles:
            print("No pending zip files to process")
            return
        for selected_file in onlyfiles:
            try:
                file_path = join(self.current_import_path, selected_file)
                file_name = os.path.basename(file_path)
                file_name_without_ext = os.path.splitext(file_name)[0]
                print(" Processing file :" + file_path)
                # Hoisted: the original recomputed this join six times.
                base = join(self.current_import_path, file_name_without_ext)
                # Clean leftovers of any previous run of this same zip.
                if os.path.isfile(base + '_done.zip'):
                    print(base + '_done.zip' + ' was found. Deleting it')
                    os.remove(base + '_done.zip')
                if os.path.exists(base):
                    print(base + ' was found. Deleting it')
                    shutil.rmtree(base)
                print('checkPendingImports : Am processing ' + file_path)
                folder_path = os.path.normpath(FileUtils.unzipImportFile(file_path))
                toolJsonData = FileUtils.returnJsonFromFiles(folder_path, 'data.json')
                SyncHelperService.validate_sync_data_from_json(toolJsonData, False)
                generalJsonData = FileUtils.returnJsonFromFiles(folder_path, "generalData.json")
                if generalJsonData is None:
                    raise ValueError(
                        "generalData.json was not found inside the zip file")
                # Imports fully replace the current distribution set.
                self.distributionSync.CancelAllDistributions()
                for rec in toolJsonData:
                    rec = self.updatePaths(
                        join(self.current_import_small_path,
                             os.path.basename(folder_path)), rec)
                    # THIS IS USED IN CLEANER SERVICES
                    rec["stored_folder_name"] = folder_path
                    idd = self.distributionSync.AddDistribution(rec)
                    print(" New tool " + rec.get("tool_data").get("name")
                          + " was added in Sync with _id :" + str(idd))
                try:
                    self.add_notify(self.result.get("distribution_list"))
                except Exception as e_value:  # catch *all* exceptions — email is best-effort
                    print('Email will not be sent due to error :' + str(e_value))
                done_path = join(self.current_import_path,
                                 file_name_without_ext + "_done.zip")
                FileUtils.renameFile(file_path, done_path)
                print('checkPendingImports: Am done processing ' + done_path)
            except Exception as e_value:  # catch *all* exceptions — skip this file, keep going
                print('checkPendingImports: File :' + str(file_path)
                      + ' was skipped due to error ' + str(e_value))
                FileUtils.renameFile(
                    file_path,
                    join(self.current_import_path,
                         file_name_without_ext + "_failed_as_"
                         + str(e_value).replace(" ", "_") + "_done.zip"))
    except Exception as e_value:  # catch *all* exceptions
        traceback.print_exc()
        print('checkPendingImports: Error while unzipping pending files :'
              + str(e_value))
def upload_manual_sync_file():
    """Flask endpoint: accept an uploaded sync zip, register its records and
    (optionally) process them immediately.

    Expects a ``file`` part in the multipart form. Optional form fields:
    ``callback_url`` (copied onto every record) and ``skip_process_ind``
    ("true" = upload only, skip processing).

    Returns a ``(json, 200)`` tuple on success. On any failure the saved
    file, the unzipped folder and the already-inserted sync records are all
    rolled back and the exception is re-raised.
    """
    file_path = None
    folder_path = None
    try:
        sync_id = None
        inserted_ids = []
        # Get the uploaded file (renamed from ``file``, which shadows a builtin).
        upload = request.files['file']
        if upload is None:
            raise ValueError("No file selected")
        # BUG FIX: the original stored this boolean into ``filename`` and then
        # tested ``filename not in [True]`` — same effect, written clearly.
        has_zip_ext = ('.' in upload.filename
                       and upload.filename.rsplit('.', 1)[1] in ['zip'])
        if not has_zip_ext:
            raise Exception("Invalid file .Please select file of type 'zip'")
        # Make the filename safe, remove unsupported chars.
        filename = secure_filename(upload.filename)
        file_name_without_ext = filename.split(".")[0]
        import_path = str(import_full_path)
        temp_folder_path = str(import_full_path + '/' + file_name_without_ext)
        if os.path.isfile(temp_folder_path + "_done.zip") or os.path.exists(temp_folder_path):
            raise Exception("This file was already requested")
        folder_path = temp_folder_path
        file_path = str(import_full_path + '/' + filename)
        if os.path.isfile(file_path):
            os.remove(file_path)  # stale upload from a previous attempt
        upload.save(file_path)
        folder_path = os.path.normpath(FileUtils.unzipImportFile(file_path))
        toolJsonData = FileUtils.returnJsonFromFiles(folder_path, "data.json")
        SyncHelperService.validate_sync_data_from_json(toolJsonData)
        for rec in toolJsonData:
            # THIS IS USED IN CLEANER SERVICES
            rec["stored_folder_name"] = folder_path
            if request.form.get('callback_url'):
                rec["callback_url"] = request.form.get('callback_url')
            inserted_ids.append(syncDb.add_sync(rec))
            if not sync_id:
                sync_id = rec.get("sync_id")  # first record's sync_id wins
        FileUtils.renameFile(
            file_path,
            join(import_path, os.path.splitext(filename)[0] + "_done.zip"))
        if request.form.get("skip_process_ind", "false").lower() == "true":
            return jsonify(json.loads(dumps({
                "result": "success",
                "message": "File uploaded successfully.",
                "data": sync_id}))), 200
        try:
            syncService.job_function()
        except Exception as e:  # catch *all* exceptions — processing is best-effort here
            print(str(e))
        sync_data = syncService.analyse_sync_details(sync_id, False)
        # BUG FIX: wrap the counters in str() — if analyse_sync_details returns
        # ints (likely for counts), the original bare concatenation raised
        # TypeError instead of returning the success response.
        return jsonify(json.loads(dumps({
            "result": "success",
            "message": "File was uploaded successfully. "
                       + str(sync_data.get("added"))
                       + " entities were processed.Success: "
                       + str(sync_data.get("success_count"))
                       + " Failed: " + str(sync_data.get("failed_count")),
            "data": sync_data.get("data")}))), 200
    except Exception:  # catch *all* exceptions: roll back everything, then re-raise
        if file_path is not None and os.path.isfile(file_path):
            os.remove(file_path)
        if folder_path is not None:
            if os.path.exists(folder_path):
                shutil.rmtree(folder_path)
            if os.path.isfile(folder_path + "_done.zip"):
                os.remove(folder_path + "_done.zip")
        for ids in inserted_ids:
            syncDb.remove_sync(str(ids))
        # BUG FIX: bare ``raise`` preserves the original traceback;
        # ``raise e`` truncated it.
        raise