def clean_sync(self, now):
    sync_data_to_archive = []
    sync_archive_dir = os.path.join(archives_full_path, "sync",
                                    now.strftime("%d%m%Y%H%M%S"))
    sync_file_name = os.path.join(sync_archive_dir,
                                  now.strftime("%d%m%Y%H%M%S") + ".json")
    for rec in self.sync.sync_all():
        # Archive only records older than the retention window whose sync
        # requests have all finished as "success" or "skipped".
        if (now - rec['_id'].generation_time.replace(tzinfo=None)
                ).days > self.olderthandays:
            if set(list(self.sync.sync_distinct_status(
                    rec['sync_id']))) <= set(["success", "skipped"]):
                sync_data_to_archive.append(
                    json.loads(Utils.JSONEncoder().encode(rec)))
                print "Document of Collection Sync " + str(
                    rec['_id']) + " was archived"
    if sync_data_to_archive:
        # Dump the archived documents to JSON, zip the directory and remove
        # the unzipped copy, then delete the originals from the collection.
        FileUtils.mkdirs([sync_archive_dir], True)
        FileUtils.jsontoFile(sync_file_name, sync_data_to_archive)
        FileUtils.createZipFile(sync_archive_dir, sync_archive_dir)
        shutil.rmtree(sync_archive_dir, True)
        for rec in sync_data_to_archive:
            print "Document of Collection Sync " + str(
                rec['_id']) + " was removed"
            self.sync.remove_sync(str(rec['_id']))
            # Only the stored folders are deleted here; the files are not
            # required because they were already cleaned as part of
            # clean_old_data.
            folder_list_to_clean = [import_full_path]
            if rec.get("stored_folder_name"):
                self.clean_old_data(
                    folder_list_to_clean,
                    os.path.basename(rec.get("stored_folder_name")), 0)
def validate_account_id(account_id):
    result = None
    if Utils.is_valid_obj_id(account_id):
        result = AccountsDb.get_account(account_id)
    else:
        result = AccountsDb.get_account_by_name(account_id)
    if result is None:
        raise Exception("Account Id provided is invalid")
    return str(result.get("_id"))
def validate_machine_type(machine_type):
    result = None
    if Utils.is_valid_obj_id(machine_type):
        result = machineTypeDb.get_machine_type_by_id(machine_type)
    else:
        result = machineTypeDb.get_machine_type_by_name(machine_type)
    if result is None:
        raise Exception("Machine Type provided is invalid")
    return str(result.get("_id"))
def get(self, oid):
    """ Get a single auditing record by id """
    audit_data = auditdb.get_audit_by_id(oid)
    return json.loads(Utils.JSONEncoder().encode({
        "result": "success",
        "data": audit_data
    })), 200
def get(self, _id=None):
    for plugin in list_plugins().get("all")[0].get("data"):
        if str(_id) == str(plugin.get("_id").get("$oid")):
            plugin_name = plugin.get("plugin_name")
            # Attach the plugin source and its static path, depending on the
            # plugin type encoded in the plugin name.
            if "DeploymentPlugin" in plugin_name:
                with open(os.path.join(deployment_plugin_full_path,
                                       plugin_name) + ".py", "r") as source:
                    plugin["file_contents"] = source.readlines()
                plugin["file_path"] = os.path.join(
                    deployment_plugin_static_path, plugin_name) + ".py"
            elif "SyncPlugin" in plugin_name:
                with open(os.path.join(sync_plugin_full_path,
                                       plugin_name) + ".py", "r") as source:
                    plugin["file_contents"] = source.readlines()
                plugin["file_path"] = os.path.join(
                    sync_plugin_static_path, plugin_name) + ".py"
            elif "RepositoryPlugin" in plugin_name:
                with open(os.path.join(repository_plugin_full_path,
                                       plugin_name) + ".py", "r") as source:
                    plugin["file_contents"] = source.readlines()
                plugin["file_path"] = os.path.join(
                    repository_plugin_static_path, plugin_name) + ".py"
            data = exitPointPluginsDB.get_by_plugin_name(plugin_name)
            if data:
                plugin.update(data)
            return json.loads(Utils.JSONEncoder().encode(
                {"result": "success", "message": "", "data": plugin})), 200
    raise Exception("No Plugin with _id: " + str(_id) + " was found")
def get(self, sync_id):
    filter_condition = {}
    limit = int(request.args.get('perpage', "30"))
    page = int(request.args.get('page', "0"))
    skip = page * limit
    if request.args.get('status', None):
        status_list = request.args.get("status").split(",")
        filter_condition["status"] = {"$in": status_list}
    if request.args.get('operation', None):
        operation_list = request.args.get("operation").split(",")
        filter_condition["operation"] = {"$in": operation_list}
    filter_condition["sync_id"] = sync_id
    sync_data = syncDb.get_sync_by_filter(filter_condition, skip, limit)

    # Count every record of the sync request (unfiltered) per status.
    new = 0
    retry = 0
    compared = 0
    success = 0
    failed = 0
    skipped = 0
    sync_data.rewind()
    total_data = syncDb.get_sync_by_sync_id(sync_id)
    total = len(list(total_data))
    if total == 0:
        raise Exception(
            "No sync request found with the sync id provided: " + sync_id)
    total_data.rewind()
    for sync in total_data:
        status_value = sync.get("status").lower()
        if status_value == "new":
            new += 1
        elif status_value == "retry":
            retry += 1
        elif status_value == "compared":
            compared += 1
        elif status_value == "success":
            success += 1
        elif status_value == "failed":
            failed += 1
        elif status_value == "skipped":
            skipped += 1
    if limit == 0:
        limit = total

    # Derive the overall status of the sync request from the per-status
    # counts: any failure wins, anything still in flight means "running".
    status = "success"
    if failed > 0:
        status = "failed"
    elif new + retry + compared > 0:
        status = "running"
    return json.loads(Utils.JSONEncoder().encode({
        "result": "success",
        "data": {"data": list(sync_data), "new": new, "retry": retry,
                 "compared": compared, "success": success, "failed": failed,
                 "skipped": skipped, "total": total, "status": status,
                 "page": page,
                 "page_total": math.ceil(total / float(limit))}})), 200
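# A minimal, self-contained sketch (not part of the original module) of the
# per-status roll-up above, using collections.Counter instead of one counter
# variable per status. The sample records are hypothetical; the overall status
# is derived the same way: "failed" wins, anything still in flight means
# "running", otherwise "success".
from collections import Counter

def summarize(records):
    counts = Counter(rec.get("status", "").lower() for rec in records)
    if counts["failed"] > 0:
        status = "failed"
    elif counts["new"] + counts["retry"] + counts["compared"] > 0:
        status = "running"
    else:
        status = "success"
    return counts, status

if __name__ == "__main__":
    sample = [{"status": "success"}, {"status": "skipped"}, {"status": "retry"}]
    print(summarize(sample))  # overall status is 'running' (one record in retry)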
def createTable(obj: DBEntity.DbEntity):
    classProperties = Utils.getPublicProperties(obj)
    className = obj.__class__.__name__
    columnsString = ""
    index = 0
    for prop in classProperties:
        # The last column is not followed by a comma.
        if len(classProperties) - 1 == index:
            columnsString += f"{prop} text "
            break
        columnsString += f"{prop} text, "
        index += 1
    sql = f"CREATE TABLE {className} ({columnsString})"
    c = __conn.cursor()
    c.execute(sql)
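# A minimal, self-contained sketch (not part of the original module) of the SQL
# that createTable builds: one "<property> text" column per public attribute of
# the entity. "Server" is a hypothetical stand-in for a DbEntity subclass,
# vars() stands in for Utils.getPublicProperties, and an in-memory sqlite3
# connection replaces the module-level __conn.
import sqlite3


class Server:
    def __init__(self):
        self.hostname = None
        self.ip_address = None
        self.port = None


if __name__ == "__main__":
    obj = Server()
    props = [p for p in vars(obj) if not p.startswith("_")]
    columns = ", ".join(f"{p} text" for p in props)
    sql = f"CREATE TABLE {obj.__class__.__name__} ({columns})"
    print(sql)  # CREATE TABLE Server (hostname text, ip_address text, port text)
    conn = sqlite3.connect(":memory:")
    conn.cursor().execute(sql)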
def get(self):
    """ Get all auditing records """
    limit = int(request.args.get('perpage', "0"))
    page = int(request.args.get('page', "0"))
    user = request.args.get('user', None)
    api_type = request.args.get('apitype', None)
    response_status_code = request.args.get('responsestatuscode', None)
    request_type = request.args.get('requesttype', None)
    # Build the query filter; a value of "any" disables the filter for that
    # field, otherwise the comma-separated values become an "$in" condition.
    filter = {}
    if user:
        user = user.split(",")
        if "any" not in user:
            filter["user"] = {}
            filter["user"]["$in"] = user
    if api_type:
        api_type = api_type.split(",")
        if "any" not in api_type:
            filter["api_type"] = {}
            filter["api_type"]["$in"] = api_type
    if response_status_code:
        response_status_code = response_status_code.split(",")
        if "any" not in response_status_code:
            response_status_code = [int(code) for code in response_status_code]
            filter["response_status_code"] = {}
            filter["response_status_code"]["$in"] = response_status_code
    if request_type:
        request_type = request_type.split(",")
        if "any" not in request_type:
            filter["request_type"] = {}
            filter["request_type"]["$in"] = request_type
    skip = page * limit
    return json.loads(Utils.JSONEncoder().encode({
        "result": "success",
        "data": list(auditdb.get_all(skip, limit, filter)),
        "message": "Auditing records were retrieved successfully"
    })), 200
def clean_auditing(self, now):
    auditing_data_to_archive = []
    auditing_archive_dir = os.path.join(archives_full_path, "auditing",
                                        now.strftime("%d%m%Y%H%M%S"))
    auditing_file_name = os.path.join(
        auditing_archive_dir, now.strftime("%d%m%Y%H%M%S") + ".json")
    for rec in self.auditingDB.get_all():
        # Archive only records older than the retention window.
        if (now - rec['_id'].generation_time.replace(tzinfo=None)
                ).days > self.olderthandays:
            auditing_data_to_archive.append(
                json.loads(Utils.JSONEncoder().encode(rec)))
            print "Document of Collection Auditing " + str(
                rec['_id']) + " was archived"
    if auditing_data_to_archive:
        # Write the archived documents to JSON, zip the directory, drop the
        # unzipped copy and finally delete the originals from the collection.
        FileUtils.mkdirs([auditing_archive_dir], True)
        FileUtils.jsontoFile(auditing_file_name, auditing_data_to_archive)
        FileUtils.createZipFile(auditing_archive_dir, auditing_archive_dir)
        shutil.rmtree(auditing_archive_dir, True)
        self.auditingDB.remove_all_older_than_date(self.olderthandays)
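# A minimal, self-contained sketch (not part of the original module) of the
# archive-then-purge pattern used by clean_auditing and clean_sync, using only
# the standard library in place of FileUtils: dump the expired documents to a
# timestamped JSON file, zip the directory, then remove the unzipped copy. The
# directory names and sample documents below are hypothetical.
import json
import os
import shutil
from datetime import datetime

def archive_documents(archive_root, documents):
    stamp = datetime.now().strftime("%d%m%Y%H%M%S")
    archive_dir = os.path.join(archive_root, stamp)
    os.makedirs(archive_dir)
    with open(os.path.join(archive_dir, stamp + ".json"), "w") as archive_file:
        json.dump(documents, archive_file)
    # shutil.make_archive writes "<archive_dir>.zip" next to the directory,
    # comparable to FileUtils.createZipFile followed by shutil.rmtree.
    zip_path = shutil.make_archive(archive_dir, "zip", archive_dir)
    shutil.rmtree(archive_dir, True)
    return zip_path

if __name__ == "__main__":
    print(archive_documents("/tmp/archives/auditing",
                            [{"_id": "507f1f77bcf86cd799439011", "user": "admin"}]))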