def get_queryset(self, *args, **kwargs):
    """Reconcile ConfigBackup db records with the files on disk.

    For every record: drop it if its backing file is missing; if the file
    exists but is not gzipped, compress it in place and update the record
    (filename, md5sum, size); finally drop any record whose stored md5sum
    no longer matches the file's actual checksum.

    :return: queryset of all surviving ConfigBackup objects, newest first.
    """
    for cbo in ConfigBackup.objects.all():
        fp = os.path.join(ConfigBackup.cb_dir(), cbo.filename)
        if not os.path.isfile(fp):
            # Backing file is gone: remove the stale db record and skip
            # the gzip probe / md5 verification below, both of which
            # would raise on the missing path.
            cbo.delete()
            continue
        try:
            # Cheap gzip probe: reading the whole file via gzip.open
            # raises IOError if the content is not actually gzipped.
            with gzip.open(fp, "rb") as f:
                f.read()
        except IOError as e:
            logger.exception(e)
            logger.info(
                "The file {} is not gzipped, so compress it now.".format(
                    cbo.filename
                )
            )
            try:
                o, err, rc = run_command(["/usr/bin/gzip", fp], log=True)
            except Exception as e:
                # gzip returns rc == 2 if the destination file already exists
                # so let's return an explicit error message to the user for this case.
                # NOTE(review): assumes run_command raises an exception type
                # carrying an `rc` attribute (e.g. CommandException) — a plain
                # Exception would AttributeError here; confirm upstream.
                if e.rc == 2:
                    e_msg = (
                        "A destination file for the config backup file with the same "
                        "name ({}) already exists. Please remove it and try again.".format(
                            fp
                        )
                    )
                    # Delete file from system
                    run_command(["/bin/rm", "-f", fp], log=True)
                else:
                    e_msg = (
                        "The backup config file ({}) couldn't be gzipped.\n"
                        "Reload the page to refresh the list of backups".format(fp)
                    )
                cbo.delete()
                # handle_exception raises, ending this iteration/request.
                handle_exception(Exception(e_msg), self.request)
            # Compression succeeded: point the record at the new .gz file
            # and refresh its checksum and size.
            gz_name = "{}.gz".format(cbo.filename)
            cbo.filename = gz_name
            fp = os.path.join(ConfigBackup.cb_dir(), cbo.filename)
            cbo.md5sum = md5sum(fp)
            cbo.size = os.stat(fp).st_size
            cbo.save()
        fp_md5sum = md5sum(fp)
        if fp_md5sum != cbo.md5sum:
            logger.error(
                "md5sum mismatch for {}. cbo: {} file: {}. "
                "Deleting dbo.".format(cbo.filename, cbo.md5sum, fp_md5sum)
            )
            cbo.delete()
    return ConfigBackup.objects.filter().order_by("-id")
def backup_config():
    """Dump the configured storageadmin and smart_manager models to a
    gzipped json file in the config-backup directory.

    Creates the backup directory on demand, writes both database dumps
    into one file, gzips it, and records the result as a ConfigBackup
    row (filename, md5sum, size).

    :return: the saved ConfigBackup object.
    """
    models = {'storageadmin': ['user', 'group', 'sambashare',
                               'sambacustomconfig', 'netatalkshare',
                               'nfsexport', 'nfsexportgroup',
                               'advancednfsexport', ],
              'smart_manager': ['service', ], }
    # Flatten to the "app.model" labels that dumpdata expects.
    model_list = ['%s.%s' % (a, m) for a in models for m in models[a]]
    logger.debug('model list = %s' % model_list)
    filename = ('backup-%s.json' % datetime.now().strftime('%Y-%m-%d-%H%M%S'))
    cb_dir = ConfigBackup.cb_dir()
    if not os.path.isdir(cb_dir):
        os.mkdir(cb_dir)
    fp = os.path.join(cb_dir, filename)
    with open(fp, 'w') as dfo:
        # Default db dump first, then the smart_manager db dump, separated
        # by a newline so both json documents land in one file.
        call_command('dumpdata', *model_list, stdout=dfo)
        dfo.write('\n')
        call_command('dumpdata', database='smart_manager', *model_list,
                     stdout=dfo)
    run_command(['/usr/bin/gzip', fp])
    gz_name = ('%s.gz' % filename)
    fp = os.path.join(cb_dir, gz_name)
    cbo = ConfigBackup(filename=gz_name, md5sum=md5sum(fp),
                       size=os.stat(fp).st_size)
    cbo.save()
    return cbo
def delete(self, request, backup_id):
    """Delete the config backup identified by backup_id.

    Removes the on-disk backup file when it exists, then drops the
    corresponding ConfigBackup db record.
    """
    with self._handle_exception(request):
        cbo = self._validate_input(backup_id, request)
        target = os.path.join(ConfigBackup.cb_dir(), cbo.filename)
        # The file may already be gone; only the db record is mandatory.
        if os.path.isfile(target):
            os.remove(target)
        cbo.delete()
        return Response()
def delete(self, request, backup_id):
    """Remove a config backup: its file (if present) and its db record."""
    with self._handle_exception(request):
        cbo = self._validate_input(backup_id, request)
        backup_path = os.path.join(ConfigBackup.cb_dir(), cbo.filename)
        # Tolerate an already-missing file; the record is removed either way.
        if os.path.isfile(backup_path):
            os.remove(backup_path)
        cbo.delete()
        return Response()
def get_queryset(self, *args, **kwargs):
    """Return all config backups after pruning stale db records.

    A ConfigBackup record is deleted when its backing file is missing,
    or when the file's md5sum no longer matches the stored checksum.

    :return: queryset of surviving ConfigBackup objects, newest first.
    """
    for cbo in ConfigBackup.objects.all():
        fp = os.path.join(ConfigBackup.cb_dir(), cbo.filename)
        if (not os.path.isfile(fp)):
            # File is gone: drop the record and skip the md5 check,
            # which would otherwise raise on the missing path.
            cbo.delete()
            continue
        fp_md5sum = md5sum(fp)
        if (fp_md5sum != cbo.md5sum):
            logger.error('md5sum mismatch for {}. cbo: {} file: {}. '
                         'Deleting dbo'.format(cbo.filename, cbo.md5sum,
                                               fp_md5sum))
            cbo.delete()
    return ConfigBackup.objects.filter().order_by('-id')
def get_queryset(self, *args, **kwargs):
    """Return all config backups after pruning stale db records.

    A ConfigBackup record is deleted when its backing file is missing,
    or when the file's md5sum no longer matches the stored checksum.

    :return: queryset of surviving ConfigBackup objects, newest first.
    """
    for cbo in ConfigBackup.objects.all():
        fp = os.path.join(ConfigBackup.cb_dir(), cbo.filename)
        if (not os.path.isfile(fp)):
            # File is gone: drop the record and skip the md5 check,
            # which would otherwise raise on the missing path.
            cbo.delete()
            continue
        fp_md5sum = md5sum(fp)
        if (fp_md5sum != cbo.md5sum):
            logger.error('md5sum mismatch for %s. cbo: %s file: %s. '
                         'Deleting dbo' % (cbo.filename, cbo.md5sum,
                                           fp_md5sum))
            cbo.delete()
    return ConfigBackup.objects.filter().order_by('-id')
def get_queryset(self, *args, **kwargs):
    """Return all config backups after pruning stale db records.

    A ConfigBackup record is deleted when its backing file is missing,
    or when the file's md5sum no longer matches the stored checksum.

    :return: queryset of surviving ConfigBackup objects, newest first.
    """
    for cbo in ConfigBackup.objects.all():
        fp = os.path.join(ConfigBackup.cb_dir(), cbo.filename)
        if not os.path.isfile(fp):
            # File is gone: drop the record and skip the md5 check,
            # which would otherwise raise on the missing path.
            cbo.delete()
            continue
        fp_md5sum = md5sum(fp)
        if fp_md5sum != cbo.md5sum:
            logger.error("md5sum mismatch for {}. cbo: {} file: {}. "
                         "Deleting dbo".format(cbo.filename, cbo.md5sum,
                                               fp_md5sum))
            cbo.delete()
    return ConfigBackup.objects.filter().order_by("-id")
def backup_config():
    """Dump the configured storageadmin and smart_manager models to a
    gzipped json file in the config-backup directory.

    Creates the backup directory on demand, writes both database dumps
    into a single file, gzips it, and records the result as a
    ConfigBackup row (filename, md5sum, size).

    :return: the saved ConfigBackup object.
    """
    models = {
        "storageadmin": [
            "user",
            "group",
            "sambashare",
            "sambacustomconfig",
            "netatalkshare",
            "nfsexport",
            "nfsexportgroup",
            "advancednfsexport",
            "rockon",
            "dcontainer",
            "dcustomconfig",
            "dimage",
            "dcontainerenv",
            "dcontainerlabel",
            "dvolume",
            "dport",
            "containeroption",
            "dcontainerlink",
            "dcontainerargs",
            "dcontainerdevice",
            "share",
        ],
        "smart_manager": ["service", "servicestatus", "taskdefinition"],
    }
    # Flatten to the "app.model" labels that dumpdata expects.
    model_list = ["{}.{}".format(a, m) for a in models for m in models[a]]
    filename = "backup-{}.json".format(
        datetime.now().strftime("%Y-%m-%d-%H%M%S"))
    cb_dir = ConfigBackup.cb_dir()
    if not os.path.isdir(cb_dir):
        os.mkdir(cb_dir)
    fp = os.path.join(cb_dir, filename)
    with open(fp, "w") as dfo:
        # Default db dump first, then the smart_manager db dump, separated
        # by a newline so both json documents land in one file.
        call_command("dumpdata", *model_list, stdout=dfo)
        dfo.write("\n")
        call_command("dumpdata", database="smart_manager", *model_list,
                     stdout=dfo)
    run_command(["/usr/bin/gzip", fp])
    gz_name = "{}.gz".format(filename)
    fp = os.path.join(cb_dir, gz_name)
    cbo = ConfigBackup(filename=gz_name, md5sum=md5sum(fp),
                       size=os.stat(fp).st_size)
    cbo.save()
    return cbo
def post(self, request, format=None):
    """Accept an uploaded config backup file.

    Rejects duplicates by filename, stores the upload via the model's
    file field, then records the file's md5sum and size on the new
    ConfigBackup row.
    """
    with self._handle_exception(request):
        filename = request.data['file-name']
        file_obj = request.data['file']
        if ConfigBackup.objects.filter(filename=filename).exists():
            msg = ('Config backup(%s) already exists. Uploading a '
                   'duplicate is not allowed.' % filename)
            handle_exception(Exception(msg), request)
        cbo = ConfigBackup.objects.create(filename=filename,
                                          config_backup=file_obj)
        cb_dir = ConfigBackup.cb_dir()
        if not os.path.isdir(cb_dir):
            os.mkdir(cb_dir)
        # Checksum and size are computed from the file as stored on disk.
        fp = os.path.join(cb_dir, filename)
        cbo.md5sum = md5sum(fp)
        cbo.size = os.stat(fp).st_size
        cbo.save()
        return Response(ConfigBackupSerializer(cbo).data)
def backup_config():
    """Dump the configured storageadmin and smart_manager models to a
    gzipped json file in the config-backup directory.

    Creates the backup directory on demand, writes both database dumps
    into one file, gzips it, and records the result as a ConfigBackup
    row (filename, md5sum, size).

    :return: the saved ConfigBackup object.
    """
    models = {'storageadmin': ['user', 'group', 'sambashare',
                               'sambacustomconfig', 'netatalkshare',
                               'nfsexport', 'nfsexportgroup',
                               'advancednfsexport', ],
              'smart_manager': ['service', ], }
    # Flatten to the "app.model" labels that dumpdata expects.
    model_list = ['%s.%s' % (a, m) for a in models for m in models[a]]
    logger.debug('model list = %s' % model_list)
    filename = ('backup-%s.json' % datetime.now().strftime('%Y-%m-%d-%H%M%S'))
    cb_dir = ConfigBackup.cb_dir()
    if not os.path.isdir(cb_dir):
        os.mkdir(cb_dir)
    fp = os.path.join(cb_dir, filename)
    with open(fp, 'w') as dfo:
        # Default db dump first, then the smart_manager db dump, separated
        # by a newline so both json documents land in one file.
        call_command('dumpdata', *model_list, stdout=dfo)
        dfo.write('\n')
        call_command('dumpdata', database='smart_manager', *model_list,
                     stdout=dfo)
    run_command(['/usr/bin/gzip', fp])
    gz_name = ('%s.gz' % filename)
    fp = os.path.join(cb_dir, gz_name)
    cbo = ConfigBackup(filename=gz_name, md5sum=md5sum(fp),
                       size=os.stat(fp).st_size)
    cbo.save()
    return cbo
def post(self, request, format=None):
    """Accept an uploaded config backup file.

    Rejects duplicates by filename, stores the upload via the model's
    file field, then records the file's md5sum and size on the new
    ConfigBackup row.
    """
    with self._handle_exception(request):
        filename = request.data['file-name']
        file_obj = request.data['file']
        if ConfigBackup.objects.filter(filename=filename).exists():
            msg = ('Config backup ({}) already exists. Uploading a '
                   'duplicate is not allowed.').format(filename)
            handle_exception(Exception(msg), request)
        cbo = ConfigBackup.objects.create(filename=filename,
                                          config_backup=file_obj)
        cb_dir = ConfigBackup.cb_dir()
        if not os.path.isdir(cb_dir):
            os.mkdir(cb_dir)
        # Checksum and size are computed from the file as stored on disk.
        fp = os.path.join(cb_dir, filename)
        cbo.md5sum = md5sum(fp)
        cbo.size = os.stat(fp).st_size
        cbo.save()
        return Response(ConfigBackupSerializer(cbo).data)