# Assumes the usual module preamble: _logger = logging.getLogger(__name__),
# SUPERUSER_ID from openerp, StringIO, and DB bound to the database service
# exposing exp_list()/exp_dump().
def run_autobackup_database(self, cr, uid, context=None):
    _logger.info("Auto Backup Starts...")
    user = self.browse(cr, SUPERUSER_ID, uid, context)
    host = '127.0.0.1'
    port = '8021'
    foldername = ''
    # company setting expected in the form ftp://host:port/folder
    ftp_url = (user.company_id.document_ftp_url).replace('ftp://', '')
    ftp_user = user.company_id.document_ftp_user
    ftp_passwd = user.company_id.document_ftp_passwd
    for url in ftp_url.split('/'):
        if url.find(':') > 0:
            host, port = url.split(':')[0], url.split(':')[1]
        else:
            foldername += '/' + url
    db_list = DB.exp_list()
    for db in db_list:
        # wrap the dump in StringIO so it can be streamed as a file-like
        # object by the FTP transfer
        backup_db = StringIO(DB.exp_dump(db))
        values = {
            'host': str(host),
            'port': str(port),
            'timeout': 10.0,
            'foldername': foldername,
            'backup_db': backup_db,
            'ftp_user': ftp_user,
            'ftp_passwd': ftp_passwd,
            'db_name': db,
        }
        self.get_ftp(cr, uid, values, context)
    _logger.info("Auto Backup Completed...")
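# get_ftp() is called above but not defined in these snippets. A minimal
# sketch of what it might do, assuming the 'values' dict built by
# run_autobackup_database() and Python 2's standard ftplib; the stored file
# name ('<db>.dump') and the absence of retries are illustrative only.
from ftplib import FTP
from StringIO import StringIO


def get_ftp(self, cr, uid, values, context=None):
    # Hypothetical helper: upload one database dump over FTP.
    ftp = FTP()
    ftp.connect(values['host'], int(values['port']), values['timeout'])
    ftp.login(values['ftp_user'], values['ftp_passwd'])
    if values['foldername']:
        ftp.cwd(values['foldername'])
    dump = values['backup_db']
    if not hasattr(dump, 'read'):
        # some exp_dump() variants return a plain string; storbinary()
        # needs a file-like object
        dump = StringIO(dump)
    ftp.storbinary('STOR %s.dump' % values['db_name'], dump)
    ftp.quit()
    return True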
def backup_database(self):
    data = {}
    try:
        # exp_dump() returns the dump base64-encoded, so decode it before
        # handing it to the transport
        db_dump = base64.b64decode(db.exp_dump(self.name))
        filename = "%(db_name)s %(timestamp)s.zip" % {
            'db_name': self.name,
            'timestamp': datetime.utcnow().strftime("%Y-%m-%d_%H-%M-%SZ"),
        }
        self._transport_backup(db_dump, filename=filename)
        data['status'] = 'success'
    except Exception as e:
        _logger.exception(
            'An error happened during database %s backup' % self.name)
        data['status'] = 'fail'
        data['message'] = str(e)
    return data
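# _transport_backup() is likewise not defined in these snippets; such helpers
# usually push the dump to local disk, FTP or SFTP. A minimal local-disk
# sketch; 'backup_directory' is an assumed attribute, not part of the
# original code.
import os


def _transport_backup(self, db_dump, filename):
    # Hypothetical transport: write the already-decoded dump to a folder.
    backup_dir = getattr(self, 'backup_directory', None) or '/tmp/odoo-backups'
    if not os.path.isdir(backup_dir):
        os.makedirs(backup_dir)
    with open(os.path.join(backup_dir, filename), 'wb') as fh:
        fh.write(db_dump)
    return True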
# create the folder where backups will be stored
try:
    os.makedirs(self.backups_path)
except Exception as e:
    error = ("Could not create folder %s for backups. "
             "This is what we get:\n%s" % (self.backups_path, e))
    _logger.warning(error)
    return {'error': error}

backup_name = '%s_%s_%s.zip' % (
    self.name, bu_type, now.strftime('%Y%m%d_%H%M%S'))
backup_path = os.path.join(self.backups_path, backup_name)
backup = open(backup_path, 'wb')
# backup
try:
    backup.write(base64.b64decode(db_ws.exp_dump(self.name)))
except Exception:
    error = ('Unable to dump Database. If you are working in an '
             'instance with "workers" then you can try '
             'restarting service.')
    _logger.warning(error)
    return {'error': error}
else:
    backup.close()
    self.backup_ids.create({
        'database_id': self.id,
        'name': backup_name,
        'path': self.backups_path,
        'date': now,
        'type': bu_type,
    })
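# Hedged sketch of the surrounding context the fragment above relies on;
# every name except exp_dump() is an assumption, not taken from the snippets,
# and the import path of the database service varies with the Odoo release.
import os
import base64
import logging
from datetime import datetime

_logger = logging.getLogger(__name__)

try:
    # 8.0 location of the service used as ``db_ws``; older releases expose
    # it elsewhere (e.g. openerp.service.web_services)
    from openerp.service import db as db_ws
except ImportError:
    db_ws = None

# 'now' is the timestamp embedded in the backup file name and stored on the
# record, 'bu_type' distinguishes e.g. manual from automatic backups
now = datetime.now()
bu_type = 'manual'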