Esempio n. 1
0
    def create_backup(self, type='auto'):
        """Dump the current database to a zip, upload it to AWS S3 and
        register the backup in this model.

        :param type: backup type tag stored on the created record
                     (default ``'auto'``)
        """
        # print() call form works on both Python 2 and 3; the original
        # used the Python-2-only print statement.
        print('Call to create_backup()')

        # Only run on hosts whitelisted in the config parameter, if set.
        config = self.env['ir.config_parameter']
        hosts_str = config.get_param('sohovet.aws_hostname_list')
        hosts = hosts_str.split(',') if hosts_str else False
        if hosts and socket.gethostname() not in [
                host.strip() for host in hosts
        ]:
            return

        # e.g. mydb_2020-01-01_12-00-00.zip — spaces and colons are not
        # filesystem friendly, so they are replaced.
        name = ('%s_%s.zip' %
                (self._cr.dbname, fields.Datetime.now())).replace(' ',
                                                                  '_').replace(
                                                                      ':', '-')

        self.clean_db()

        # Dump into a temporary file, rewind it, and upload from the
        # start.  ``finally`` guarantees the handle is closed even when
        # the dump or the upload raises (the original leaked it).
        backup = tempfile.TemporaryFile()
        try:
            db.dump_db(self._cr.dbname, backup)
            backup.seek(0)
            k = self.get_aws_obj(name)
            k.set_contents_from_file(backup)
        finally:
            backup.close()

        self.create({'name': name, 'type': type})
    def action_backup(self):
        """Run selected backups.

        The database is dumped once and cached; the cache is then copied
        to every configured local folder and SFTP server.  Records whose
        backup succeeded are collected so their old files can be cleaned
        up at the end.
        """
        backup = None
        filename = self.filename(datetime.now())
        successful = self.browse()

        # Start with local storage
        for rec in self.filtered(lambda r: r.method == "local"):
            with rec.backup_log():
                # Directory must exist
                try:
                    os.makedirs(rec.folder)
                except OSError:
                    pass

                with open(os.path.join(rec.folder, filename),
                          'wb') as destiny:
                    # Copy the cached backup
                    if backup:
                        # 'rb': the dump is binary data — text mode fails
                        # on Python 3 and corrupts it on Windows.
                        with open(backup, 'rb') as cached:
                            shutil.copyfileobj(cached, destiny)
                    # Generate new backup
                    else:
                        with rec.custom_tempdir():
                            db.dump_db(self.env.cr.dbname, destiny)
                        backup = backup or destiny.name
                successful |= rec

        # Ensure a local backup exists if we are going to write it remotely
        sftp = self.filtered(lambda r: r.method == "sftp")
        if sftp:
            if backup:
                # 'rb' for the same binary-mode reason as above.
                cached = open(backup, 'rb')
            else:
                cached = db.dump_db(self.env.cr.dbname, None)

            with cached:
                for rec in sftp:
                    with rec.backup_log():
                        # Rewind before every upload: a previous upload
                        # consumed the stream, so without this every
                        # upload after the first would be empty.
                        # (Assumes db.dump_db(..., None) returns a
                        # seekable file object — confirm.)
                        cached.seek(0)
                        with rec.sftp_connection() as remote:
                            # Directory must exist
                            try:
                                remote.makedirs(rec.folder)
                            except pysftp.ConnectionException:
                                pass

                            # Copy cached backup to remote server
                            with remote.open(
                                    os.path.join(rec.folder, filename),
                                    "wb") as destiny:
                                shutil.copyfileobj(cached, destiny)
                        successful |= rec

        # Remove old files for successful backups
        successful.cleanup()
Esempio n. 3
0
    def action_backup(self):
        """Run selected backups.

        The database is dumped once and cached; the cache is then copied
        to every configured local folder and SFTP server.  Records whose
        backup succeeded are collected so their old files can be cleaned
        up at the end.
        """
        backup = None
        filename = self.filename(datetime.now())
        successful = self.browse()

        # Start with local storage
        for rec in self.filtered(lambda r: r.method == "local"):
            with rec.backup_log():
                # Directory must exist
                try:
                    os.makedirs(rec.folder)
                except OSError:
                    pass

                with open(os.path.join(rec.folder, filename),
                          'wb') as destiny:
                    # Copy the cached backup
                    if backup:
                        # 'rb': the dump is binary data — text mode fails
                        # on Python 3 and corrupts it on Windows.
                        with open(backup, 'rb') as cached:
                            shutil.copyfileobj(cached, destiny)
                    # Generate new backup
                    else:
                        db.dump_db(self.env.cr.dbname, destiny)
                        backup = backup or destiny.name
                successful |= rec

        # Ensure a local backup exists if we are going to write it remotely
        sftp = self.filtered(lambda r: r.method == "sftp")
        if sftp:
            if backup:
                # 'rb' for the same binary-mode reason as above.
                cached = open(backup, 'rb')
            else:
                cached = tempfile.TemporaryFile()
                db.dump_db(self.env.cr.dbname, cached)

            with cached:
                for rec in sftp:
                    with rec.backup_log():
                        # Rewind before every upload: a freshly written
                        # temp file is positioned at EOF and a previous
                        # upload consumed the stream — without this the
                        # remote copy would be empty.
                        cached.seek(0)
                        with rec.sftp_connection() as remote:
                            # Directory must exist
                            try:
                                remote.makedirs(rec.folder)
                            except pysftp.ConnectionException:
                                pass

                            # Copy cached backup to remote server
                            with remote.open(
                                    os.path.join(rec.folder, filename),
                                    "wb") as destiny:
                                shutil.copyfileobj(cached, destiny)
                        successful |= rec

        # Remove old files for successful backups
        successful.cleanup()
Esempio n. 4
0
    def create_backup(self, type='auto'):
        """Dump the current database to a zip, upload it to AWS S3 and
        register the backup in this model.

        :param type: backup type tag stored on the created record
                     (default ``'auto'``)
        """
        # print() call form works on both Python 2 and 3; the original
        # used the Python-2-only print statement.
        print('Call to create_backup()')

        # Only run on hosts whitelisted in the config parameter, if set.
        config = self.env['ir.config_parameter']
        hosts_str = config.get_param('sohovet.aws_hostname_list')
        hosts = hosts_str.split(',') if hosts_str else False
        if hosts and socket.gethostname() not in [host.strip() for host in hosts]:
            return

        # e.g. mydb_2020-01-01_12-00-00.zip — spaces and colons are not
        # filesystem friendly, so they are replaced.
        name = ('%s_%s.zip' % (self._cr.dbname, fields.Datetime.now())).replace(' ', '_').replace(':', '-')

        self.clean_db()

        # Dump into a temporary file, rewind it, and upload from the
        # start.  ``finally`` guarantees the handle is closed even when
        # the dump or the upload raises (the original leaked it).
        backup = tempfile.TemporaryFile()
        try:
            db.dump_db(self._cr.dbname, backup)
            backup.seek(0)
            k = self.get_aws_obj(name)
            k.set_contents_from_file(backup)
        finally:
            backup.close()

        self.create({'name': name, 'type': type})
Esempio n. 5
0
    def schedule_backup(self, cr, user, context=None, dummy=""):
        """Run every backup configuration: dump each database to its
        local backup directory, optionally mirror the dumps to an SFTP
        server, and purge old local files.

        :param cr: database cursor
        :param user: id of the user running the scheduled action
        :param context: optional context dict (the original used the
            mutable default ``{}``, which is shared across calls)
        :param dummy: unused; kept for interface compatibility
        """
        if context is None:
            context = {}
        conf_ids = self.search(cr, user, [])
        confs = self.browse(cr, user, conf_ids)
        for rec in confs:
            db_list = self.get_db_list(cr, user, [], rec.host, rec.port)
            if rec.name in db_list:
                if not os.path.isdir(rec.bkp_dir):
                    os.makedirs(rec.bkp_dir)
                # Dump file name, e.g. mydb_2020-01-01_12-00-00.zip
                bkp_file = '%s_%s.zip' % (cr.dbname,
                                          time.strftime('%Y-%m-%d_%H-%M-%S'))
                file_path = os.path.join(rec.bkp_dir, bkp_file)
                try:
                    dump_db(cr.dbname, file_path)
                except Exception:
                    _logger_backup.error(
                        "Couldn't backup database %s. Bad database "
                        "administrator password for server running at "
                        "http://%s:%s", rec.name, rec.host, rec.port)
                    continue
                # Log success only when a dump was written; the original
                # logged unconditionally and raised a NameError on
                # ``file_path`` when the database didn't exist.
                _logger_backup.info("database %s backupped to:%s",
                                    cr.dbname, file_path)
            else:
                _logger_backup.error(
                    "database %s doesn't exist on http://%s:%s" %
                    (rec.name, rec.host, rec.port))

            # Check if user wants to write to SFTP or not.
            if rec.sftpwrite is True:
                try:
                    local_dir = rec.bkp_dir
                    # Collapse accidental repeated slashes, e.g.
                    # /odoo//backups/ -> /odoo/backups/
                    remote_dir = re.sub('([/]{2,5})+', '/', rec.sftppath)
                    srv = pysftp.Connection(host=rec.sftpip,
                                            username=rec.sftpusername,
                                            password=rec.sftppassword)
                    # Keepalive prevents "socket closed / connection
                    # dropped" errors on long transfers.
                    srv._transport.set_keepalive(30)
                    try:
                        srv.chdir(remote_dir)
                    except IOError:
                        # Create the directory chain one element at a time.
                        current_dir = ''
                        for element in remote_dir.split('/'):
                            current_dir += element + '/'
                            try:
                                srv.chdir(current_dir)
                            except Exception:
                                _logger_backup.info(
                                    "Creating missing remote path %s",
                                    current_dir)
                                # NOTE(review): pysftp parses ``mode`` as
                                # an octal string, so 777 means rwx for
                                # all — confirm with the pysftp in use.
                                srv.mkdir(current_dir, mode=777)
                                srv.chdir(current_dir)
                    srv.chdir(remote_dir)
                    # Upload every local backup file.
                    for fname in os.listdir(local_dir):
                        fullpath = os.path.join(local_dir, fname)
                        if os.path.isfile(fullpath):
                            srv.put(fullpath)

                    # Remove remote backups older than ``daystokeepsftp``.
                    srv.chdir(remote_dir)
                    for fname in srv.listdir(remote_dir):
                        fullpath = os.path.join(remote_dir, fname)
                        timestamp = srv.stat(fullpath).st_atime
                        createtime = datetime.datetime.fromtimestamp(
                            timestamp)
                        delta = datetime.datetime.now() - createtime
                        if delta.days >= rec.daystokeepsftp:
                            # Only delete files, never directories.
                            # NOTE(review): filters on ".dump" although
                            # the dumps are named ".zip" — confirm this
                            # pattern is intended.
                            if srv.isfile(fullpath) and ".dump" in fname:
                                srv.unlink(fname)
                    # Close the SFTP session.
                    srv.close()
                except Exception as e:
                    _logger_backup.debug(
                        'Exception! We couldn\'t back up to the FTP server..')
                    # Optionally notify by e-mail.  Credentials are
                    # deliberately NOT included in the message body (the
                    # scraped original embedded them in an invalid,
                    # redacted literal).
                    if rec.sendmailsftpfail:
                        try:
                            ir_mail_server = self.pool.get('ir.mail_server')
                            message = (
                                "Dear,\n\nThe backup for the server %s "
                                "(IP: %s) failed.\n\nError details: %s"
                                "\n\nWith kind regards"
                                % (rec.host, rec.sftpip, tools.ustr(e)))
                            msg = ir_mail_server.build_email(
                                "auto_backup@" + rec.name + ".com",
                                [rec.emailtonotify],
                                "Backup from " + rec.host + "(" +
                                rec.sftpip + ") failed",
                                message)
                            ir_mail_server.send_email(cr, user, msg)
                        except Exception:
                            pass
            # Purge old local files AFTER the SFTP upload so that a
            # "keep 0 days locally" configuration still gets its dumps
            # copied to the remote server first.
            if rec.autoremove is True:
                for fname in os.listdir(rec.bkp_dir):
                    fullpath = os.path.join(rec.bkp_dir, fname)
                    createtime = datetime.datetime.fromtimestamp(
                        os.stat(fullpath).st_ctime)
                    if (datetime.datetime.now() - createtime).days \
                            >= rec.daystokeep:
                        # Only delete zip files, never directories.
                        if os.path.isfile(fullpath) and ".zip" in fname:
                            os.remove(fullpath)
Esempio n. 6
0
 def dump_db(stream):
     """Dump the database named by the enclosing ``database_obj`` into
     *stream*; thin closure delegating to ``db.dump_db``."""
     return db.dump_db(database_obj.name, stream)
Esempio n. 7
0
    def schedule_backup(self, cr, user, context=None, dummy=""):
        """Run every backup configuration: dump each database to its
        local backup directory, optionally mirror the dumps to an SFTP
        server, and purge old local files.

        :param cr: database cursor
        :param user: id of the user running the scheduled action
        :param context: optional context dict (the original used the
            mutable default ``{}``, which is shared across calls)
        :param dummy: unused; kept for interface compatibility
        """
        if context is None:
            context = {}
        conf_ids = self.search(cr, user, [])
        confs = self.browse(cr, user, conf_ids)
        for rec in confs:
            db_list = self.get_db_list(cr, user, [], rec.host, rec.port)
            if rec.name in db_list:
                if not os.path.isdir(rec.bkp_dir):
                    os.makedirs(rec.bkp_dir)
                # Dump file name, e.g. mydb_2020-01-01_12-00-00.zip
                bkp_file = '%s_%s.zip' % (cr.dbname,
                                          time.strftime('%Y-%m-%d_%H-%M-%S'))
                file_path = os.path.join(rec.bkp_dir, bkp_file)
                try:
                    dump_db(cr.dbname, file_path)
                except Exception:
                    _logger_backup.error(
                        "Couldn't backup database %s. Bad database "
                        "administrator password for server running at "
                        "http://%s:%s", rec.name, rec.host, rec.port)
                    continue
                # Log success only when a dump was written; the original
                # logged unconditionally and raised a NameError on
                # ``file_path`` when the database didn't exist.
                _logger_backup.info("database %s backupped to:%s",
                                    cr.dbname, file_path)
            else:
                _logger_backup.error(
                    "database %s doesn't exist on http://%s:%s" %
                    (rec.name, rec.host, rec.port))

            # Check if user wants to write to SFTP or not.
            if rec.sftpwrite is True:
                try:
                    local_dir = rec.bkp_dir
                    # Collapse accidental repeated slashes, e.g.
                    # /odoo//backups/ -> /odoo/backups/
                    remote_dir = re.sub('([/]{2,5})+', '/', rec.sftppath)
                    srv = pysftp.Connection(host=rec.sftpip,
                                            username=rec.sftpusername,
                                            password=rec.sftppassword)
                    # Keepalive prevents "socket closed / connection
                    # dropped" errors on long transfers.
                    srv._transport.set_keepalive(30)
                    try:
                        srv.chdir(remote_dir)
                    except IOError:
                        # Create the directory chain one element at a time.
                        current_dir = ''
                        for element in remote_dir.split('/'):
                            current_dir += element + '/'
                            try:
                                srv.chdir(current_dir)
                            except Exception:
                                _logger_backup.info(
                                    "Creating missing remote path %s",
                                    current_dir)
                                # NOTE(review): pysftp parses ``mode`` as
                                # an octal string, so 777 means rwx for
                                # all — confirm with the pysftp in use.
                                srv.mkdir(current_dir, mode=777)
                                srv.chdir(current_dir)
                    srv.chdir(remote_dir)
                    # Upload every local backup file.
                    for fname in os.listdir(local_dir):
                        fullpath = os.path.join(local_dir, fname)
                        if os.path.isfile(fullpath):
                            srv.put(fullpath)

                    # Remove remote backups older than ``daystokeepsftp``.
                    srv.chdir(remote_dir)
                    for fname in srv.listdir(remote_dir):
                        fullpath = os.path.join(remote_dir, fname)
                        timestamp = srv.stat(fullpath).st_atime
                        createtime = datetime.datetime.fromtimestamp(
                            timestamp)
                        delta = datetime.datetime.now() - createtime
                        if delta.days >= rec.daystokeepsftp:
                            # Only delete files, never directories.
                            # NOTE(review): filters on ".dump" although
                            # the dumps are named ".zip" — confirm this
                            # pattern is intended.
                            if srv.isfile(fullpath) and ".dump" in fname:
                                srv.unlink(fname)
                    # Close the SFTP session.
                    srv.close()
                except Exception as e:
                    _logger_backup.debug(
                        'Exception! We couldn\'t back up to the FTP server..')
                    # Optionally notify by e-mail.  Credentials are
                    # deliberately NOT included in the message body (the
                    # scraped original embedded them in an invalid,
                    # redacted literal).
                    if rec.sendmailsftpfail:
                        try:
                            ir_mail_server = self.pool.get('ir.mail_server')
                            message = (
                                "Dear,\n\nThe backup for the server %s "
                                "(IP: %s) failed.\n\nError details: %s"
                                "\n\nWith kind regards"
                                % (rec.host, rec.sftpip, tools.ustr(e)))
                            msg = ir_mail_server.build_email(
                                "auto_backup@" + rec.name + ".com",
                                [rec.emailtonotify],
                                "Backup from " + rec.host + "(" +
                                rec.sftpip + ") failed",
                                message)
                            ir_mail_server.send_email(cr, user, msg)
                        except Exception:
                            pass

            # Purge old local files AFTER the SFTP upload so that a
            # "keep 0 days locally" configuration still gets its dumps
            # copied to the remote server first.
            if rec.autoremove is True:
                for fname in os.listdir(rec.bkp_dir):
                    fullpath = os.path.join(rec.bkp_dir, fname)
                    createtime = datetime.datetime.fromtimestamp(
                        os.stat(fullpath).st_ctime)
                    if (datetime.datetime.now() - createtime).days \
                            >= rec.daystokeep:
                        # Only delete zip files, never directories.
                        if os.path.isfile(fullpath) and ".zip" in fname:
                            os.remove(fullpath)
Esempio n. 8
0
    def schedule_backup(self):
        """Cron entry point: for every configuration whose scheduled
        time has passed, dump the database, zip it, optionally send it
        to Amazon S3 and record the result in ``backup.executed``.
        """
        confs = self.search([])
        for rec in confs:
            if rec.next_backup:
                next_backup = datetime.strptime(rec.next_backup,
                                                '%Y-%m-%d %H:%M:%S')
            else:
                next_backup = datetime.now()
            # Guard clause: nothing to do until the scheduled time.
            if not next_backup < datetime.now():
                continue

            oerp = oerplib.OERP(rec.host,
                                protocol='xmlrpc',
                                port=rec.port,
                                timeout=1200)

            db_list = oerp.db.list()
            database_name = rec.database_name
            if database_name not in db_list:
                continue
            if not os.path.isdir(rec.backup_dir):
                os.makedirs(rec.backup_dir)
            # Take the timestamp once so the .sql and .zip names always
            # match (the original called strftime twice and could
            # straddle a second boundary).
            stamp = time.strftime('%Y%m%d_%H_%M_%S')
            bkp_file = '%s_%s.sql' % (database_name, stamp)
            zip_file = '%s_%s.zip' % (database_name, stamp)
            file_path = os.path.join(rec.backup_dir, bkp_file)
            zip_path = os.path.join(rec.backup_dir, zip_file)
            # ``with`` guarantees the dump file is closed even on the
            # error path (the original leaked the handle on ``continue``).
            try:
                with open(file_path, 'wb') as fp:
                    dump_db(database_name, fp, backup_format='dump')
            except Exception as ex:
                # Lazy %-args; works on Python 2 and 3 (the original's
                # str(ex).decode(...) raises AttributeError on Python 3).
                _logger.error("%s", ex, exc_info=True)
                continue

            with zipfile.ZipFile(zip_path, 'w') as zipped:
                # arcname keeps the archive flat instead of embedding
                # the full backup_dir path inside the zip.
                zipped.write(file_path, arcname=bkp_file)
            os.remove(file_path)

            backup_env = self.env['backup.executed']

            # Common record values; S3 uploads additionally store the
            # remote key (or an error marker when the upload failed).
            vals = {
                'backup_date': datetime.now(),
                'configuration_id': rec.id,
                'name': zip_file,
                'state': 'concluded',
                'local_path': zip_path,
            }
            if rec.send_to_s3:
                key = rec.send_for_amazon_s3(zip_path, zip_file)
                vals['s3_id'] = key or 'Erro ao enviar para o Amazon S3'
            backup_env.create(vals)
            rec._set_next_backup()
Esempio n. 9
0
              "This is what we get:\n"
              "%s" % (self.backups_path, e))
     _logger.warning(error)
 else:
     if not backup_name:
         backup_name = '%s_%s_%s.%s' % (
             self.name, bu_type, now.strftime('%Y%m%d_%H%M%S'),
             backup_format)
     backup_path = os.path.join(self.backups_path, backup_name)
     if os.path.isfile(backup_path):
         return {'error': "File %s already exists" % backup_path}
     backup = open(backup_path, 'wb')
     # backup
     try:
         db_ws.dump_db(self.name,
                       backup,
                       backup_format=backup_format)
     except:
         error = ('Unable to dump self. '
                  'If you are working in an instance with '
                  '"workers" then you can try restarting service.')
         _logger.warning(error)
         backup.close()
     else:
         backup.close()
         backup_vals = {
             'database_id': self.id,
             'name': backup_name,
             'path': self.backups_path,
             'date': now,
             'type': bu_type,