Example #1
def translate_endpoint(original, foreign, only_path=False):
    path = '/translate?original=%s&foreign=%s' % (q(original), q(foreign))

    if only_path:
        return path
    else:
        return listings_endpoint(path)
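
The example above applies q to each query value before interpolating it into the path. A minimal sketch of what that quoting produces, assuming q is Python 2's urllib.quote as in the other examples on this page (Example #3 uses quote_plus, which additionally encodes spaces as '+'):

from urllib import quote as q

# quote() percent-encodes characters that are unsafe in a URL, leaving '/' intact by default
print q('hello world')                 # hello%20world
print q('caf\xc3\xa9')                 # caf%C3%A9  (UTF-8 bytes are escaped one by one)
print '/translate?original=%s&foreign=%s' % (q('hello world'), q('caf\xc3\xa9'))
# /translate?original=hello%20world&foreign=caf%C3%A9
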
Example #3
def outlines_api_url(offering):
    """
    The URL for info in the API.
    """
    from urllib import quote_plus as q

    args = {
        'year': q(str(int(offering.semester.name[0:3]) + 1900)),
        'term': q(Semester.label_lookup[offering.semester.name[3]].lower()),
        'dept': q(offering.subject.lower()),
        'number': q(offering.number.lower()),
        'section': q(offering.section.lower()),
    }
    qs = 'year={year}&term={term}&dept={dept}&number={number}&section={section}'.format(**args)
    return OUTLINES_BASE_URL + '?' + qs
Example #4
 def test_invoke_factory_blobstorage_and_demostorage(self):
     import os
     from urllib import quote as q
     from ZODB.blob import BlobStorage
     from ZODB.DemoStorage import DemoStorage
     from ZODB.FileStorage import FileStorage
     from zope.proxy import getProxiedObject
     DB_FILE = os.path.join(self.tmpdir, 'db.db')
     BLOB_DIR = os.path.join(self.tmpdir, 'blob')
     self.failIf(os.path.exists(DB_FILE))
     resolver = self._makeOne()
     k, args, kw, factory = resolver(
         'file://%s/db.db?quota=200&demostorage=true'
         '&blobstorage_dir=%s/blob'
         '&blobstorage_layout=bushy' % (self.tmpdir, q(self.tmpdir)))
     self.assertEqual(k,
                      (('%s/db.db' % self.tmpdir,),
                       (('blobstorage_dir', '%s/blob' % self.tmpdir),
                        ('blobstorage_layout', 'bushy'),
                        ('demostorage', 1),
                        ('quota', 200),
                       ),
                       (('cache_size', 10000),
                        ('database_name', 'unnamed'),
                        ('pool_size', 7)
                       ),
                      )
                     )
     db = factory()
     self.failUnless(isinstance(db._storage, BlobStorage))
     self.failUnless(isinstance(getProxiedObject(db._storage), DemoStorage))
     self.failUnless(isinstance(getProxiedObject(db._storage)._base,
                                FileStorage))
     self.failUnless(os.path.exists(DB_FILE))
     self.failUnless(os.path.exists(BLOB_DIR))
Example #5
 def _test(self, arg):
     """tests a command on a different bunny1 host.  usage: _test [fully-qualified-bunny1-url] [command]"""
     (bunny1_url, arg) = arg.split(None, 1)
     if not bunny1_url.endswith("?"):
         bunny1_url += "?"
     save("bunny1testurl", bunny1_url)
     raise HTTPRedirect(bunny1_url + q(arg))
Example #7
 def test_invoke_factory_blobstorage_and_demostorage(self):
     import os
     from urllib import quote as q
     from ZODB.DemoStorage import DemoStorage
     DB_FILE = os.path.join(self.tmpdir, 'db.db')
     BLOB_DIR = os.path.join(self.tmpdir, 'blob')
     self.assertFalse(os.path.exists(DB_FILE))
     resolver = self._makeOne()
     factory, dbkw = resolver(
         'file://%s/db.db?quota=200&demostorage=true'
         '&blobstorage_dir=%s/blob'
         '&blobstorage_layout=bushy' % (self.tmpdir, q(self.tmpdir)))
     storage = factory()
     self.assertTrue(isinstance(storage, DemoStorage))
     self.assertTrue(os.path.exists(DB_FILE))
     self.assertTrue(os.path.exists(BLOB_DIR))
Example #9
 def g(self, arg):
     """does a google search.  we could fallback to yubnub, but why do an unnecessary roundtrip for something as common as a google search?"""
     return GOOGLE_SEARCH_URL + q(arg)
Example #10
    def authenticate(self):
        url = URL_BASE % {'host': self.host, 'resource': HILLSTONE_LOGIN_PATH}
        headers = {'Content-Length': '0', 'Accept': HEADER_CONTENT_TYPE_JSON}
        headers.update(HEADER_CONTENT_TYPE_JSON)
        self.token = None
        try:
            response = self._request("POST",
                                     url,
                                     headers=headers,
                                     data=jsonutils.dumps(self.auth))
        except Exception as e:
            with excutils.save_and_reraise_exception():
                LOG.error(e)

        if response is not None and response.get('success', False):
            self.token = response.get('result').get('token')
            self.result = response.get('result')

            self.cookie = COOKIE_CONTENT.format(
                token=q(self.token),
                platform=q(self.result.get('platform')),
                hw_platform=q(self.result.get('hw_platform')),
                host_name=q(self.result.get('host_name')),
                company=q(self.result.get('company')),
                oemid=q(self.result.get('oemId')),
                vsysid=q(self.result.get('vsysId')),
                vsysName=q(self.result.get('vsysName')),
                role=q(self.result.get('role')),
                license=q(self.result.get('license')),
                httpProtocol=q(self.result.get('httpProtocol')),
                soft_version=q(self.result['sysInfo'].get('soft_version')),
                sw_version=q(self.result['sysInfo'].get('sw_version')),
                username='******',
                overseaLicense=q(self.result.get('overseaLicense')),
            )
            return True
        return False
Example #11
def main(verbose=False, dry_run=False):
  """
    Core of the backup script which implement the backup strategy.
  """

  def isSSHPasswordLess(host, user=None, port=22):
    """
      This method test if a ssh authentification on a remote machine can be done via a
      rsa-key/certificate or require a password.
    """
    # If no user given try "user-less" connection
    user_string = ''
    if user not in (None, ''):
      user_string = "%s@" % user
    TEST_STRING = "SSH KEY AUTH OK"
    test_cmd = """ssh -p %s %s%s "echo '%s'" """ % (port, user_string, host, TEST_STRING)
    if verbose:
      print " INFO - run `%s`..." % test_cmd
    ssh = pexpect.spawn(test_cmd, timeout=TIMEOUT)
    time.sleep(1)
    if verbose:
      import StringIO
      ssh_log = StringIO.StringIO()
      ssh.log_file = ssh_log
    ret_code = ssh.expect([TEST_STRING, '.ssword:*', pexpect.EOF, pexpect.TIMEOUT])
    time.sleep(1)
    password_less = None
    if ret_code == 0:
      password_less = True
    elif ret_code == 1:
      password_less = False
    else:
      print "ERROR - SSH server '%s:%s' is unreachable" % (host, port)
    if verbose:
      nice_log(ssh_log.getvalue(), 'ssh')
      ssh_log.close()
    ssh.close()
    if password_less:
      print " INFO - SSH connection to '%s:%s' is password-less" % (host, port)
    else:
      print " INFO - SSH connection to '%s:%s' require password" % (host, port)
    return password_less


  ######################
  # Self checking phase
  ######################

  # Announce the first phase
  print "=" * 40
  print "Backup script self-checking phase"
  print "=" * 40

  # Check that we are running this script on a UNIX system
  from os import name as os_name
  if os_name != 'posix':
    print "FATAL - This script doesn't support systems other than POSIX's"
    sys.exit(1)

  # Check that every command is installed
  checkCommand(['rdiff-backup', 'rm', 'tar', 'bzip2'])

  # Check existence of main backup folder
  if not exists(abspath(BACKUP_DIR)):
    print "FATAL - Main backup folder '%s' does't exist !" % BACKUP_DIR
    sys.exit(1)

  # This variable indicates whether the pexpect module is required
  is_pexpect_required = False

  # Check data and requirements for each backup
  # Doing this now is nicer to the user: they don't have to wait for the end of the Xth backup to get an error about the (X+1)th
  for backup in backup_list:
    # Normalize backup type
    backup_type = backup['type'].lower().strip()
    if backup_type.find('ftps') != -1:
      backup_type = 'FTPS'
    elif backup_type.find('ftp') != -1:
      backup_type = 'FTP'
    elif backup_type == 'ssh':
      backup_type = 'SSH'
    elif backup_type.find('mysql') != -1:
      if backup_type.find('ssh') != -1:
        backup_type = 'MYSQLDUMP+SSH'
      else:
        backup_type = 'MYSQLDUMP'
    else:
      print "ERROR - Backup type '%s' for '%s' is unrecognized: ignore it." % (backup['type'], title)
      # Reset backup type
      backup['type'] = ''
      continue
    backup['type'] = backup_type
    # Check if pexpect is required
    if backup_type.find('SSH') != -1:
      is_pexpect_required = True
    # Check requirements
    REQUIRED_COMMANDS = { 'FTP'          : 'lftp'
                        , 'FTPS'         : 'lftp'
                        , 'SSH'          : ['rsync', 'ssh']
                        , 'MYSQLDUMP'    : 'mysqldump'
                        , 'MYSQLDUMP+SSH': 'ssh' # TODO: How to check that 'mysqldump' is present on the distant machine ???
                        }
    checkCommand(REQUIRED_COMMANDS[backup_type])
    # Set default parameters if missing
    DEFAULT_PARAMETERS = { 'FTP'          : {'port': 21}
                         , 'FTPS'         : {'port': 21}
                         , 'SSH'          : {'port': 22}
                         , 'MYSQLDUMP'    : {'db_port': 3306}
                         , 'MYSQLDUMP+SSH': {'port': 22, 'db_port': 3306}
                         }
    default_config = DEFAULT_PARAMETERS.get(backup_type, {}).copy()
    default_config.update(backup)
    backup.update(default_config)

  # Import pexpect if necessary
  if is_pexpect_required:
    try:
      import pexpect
    except ImportError:
      print "FATAL - pexpect python module not found: it is required to make backup over SSH !"
      sys.exit(1)



  ######################
  # Proceed each backup
  ######################

  for backup in backup_list:

    # Announce the backup item
    title = backup['title']
    print ""
    print "=" * 40
    print "Backup item: %s" % title
    print "=" * 40

    # Create backup folder structure if needed
    main_folder = abspath(SEP.join([BACKUP_DIR, backup['local_dir']]))
    backup_folders = {
        'main'    : main_folder
      , 'archives': abspath(SEP.join([main_folder, 'monthly-archives']))  # Contains monthly archives
      , 'diff'    : abspath(SEP.join([main_folder, 'rdiff-repository']))  # Contains the current month's differential backup
      , 'mirror'  : abspath(SEP.join([main_folder, 'mirror']))            # Contains a mirror of the remote folder
      }
    for (folder_type, folder_path) in backup_folders.items():
      if not exists(folder_path):
        if not dry_run:
          makedirs(folder_path)
        print " INFO - '%s' folder created" % folder_path


    ##########
    # Step 1 - Mirror data with the right tool
    ##########

    ### Start of this step
    backup_type = backup['type']
    print " INFO - Start mirroring via %s method" % backup_type

    ### Mirror remote data via FTP or FTPS
    if backup_type in ['FTP', 'FTPS']:
      # Generate FTP url
      remote_url = "ftp://%s:%s@%s:%s/%s" % ( qp(backup['user'])
                                            , qp(backup['password'])
                                            , qp(backup['host'])
                                            , backup['port']
                                            , q(backup['remote_dir'])
                                            )
      # Force SSL layer for secure FTP
      secure_options = ''
      if backup_type == 'FTPS':
        secure_options = 'set ftp:ssl-force true && set ftp:ssl-protect-data true && '
      # Get a copy of the remote directory
      ftp_backup = """lftp -c '%sset ftp:list-options -a && open -e "mirror -e --verbose=3 --parallel=2 . %s" %s'""" % (secure_options, backup_folders['mirror'], remote_url)
      run(ftp_backup, verbose, dry_run)


    ### Mirror remote data via SSH
    elif backup_type == 'SSH':

      ## Test SSH password-less connection
      password_less = isSSHPasswordLess(backup['host'], backup['user'], backup['port'])
      if password_less == None:
        print "ERROR - Can't guess authentication method of '%s:%s'" % (backup['host'], backup['port'])
        continue
      if not password_less and not (backup.has_key('password') and len(backup['password']) > 0):
        print "ERROR - No password provided !"
        continue
      # Use rsync + ssh to make a mirror of the distant folder
      user_string = ''
      if backup['user'] not in (None, ''):
        user_string = "%s@" % backup['user']
      remote_url = "%s%s:%s" % (user_string, backup['host'], backup['remote_dir'])
      rsync_backup = """rsync -axHvz --numeric-ids --progress --stats --delete --partial --delete-excluded -e 'ssh -2 -p %s' %s %s""" % (backup['port'], remote_url, backup_folders['mirror'])

      # If it is passwordless, don't use pexpect but run() method instead
      if password_less:
        run(rsync_backup, verbose, dry_run)
      else:
        # In this case we use pexpect to send the password
        if verbose:
          print " INFO - Run `%s`..." % rsync_backup  # XXX Duplicate with 'run()' method
        if not dry_run:
          p = pexpect.spawn(rsync_backup)   # TODO: create a method similar to run() but that take a password as parameter to handle pexpect nicely
          import StringIO
          p_log = StringIO.StringIO()
          p.setlog(p_log)
          i = p.expect(['.ssword:*', pexpect.EOF, pexpect.TIMEOUT], timeout=TIMEOUT)
          time.sleep(1)
          # Password required
          if i == 0:
            # rsync ask for a password. Send it.
            p.sendline(backup['password'])
            print " INFO - SSH password sent"
            j = p.expect([pexpect.EOF, pexpect.TIMEOUT], timeout=TIMEOUT)
            time.sleep(1)
            if j == 1:
              print "ERROR - Backup via SSH reached timeout"
              continue
          elif i == 1:
            print "ERROR - Backup via SSH didn't end correctly"
            continue
          elif i == 2:
            print "ERROR - Backup via SSH reached timeout"
            continue
          # Terminate child process
          nice_log(p_log.getvalue(), 'rsync')
          p_log.close()
          p.close()


    ### Mirror remote mysql database
    elif backup_type in ['MYSQLDUMP', 'MYSQLDUMP+SSH']:
      # Build mysqldump command
      mysqldump = """mysqldump --host=%s --port=%s --user=%s --password=%s --opt""" % (backup['db_host'], backup['db_port'], backup['db_user'], backup['db_pass'])
      # if no database name provided, dump all databases
      db_to_dump = '--all-databases'
      if backup.has_key('db_name') and len(backup['db_name']) > 0:
        db_to_dump = '--databases %s' % backup['db_name']
      mysqldump += ' %s' % db_to_dump
      # Build final command
      sql_file = abspath(SEP.join([backup_folders['mirror'], SQL_FILENAME]))
      if backup_type == 'MYSQLDUMP+SSH':
        # Test SSH password-less connection
        password_less = isSSHPasswordLess(backup['host'], backup['user'], backup['port'])
        if password_less == None:
          print "FATAL - Can't guess authentication method of '%s:%s'" % (backup['host'], backup['port'])
          continue
        cmd = """ssh -C -2 -p %s %s@%s "%s" > %s""" % (backup['port'], backup['user'], backup['host'], mysqldump, sql_file)
      else:
        cmd = "%s > %s" % (mysqldump, sql_file)
      run(cmd, verbose, dry_run)


    ### Mirroring is successful
    print " INFO - %s mirroring succeed" % backup_type


    ##########
    # Step 2 - Update incremental backup
    ##########

    print " INFO - Add the mirror as increment"

    # Use rdiff-backup to do efficient incremental backups
    rdiff_cmd = """rdiff-backup "%s" "%s" """ % (backup_folders['mirror'], backup_folders['diff'])
    run(rdiff_cmd, verbose, dry_run)

    print " INFO - Increment added"


    ##########
    # Step 3 - Generate monthly archives
    ##########

    # Generate monthly archive name
    today_items   = datetime.date.today().timetuple()
    current_year  = today_items[0]
    current_month = today_items[1]
    monthly_archive = abspath("%s%s%04d-%02d.tar.bz2" % (backup_folders['archives'], SEP, current_year, current_month))
    snapshot_date = "%04d-%02d-01" % (current_year, current_month)

    # If month started, make a bzip2 archive
    if not exists(monthly_archive):
      print " INFO - Generate archive of previous month (= %s 00:00 snapshot)" % snapshot_date
      tmp_archives_path = abspath(backup_folders['archives'] + SEP + "tmp")
      if exists(tmp_archives_path):
        run("""rm -rf "%s" """ % tmp_archives_path, verbose, dry_run)
        print " INFO - Previous temporary folder '%s' removed" % tmp_archives_path
      if not dry_run:
        mkdir(tmp_archives_path)
      print " INFO - Temporary folder '%s' created" % tmp_archives_path
      rdiff_cmd = """rdiff-backup -r "%s" "%s" "%s" """ % ( snapshot_date
                                                          , backup_folders['diff']
                                                          , tmp_archives_path
                                                          )
      run(rdiff_cmd, verbose, dry_run)
      run("tar c -C %s ./ | bzip2 > %s" % (tmp_archives_path, monthly_archive), verbose, dry_run)
      # Delete the tmp folder
      run("""rm -vrf "%s" """ % tmp_archives_path, verbose, dry_run)
    else:
      print " INFO - No need to generate archive: previous month already archived"

    # Keep last 32 increments (31 days = 1 month + 1 day)
    print " INFO - Remove increments older than 32 days"
    rdiff_cmd = """rdiff-backup --force --remove-older-than 32B "%s" """ % backup_folders['diff']
    run(rdiff_cmd, verbose, dry_run)

    # Final message before next backup item
    print " INFO - Backup successful"
Example #12
 def fallback(self, raw):
     raise HTTPRedirect(self.fallback_url + q(raw))
Example #13
 def _t(self, arg):
     """tests a command on the most recently used bunny1 host.  usage: _t [command]"""
     bunny1_url = load("bunny1testurl")
     raise HTTPRedirect(bunny1_url + q(arg))
Example #16
def urlencode(d):
    # In case anyone ever wonders. urllib.urlencode DOES NOT take the output
    # of urlparse.parse_qs. Ya, seriously. FU Python.
    from urllib import quote as q
    return '&'.join(['&'.join([k + '=' + q(v) for v in d[k]]) for k in d])
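
As the comment notes, urlparse.parse_qs returns a dict mapping each key to a list of values, which plain urllib.urlencode serializes as the repr of the list unless doseq=True is passed. A quick sketch of the difference (Python 2; the last line assumes the urlencode helper above is in scope):

import urllib
from urlparse import parse_qs

d = parse_qs('tag=a&tag=b%20c')        # {'tag': ['a', 'b c']} -- every value is a list

print urllib.urlencode(d)              # tag=%5B%27a%27%2C+%27b+c%27%5D  (the list's repr, quoted)
print urllib.urlencode(d, doseq=True)  # tag=a&tag=b+c                   (stdlib alternative)
print urlencode(d)                     # tag=a&tag=b%20c                 (the helper above)
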
Example #20
 def _get_url(self):
     "Returns the encoded/quoted URL"
     u = urlparse(self.path)
     return '%s://%s%s' % (u.scheme, u.netloc, q(u.path))
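
_get_url re-quotes only the path component and leaves scheme and host untouched (the query string and fragment, if any, are dropped). A standalone sketch of the same idea, assuming q is urllib.quote and urlparse comes from the Python 2 urlparse module; get_quoted_url is a hypothetical free-function version:

from urllib import quote as q
from urlparse import urlparse

def get_quoted_url(url):
    # Re-quote the path component only; scheme and netloc are kept as-is
    u = urlparse(url)
    return '%s://%s%s' % (u.scheme, u.netloc, q(u.path))

print get_quoted_url('http://example.com/a b/c')   # http://example.com/a%20b/c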