Example #1
def list_target_files(config):
    import os  # imported at module scope in the original mailinabox source
    import urllib.parse
    try:
        p = urllib.parse.urlparse(config["target"])
    except ValueError:
        return "invalid target"

    if p.scheme == "file":
        return [(fn, os.path.getsize(os.path.join(p.path, fn)))
                for fn in os.listdir(p.path)]

    elif p.scheme == "s3":
        # match to a Region
        fix_boto()  # must call prior to importing boto
        import boto.s3
        from boto.exception import BotoServerError
        for region in boto.s3.regions():
            if region.endpoint == p.hostname:
                break
        else:
            raise ValueError("Invalid S3 region/host.")

        bucket = p.path[1:].split('/')[0]
        path = '/'.join(p.path[1:].split('/')[1:]) + '/'
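        # e.g. "s3://s3.amazonaws.com/my-bucket/backups" -> bucket "my-bucket", path "backups/"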

        # If no prefix is specified, set the path to '', otherwise boto won't list the files
        if path == '/':
            path = ''

        if bucket == "":
            raise ValueError("Enter an S3 bucket name.")

        # connect to the region & bucket
        try:
            conn = region.connect(aws_access_key_id=config["target_user"],
                                  aws_secret_access_key=config["target_pass"])
            bucket = conn.get_bucket(bucket)
        except BotoServerError as e:
            if e.status == 403:
                raise ValueError("Invalid S3 access key or secret access key.")
            elif e.status == 404:
                raise ValueError("Invalid S3 bucket name.")
            elif e.status == 301:
                raise ValueError("Incorrect region for this bucket.")
            raise ValueError(e.reason)

        return [(key.name[len(path):], key.size)
                for key in bucket.list(prefix=path)]

    else:
        raise ValueError(config["target"])
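
A minimal usage sketch (the keys mirror the config lookups above; the values are hypothetical):

config = {
    "target": "s3://s3.amazonaws.com/my-bucket/backups",
    "target_user": "<AWS access key ID>",
    "target_pass": "<AWS secret access key>",
}
for name, size in list_target_files(config):
    print(name, size)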
Example #2
def list_target_files(config):
    import os  # imported at module scope in the original source
    import urllib.parse

    try:
        p = urllib.parse.urlparse(config["target"])
    except ValueError:
        return "invalid target"

    if p.scheme == "file":
        return [(fn, os.path.getsize(os.path.join(p.path, fn))) for fn in os.listdir(p.path)]

    elif p.scheme == "s3":
        # match to a Region
        fix_boto()  # must call prior to importing boto
        import boto.s3
        from boto.exception import BotoServerError

        for region in boto.s3.regions():
            if region.endpoint == p.hostname:
                break
        else:
            raise ValueError("Invalid S3 region/host.")

        bucket = p.path[1:].split("/")[0]
        path = "/".join(p.path[1:].split("/")[1:]) + "/"

        # If no prefix is specified, set the path to '', otherwise boto won't list the files
        if path == "/":
            path = ""

        if bucket == "":
            raise ValueError("Enter an S3 bucket name.")

        # connect to the region & bucket
        try:
            conn = region.connect(aws_access_key_id=config["target_user"], aws_secret_access_key=config["target_pass"])
            bucket = conn.get_bucket(bucket)
        except BotoServerError as e:
            if e.status == 403:
                raise ValueError("Invalid S3 access key or secret access key.")
            elif e.status == 404:
                raise ValueError("Invalid S3 bucket name.")
            elif e.status == 301:
                raise ValueError("Incorrect region for this bucket.")
            raise ValueError(e.reason)

        return [(key.name[len(path) :], key.size) for key in bucket.list(prefix=path)]

    else:
        raise ValueError(config["target"])
Example #3
def index():
	# Render the control panel. This route does not require user authentication
	# so it must be safe!

	no_users_exist = (len(get_mail_users(env)) == 0)
	no_admins_exist = (len(get_admins(env)) == 0)

	utils.fix_boto() # must call prior to importing boto
	import boto.s3
	backup_s3_hosts = [(r.name, r.endpoint) for r in boto.s3.regions()]

	return render_template('index.html',
		hostname=env['PRIMARY_HOSTNAME'],
		storage_root=env['STORAGE_ROOT'],
		no_users_exist=no_users_exist,
		no_admins_exist=no_admins_exist,
		backup_s3_hosts=backup_s3_hosts,
	)
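
For orientation, each entry in backup_s3_hosts pairs a region name with its S3 endpoint; illustratively (the exact list depends on the installed boto release):

backup_s3_hosts = [('us-east-1', 's3.amazonaws.com'),
                   ('us-west-2', 's3-us-west-2.amazonaws.com'), ...]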
Example #4
def index():
    # Render the control panel. This route does not require user authentication
    # so it must be safe!

    no_users_exist = (len(get_mail_users(env)) == 0)
    no_admins_exist = (len(get_admins(env)) == 0)

    utils.fix_boto()  # must call prior to importing boto
    import boto.s3
    backup_s3_hosts = [(r.name, r.endpoint) for r in boto.s3.regions()]
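    # Note: csr_country_codes is built earlier in the original route (omitted from this excerpt).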

    return render_template(
        'index.html',
        hostname=env['PRIMARY_HOSTNAME'],
        storage_root=env['STORAGE_ROOT'],
        no_users_exist=no_users_exist,
        no_admins_exist=no_admins_exist,
        backup_s3_hosts=backup_s3_hosts,
        csr_country_codes=csr_country_codes,
    )
Example #5
def list_target_files(config):
    import os
    import re  # os and re are imported at module scope in the original source
    import urllib.parse
    try:
        target = urllib.parse.urlparse(config["target"])
    except ValueError:
        return "invalid target"

    if target.scheme == "file":
        return [(fn, os.path.getsize(os.path.join(target.path, fn)))
                for fn in os.listdir(target.path)]

    elif target.scheme == "rsync":
        rsync_fn_size_re = re.compile(r'.*    ([^ ]*) [^ ]* [^ ]* (.*)')
        rsync_target = '{host}:{path}'

        target_path = target.path
        if not target_path.endswith('/'):
            target_path = target_path + '/'
        if target_path.startswith('/'):
            target_path = target_path[1:]
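        # e.g. target "rsync://user@host/backups" -> target_path "backups/"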

        rsync_command = [
            'rsync', '-e',
            '/usr/bin/ssh -i /root/.ssh/id_rsa_miab -oStrictHostKeyChecking=no -oBatchMode=yes',
            '--list-only', '-r',
            rsync_target.format(host=target.netloc, path=target_path)
        ]

        code, listing = shell('check_output',
                              rsync_command,
                              trap=True,
                              capture_stderr=True)
        if code == 0:
            ret = []
            for l in listing.split('\n'):
                match = rsync_fn_size_re.match(l)
                if match:
                    ret.append((match.groups()[1],
                                int(match.groups()[0].replace(',', ''))))
            return ret
        else:
            if 'Permission denied (publickey).' in listing:
                reason = "Invalid user or check you correctly copied the SSH key."
            elif 'No such file or directory' in listing:
                reason = "Provided path {} is invalid.".format(target_path)
            elif 'Network is unreachable' in listing:
                reason = "The IP address {} is unreachable.".format(
                    target.hostname)
            elif 'Could not resolve hostname' in listing:
                reason = "The hostname {} cannot be resolved.".format(
                    target.hostname)
            else:
                reason = "Unknown error. " \
                  "Please check running 'python management/backup.py --verify' " \
                  "from mailinabox sources to debug the issue."
            raise ValueError(
                "Connection to rsync host failed: {}".format(reason))

    elif target.scheme == "s3":
        # match to a Region
        fix_boto()  # must call prior to importing boto
        import boto.s3
        from boto.exception import BotoServerError
        custom_region = False
        for region in boto.s3.regions():
            if region.endpoint == target.hostname:
                break
        else:
            # If region is not found this is a custom region
            custom_region = True

        bucket = target.path[1:].split('/')[0]
        path = '/'.join(target.path[1:].split('/')[1:]) + '/'

        # Create a custom region with custom endpoint
        if custom_region:
            from boto.s3.connection import S3Connection
            region = boto.s3.S3RegionInfo(name=bucket,
                                          endpoint=target.hostname,
                                          connection_cls=S3Connection)

        # If no prefix is specified, set the path to '', otherwise boto won't list the files
        if path == '/':
            path = ''

        if bucket == "":
            raise ValueError("Enter an S3 bucket name.")

        # connect to the region & bucket
        try:
            conn = region.connect(aws_access_key_id=config["target_user"],
                                  aws_secret_access_key=config["target_pass"])
            bucket = conn.get_bucket(bucket)
        except BotoServerError as e:
            if e.status == 403:
                raise ValueError("Invalid S3 access key or secret access key.")
            elif e.status == 404:
                raise ValueError("Invalid S3 bucket name.")
            elif e.status == 301:
                raise ValueError("Incorrect region for this bucket.")
            raise ValueError(e.reason)

        return [(key.name[len(path):], key.size)
                for key in bucket.list(prefix=path)]

    else:
        raise ValueError(config["target"])
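
To make the rsync listing parse concrete, here is a small sketch of the size/name regex against one hypothetical line of `rsync --list-only` output:

import re
rsync_fn_size_re = re.compile(r'.*    ([^ ]*) [^ ]* [^ ]* (.*)')
line = "-rw-r--r--      1,234,567 2020/01/01 12:00:00 duplicity-full.vol1.difftar.gpg"
match = rsync_fn_size_re.match(line)
if match:
    size, name = match.groups()
    print(name, int(size.replace(',', '')))  # duplicity-full.vol1.difftar.gpg 1234567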
Example #6
def list_target_files(config):
	import os
	import re  # os and re are imported at module scope in the original source
	import urllib.parse
	try:
		target = urllib.parse.urlparse(config["target"])
	except ValueError:
		return "invalid target"

	if target.scheme == "file":
		return [(fn, os.path.getsize(os.path.join(target.path, fn))) for fn in os.listdir(target.path)]

	elif target.scheme == "rsync":
		rsync_fn_size_re = re.compile(r'.*    ([^ ]*) [^ ]* [^ ]* (.*)')
		rsync_target = '{host}:{path}'

		target_path = target.path
		if not target_path.endswith('/'):
			target_path = target_path + '/'
		if target_path.startswith('/'):
			target_path = target_path[1:]

		rsync_command = [ 'rsync',
					'-e',
					'/usr/bin/ssh -i /root/.ssh/id_rsa_miab -oStrictHostKeyChecking=no -oBatchMode=yes',
					'--list-only',
					'-r',
					rsync_target.format(
						host=target.netloc,
						path=target_path)
				]

		code, listing = shell('check_output', rsync_command, trap=True, capture_stderr=True)
		if code == 0:
			ret = []
			for l in listing.split('\n'):
				match = rsync_fn_size_re.match(l)
				if match:
					ret.append( (match.groups()[1], int(match.groups()[0].replace(',',''))) )
			return ret
		else:
			if 'Permission denied (publickey).' in listing:
				reason = "Invalid user or check you correctly copied the SSH key."
			elif 'No such file or directory' in listing:
				reason = "Provided path {} is invalid.".format(target_path)
			elif 'Network is unreachable' in listing:
				reason = "The IP address {} is unreachable.".format(target.hostname)
			elif 'Could not resolve hostname' in listing:
				reason = "The hostname {} cannot be resolved.".format(target.hostname)
			else:
				reason = "Unknown error. " \
						"Please check running 'python management/backup.py --verify' " \
						"from mailinabox sources to debug the issue."
			raise ValueError("Connection to rsync host failed: {}".format(reason))

	elif target.scheme == "s3":
		# match to a Region
		fix_boto() # must call prior to importing boto
		import boto.s3
		from boto.exception import BotoServerError
		for region in boto.s3.regions():
			if region.endpoint == target.hostname:
				break
		else:
			raise ValueError("Invalid S3 region/host.")

		bucket = target.path[1:].split('/')[0]
		path = '/'.join(target.path[1:].split('/')[1:]) + '/'

		# If no prefix is specified, set the path to '', otherwise boto won't list the files
		if path == '/':
			path = ''

		if bucket == "":
			raise ValueError("Enter an S3 bucket name.")

		# connect to the region & bucket
		try:
			conn = region.connect(aws_access_key_id=config["target_user"], aws_secret_access_key=config["target_pass"])
			bucket = conn.get_bucket(bucket)
		except BotoServerError as e:
			if e.status == 403:
				raise ValueError("Invalid S3 access key or secret access key.")
			elif e.status == 404:
				raise ValueError("Invalid S3 bucket name.")
			elif e.status == 301:
				raise ValueError("Incorrect region for this bucket.")
			raise ValueError(e.reason)

		return [(key.name[len(path):], key.size) for key in bucket.list(prefix=path)]

	else:
		raise ValueError(config["target"])
Example #7
def list_target_files(config):
    import os
    import re  # os and re are imported at module scope in the original source
    import urllib.parse
    try:
        p = urllib.parse.urlparse(config["target"])
    except ValueError:
        return "invalid target"

    if p.scheme == "file":
        return [(fn, os.path.getsize(os.path.join(p.path, fn)))
                for fn in os.listdir(p.path)]

    elif p.scheme == "rsync":
        rsync_fn_size_re = re.compile(r'.*    ([^ ]*) [^ ]* [^ ]* (.*)')
        rsync_target = '{host}:{path}'

        _, target_host, target_path = config['target'].split('//')
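        # Note: this expects the double-slash form "rsync://user@host//abs/path",
        # which splits into ['rsync:', 'user@host', 'abs/path'].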
        target_path = '/' + target_path
        if not target_path.endswith('/'):
            target_path += '/'

        rsync_command = [
            'rsync', '-e',
            '/usr/bin/ssh -i /root/.ssh/id_rsa_miab -oStrictHostKeyChecking=no -oBatchMode=yes',
            '--list-only', '-r',
            rsync_target.format(host=target_host, path=target_path)
        ]

        code, listing = shell('check_output', rsync_command, trap=True)
        if code == 0:
            ret = []
            for l in listing.split('\n'):
                match = rsync_fn_size_re.match(l)
                if match:
                    ret.append((match.groups()[1],
                                int(match.groups()[0].replace(',', ''))))
            return ret
        else:
            raise ValueError("Connection to rsync host failed")

    elif p.scheme == "s3":
        # match to a Region
        fix_boto()  # must call prior to importing boto
        import boto.s3
        from boto.exception import BotoServerError
        for region in boto.s3.regions():
            if region.endpoint == p.hostname:
                break
        else:
            raise ValueError("Invalid S3 region/host.")

        bucket = p.path[1:].split('/')[0]
        path = '/'.join(p.path[1:].split('/')[1:]) + '/'

        # If no prefix is specified, set the path to '', otherwise boto won't list the files
        if path == '/':
            path = ''

        if bucket == "":
            raise ValueError("Enter an S3 bucket name.")

        # connect to the region & bucket
        try:
            conn = region.connect(aws_access_key_id=config["target_user"],
                                  aws_secret_access_key=config["target_pass"])
            bucket = conn.get_bucket(bucket)
        except BotoServerError as e:
            if e.status == 403:
                raise ValueError("Invalid S3 access key or secret access key.")
            elif e.status == 404:
                raise ValueError("Invalid S3 bucket name.")
            elif e.status == 301:
                raise ValueError("Incorrect region for this bucket.")
            raise ValueError(e.reason)

        return [(key.name[len(path):], key.size)
                for key in bucket.list(prefix=path)]

    else:
        raise ValueError(config["target"])
Example #8
def list_target_files(config):
	import os
	import re  # os and re are imported at module scope in the original source
	import urllib.parse
	try:
		p = urllib.parse.urlparse(config["target"])
	except ValueError:
		return "invalid target"

	if p.scheme == "file":
		return [(fn, os.path.getsize(os.path.join(p.path, fn))) for fn in os.listdir(p.path)]

	elif p.scheme == "rsync":
		rsync_fn_size_re = re.compile(r'.*    ([^ ]*) [^ ]* [^ ]* (.*)')
		rsync_target = '{host}:{path}'

		_, target_host, target_path = config['target'].split('//')
		target_path = '/' + target_path
		if not target_path.endswith('/'):
			target_path += '/'

		rsync_command = [ 'rsync',
					'-e',
					'/usr/bin/ssh -i /root/.ssh/id_rsa_miab -oStrictHostKeyChecking=no -oBatchMode=yes',
					'--list-only',
					'-r',
					rsync_target.format(
						host=target_host,
						path=target_path)
				]

		code, listing = shell('check_output', rsync_command, trap=True)
		if code == 0:
			ret = []
			for l in listing.split('\n'):
				match = rsync_fn_size_re.match(l)
				if match:
					ret.append( (match.groups()[1], int(match.groups()[0].replace(',',''))) )
			return ret
		else:
			raise ValueError("Connection to rsync host failed")

	elif p.scheme == "s3":
		# match to a Region
		fix_boto() # must call prior to importing boto
		import boto.s3
		from boto.exception import BotoServerError
		for region in boto.s3.regions():
			if region.endpoint == p.hostname:
				break
		else:
			raise ValueError("Invalid S3 region/host.")

		bucket = p.path[1:].split('/')[0]
		path = '/'.join(p.path[1:].split('/')[1:]) + '/'

		# If no prefix is specified, set the path to '', otherwise boto won't list the files
		if path == '/':
			path = ''

		if bucket == "":
			raise ValueError("Enter an S3 bucket name.")

		# connect to the region & bucket
		try:
			conn = region.connect(aws_access_key_id=config["target_user"], aws_secret_access_key=config["target_pass"])
			bucket = conn.get_bucket(bucket)
		except BotoServerError as e:
			if e.status == 403:
				raise ValueError("Invalid S3 access key or secret access key.")
			elif e.status == 404:
				raise ValueError("Invalid S3 bucket name.")
			elif e.status == 301:
				raise ValueError("Incorrect region for this bucket.")
			raise ValueError(e.reason)

		return [(key.name[len(path):], key.size) for key in bucket.list(prefix=path)]

	else:
		raise ValueError(config["target"])
Example #9
def list_target_files(config):
    import os
    import re  # os and re are imported at module scope in the original source
    import urllib.parse
    try:
        target = urllib.parse.urlparse(config["target"])
    except ValueError:
        return "invalid target"

    if target.scheme == "file":
        return [(fn, os.path.getsize(os.path.join(target.path, fn)))
                for fn in os.listdir(target.path)]

    elif target.scheme == "rsync":
        rsync_fn_size_re = re.compile(r'.*    ([^ ]*) [^ ]* [^ ]* (.*)')
        rsync_target = '{host}:{path}'

        target_path = target.path
        if not target_path.endswith('/'):
            target_path = target_path + '/'
        if target_path.startswith('/'):
            target_path = target_path[1:]

        rsync_command = [
            'rsync', '-e',
            rsync_ssh_options(config["target_rsync_port"], direct=True),
            '--list-only', '-r',
            rsync_target.format(host=target.netloc, path=target_path)
        ]

        code, listing = shell('check_output',
                              rsync_command,
                              trap=True,
                              capture_stderr=True)
        if code == 0:
            ret = []
            for l in listing.split('\n'):
                match = rsync_fn_size_re.match(l)
                if match:
                    ret.append((match.groups()[1],
                                int(match.groups()[0].replace(',', ''))))
            return ret
        else:
            if 'Permission denied (publickey).' in listing:
                reason = "Invalid user or check you correctly copied the SSH key."
            elif 'No such file or directory' in listing:
                reason = "Provided path {} is invalid.".format(target_path)
            elif 'Network is unreachable' in listing:
                reason = "The IP address {} is unreachable.".format(
                    target.hostname)
            elif 'Could not resolve hostname' in listing:
                reason = "The hostname {} cannot be resolved.".format(
                    target.hostname)
            else:
                reason = "Unknown error. " \
                  "Please check running 'management/backup.py --verify' " \
                  "from mailinabox sources to debug the issue."
            raise ValueError(
                "Connection to rsync host failed: {}".format(reason))

    elif target.scheme == "s3":
        # match to a Region
        fix_boto()  # must call prior to importing boto
        import boto.s3
        from boto.exception import BotoServerError
        custom_region = False
        for region in boto.s3.regions():
            if region.endpoint == target.hostname:
                break
        else:
            # If region is not found this is a custom region
            custom_region = True

        bucket = target.path[1:].split('/')[0]
        path = '/'.join(target.path[1:].split('/')[1:]) + '/'

        # Create a custom region with custom endpoint
        if custom_region:
            from boto.s3.connection import S3Connection
            region = boto.s3.S3RegionInfo(name=bucket,
                                          endpoint=target.hostname,
                                          connection_cls=S3Connection)

        # If no prefix is specified, set the path to '', otherwise boto won't list the files
        if path == '/':
            path = ''

        if bucket == "":
            raise ValueError("Enter an S3 bucket name.")

        # connect to the region & bucket
        try:
            conn = region.connect(aws_access_key_id=config["target_user"],
                                  aws_secret_access_key=config["target_pass"])
            bucket = conn.get_bucket(bucket)
        except BotoServerError as e:
            if e.status == 403:
                raise ValueError("Invalid S3 access key or secret access key.")
            elif e.status == 404:
                raise ValueError("Invalid S3 bucket name.")
            elif e.status == 301:
                raise ValueError("Incorrect region for this bucket.")
            raise ValueError(e.reason)

        return [(key.name[len(path):], key.size)
                for key in bucket.list(prefix=path)]
    elif target.scheme == 'b2':
        InMemoryAccountInfo = None
        B2Api = None
        NonExistentBucket = None

        if get_os_code() == "Debian10":
            # WARNING: This is deprecated code using a legacy library.
            # We need it because Debian 10 ships with an old version of Duplicity
            from b2.account_info import InMemoryAccountInfo
            from b2.api import B2Api
            from b2.exception import NonExistentBucket
        else:
            from b2sdk.v1 import InMemoryAccountInfo, B2Api
            from b2sdk.v1.exception import NonExistentBucket

        info = InMemoryAccountInfo()
        b2_api = B2Api(info)

        # Extract information from target
        b2_application_keyid = target.netloc[:target.netloc.index(':')]
        b2_application_key = target.netloc[target.netloc.index(':') +
                                           1:target.netloc.index('@')]
        b2_bucket = target.netloc[target.netloc.index('@') + 1:]

        try:
            b2_api.authorize_account("production", b2_application_keyid,
                                     b2_application_key)
            bucket = b2_api.get_bucket_by_name(b2_bucket)
        except NonExistentBucket:
            raise ValueError(
                "B2 Bucket does not exist. Please double check your information!"
            )
        return [(key.file_name, key.size) for key, _ in bucket.ls()]

    else:
        raise ValueError(config["target"])
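
For reference, a sketch of how the b2 branch above slices its target URL (credentials are hypothetical):

import urllib.parse
target = urllib.parse.urlparse("b2://000abc123:K000secretKey@my-backup-bucket")
netloc = target.netloc
keyid  = netloc[:netloc.index(':')]                        # '000abc123'
appkey = netloc[netloc.index(':') + 1:netloc.index('@')]  # 'K000secretKey'
bucket = netloc[netloc.index('@') + 1:]                    # 'my-backup-bucket'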