Code example #1
File: bacumon.py  Project: rusaksoft/Plyushkin
def check_week1_2_last_file_date(action):
	url = build_url(action)
	status1 = _check_simple_last_file_date(action, backend.get_backend(url + "/week1"))
	status2 = _check_simple_last_file_date(action, backend.get_backend(url + "/week2"))
	action.status = max([status1, status2])
	action.last_check = datetime.now()
	action.save()
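The helpers build_url and _check_simple_last_file_date above are internal to the Plyushkin project and are not shown here. Stripped of those internals, the backend usage reduces to the lifecycle sketched below; this is a minimal illustration only, assuming the backend module is duplicity.backend and using a placeholder file:// URL.

# Minimal sketch of the get_backend lifecycle used throughout these examples.
# Assumption: `backend` is duplicity.backend; the URL below is a placeholder.
from duplicity import backend

backend.import_backends()                           # register the available backend classes
dest = backend.get_backend("file:///tmp/backups")   # placeholder destination URL
try:
    filenames = dest.list()                         # file names stored at the destination
    print(sorted(filenames))
finally:
    dest.close()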
Code example #2
File: test_collections.py  Project: mjuric/duplicity
    def setUp(self):
        super(CollectionTest, self).setUp()

        self.unpack_testfiles()

        col_test_dir = path.Path("testfiles/collectionstest")
        archive_dir = col_test_dir.append("archive_dir")
        self.set_global('archive_dir', archive_dir)
        self.archive_dir_backend = backend.get_backend("file://testfiles/collectionstest"
                                                       "/archive_dir")

        self.real_backend = backend.get_backend("file://%s/%s" %
                                                (col_test_dir.name, "remote_dir"))
        self.output_dir = path.Path("testfiles/output")  # used as a temp directory
        self.output_dir_backend = backend.get_backend("file://testfiles/output")
Code example #3
File: bacumon.py  Project: rusaksoft/Plyushkin
def check_duplicity(action):
	dest = backend.get_backend(build_url(action))

	lst = dest.list()

	lst = [f for f in lst if f.startswith("duplicity-inc.") and f.endswith(".manifest.gpg")]
	# TODO: is this check safe? Can we be sure these files are created only after the whole backup has been uploaded?

	print lst

	lst = [re.search(r".*to\.(.*)\.manifest\.gpg", f).group(1) for f in lst]

	if not lst:
		action.status = "No files"
		action.last_check = datetime.now()
		action.save()
		return action.status 

	last_backup = datetime.strptime(max(lst),"%Y%m%dT%H%M%SZ")

	action.last_check = datetime.now()
	action.status = last_backup
	action.save()

	return action.status
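The manifest-name parsing above can be exercised on its own. The following toy illustration uses an invented filename that matches both the regular expression and the %Y%m%dT%H%M%SZ timestamp format expected by strptime.

# Toy illustration of the manifest-name parsing in check_duplicity (filename invented).
import re
from datetime import datetime

name = "duplicity-inc.20240101T000000Z.to.20240108T120000Z.manifest.gpg"
end_stamp = re.search(r".*to\.(.*)\.manifest\.gpg", name).group(1)
print(datetime.strptime(end_stamp, "%Y%m%dT%H%M%SZ"))   # 2024-01-08 12:00:00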
Code example #4
File: bacumon.py  Project: rusaksoft/Plyushkin
def get_used_space(storage, path=""):
	# TODO: reuse the backend instance instead of creating a new one per call

	url = storage.access_type+"://"+storage.login + ":" + storage.password \
	 + "@" + storage.url + path

	print "url:"+url

	dest = backend.get_backend(url)
	
	used_space = 0

	lst = dest.list_with_attr()
	# print lst

	for item in lst:
		if item[0].startswith('d'):
			dir_name = item[8]
			if len(item) > 9:
				dir_name = " ".join(item[8:])
			
			print "dir_name:"+dir_name

			if not dir_name.strip():
				print "Error: Empty dir name"
				continue

			try:
				used_space += get_used_space(storage, path + "/" + dir_name)
			except BackendException, e:
				print "BackendException: ", e
		else:
			used_space += int(item[4])

	return used_space  # total bytes found under this path
Code example #5
File: test_collections.py  Project: lorenz/duplicity
    def setUp(self):
        super(CollectionTest, self).setUp()

        self.unpack_testfiles()

        col_test_dir = path.Path(u"testfiles/collectionstest")
        archive_dir_path = col_test_dir.append(u"archive_dir")
        self.set_global(u'archive_dir_path', archive_dir_path)
        self.archive_dir_backend = backend.get_backend(
            u"file://testfiles/collectionstest"
            u"/archive_dir")

        self.real_backend = backend.get_backend(
            u"file://%s/%s" % (col_test_dir.uc_name, u"remote_dir"))
        self.output_dir = path.Path(
            u"testfiles/output")  # used as a temp directory
        self.output_dir_backend = backend.get_backend(
            u"file://testfiles/output")
Code example #6
File: commandline.py  Project: kennethjiang/duplicity
def set_backend(arg1, arg2):
    """Figure out which arg is url, set backend

    Return value is the pair (path_first, path), where path_first is true
    iff path was made from arg1.

    """
    path, bend = args_to_path_backend(arg1, arg2)

    globals.backend = backend.get_backend(bend)

    if path == arg2:
        return (None, arg2)  # False?
    else:
        return (1, arg1)  # True?
Code example #7
File: commandline.py  Project: mjuric/duplicity
def set_backend(arg1, arg2):
    """Figure out which arg is url, set backend

    Return value is the pair (path_first, path), where path_first is true
    iff path was made from arg1.

    """
    path, bend = args_to_path_backend(arg1, arg2)

    globals.backend = backend.get_backend(bend)

    if path == arg2:
        return (None, arg2)  # False?
    else:
        return (1, arg1)  # True?
Code example #8
File: __init__.py  Project: andreagrandi/duplicity-sx
    def setUp(self):
        super(FunctionalTestCase, self).setUp()

        self.unpack_testfiles()

        self.class_args = []
        self.backend_url = "file://testfiles/output"
        self.last_backup = None
        self.set_environ('PASSPHRASE', self.sign_passphrase)
        self.set_environ("SIGN_PASSPHRASE", self.sign_passphrase)

        backend_inst = backend.get_backend(self.backend_url)
        bl = backend_inst.list()
        if bl:
            backend_inst.delete(backend_inst.list())
        backend_inst.close()
Code example #9
File: __init__.py  Project: ralle12345/duplicity
    def setUp(self):
        super(FunctionalTestCase, self).setUp()

        self.unpack_testfiles()

        self.class_args = []
        self.backend_url = "file://testfiles/output"
        self.last_backup = None
        self.set_environ('PASSPHRASE', self.sign_passphrase)
        self.set_environ("SIGN_PASSPHRASE", self.sign_passphrase)

        backend_inst = backend.get_backend(self.backend_url)
        bl = backend_inst.list()
        if bl:
            backend_inst.delete(backend_inst.list())
        backend_inst.close()
Code example #10
    def setUp(self):
        super(FunctionalTestCase, self).setUp()

        self.unpack_testfiles()

        self.class_args = []
        self.backend_url = u"file://{0}/testfiles/output".format(_runtest_dir)
        self.last_backup = None
        self.set_environ(u'PASSPHRASE', self.sign_passphrase)
        self.set_environ(u"SIGN_PASSPHRASE", self.sign_passphrase)

        backend_inst = backend.get_backend(self.backend_url)
        bl = backend_inst.list()
        if bl:
            backend_inst.delete(backend_inst.list())
        backend_inst.close()
        self._check_setsid()
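The destination-clearing step repeated in these setUp methods can be factored into a small helper. The sketch below is built only from the calls shown above (get_backend, list, delete, close); the function name wipe_destination is chosen here and is not part of duplicity.

# Sketch of the cleanup step shared by the setUp methods above.
# wipe_destination is a name chosen for illustration, not a duplicity API.
from duplicity import backend

def wipe_destination(url):
    backend_inst = backend.get_backend(url)
    try:
        existing = backend_inst.list()
        if existing:
            backend_inst.delete(existing)   # delete() accepts the full list of file names
    finally:
        backend_inst.close()

# e.g. wipe_destination("file://testfiles/output")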
Code example #11
File: commandline.py  Project: mjuric/duplicity
def ProcessCommandLine(cmdline_list):
    """Process command line, set globals, return action

    action will be "list-current", "collection-status", "cleanup",
    "remove-old", "restore", "verify", "full", or "inc".

    """
    # build initial gpg_profile
    globals.gpg_profile = gpg.GPGProfile()

    # parse command line
    args = parse_cmdline_options(cmdline_list)

    # if we get a different gpg-binary from the commandline then redo gpg_profile
    if globals.gpg_binary is not None:
        src = globals.gpg_profile
        globals.gpg_profile = gpg.GPGProfile(
            passphrase=src.passphrase,
            sign_key=src.sign_key,
            recipients=src.recipients,
            hidden_recipients=src.hidden_recipients)
    log.Debug(_("GPG binary is %s, version %s") %
              ((globals.gpg_binary or 'gpg'),
               "%d.%d.%d" % globals.gpg_profile.gpg_version))

    # we can now try to import all the backends
    backend.import_backends()

    # parse_cmdline_options already verified that we got exactly 1 or 2
    # non-options arguments
    assert len(args) >= 1 and len(args) <= 2, "arg count should have been checked already"

    if len(args) == 1:
        if list_current:
            action = "list-current"
        elif collection_status:
            action = "collection-status"
        elif cleanup:
            action = "cleanup"
        elif globals.remove_time is not None:
            action = "remove-old"
        elif globals.remove_all_but_n_full_mode:
            action = "remove-all-but-n-full"
        elif globals.remove_all_inc_of_but_n_full_mode:
            action = "remove-all-inc-of-but-n-full"
        else:
            command_line_error("Too few arguments")
        globals.backend = backend.get_backend(args[0])
        if not globals.backend:
            log.FatalError(_("""Bad URL '%s'.
Examples of URL strings are "scp://[email protected]:1234/path" and
"file:///usr/local".  See the man page for more information.""") % (args[0],),
                           log.ErrorCode.bad_url)
    elif len(args) == 2:
        # Figure out whether backup or restore
        backup, local_pathname = set_backend(args[0], args[1])
        if backup:
            if full_backup:
                action = "full"
            else:
                action = "inc"
        else:
            if verify:
                action = "verify"
            else:
                action = "restore"

        process_local_dir(action, local_pathname)
        if action in ['full', 'inc', 'verify']:
            set_selection()
    elif len(args) > 2:
        raise AssertionError("this code should not be reachable")

    check_consistency(action)
    log.Info(_("Main action: ") + action)
    return action
Code example #12
File: bacumon.py  Project: rusaksoft/Plyushkin
def check_simple_last_file_date(action):
	action.status = _check_simple_last_file_date(action, backend.get_backend(build_url(action)))
	action.last_check = datetime.now()
	action.save()
Code example #13
File: commandline.py  Project: kennethjiang/duplicity
def ProcessCommandLine(cmdline_list):
    """Process command line, set globals, return action

    action will be "list-current", "collection-status", "cleanup",
    "remove-old", "restore", "verify", "full", or "inc".

    """
    globals.gpg_profile = gpg.GPGProfile()

    args = parse_cmdline_options(cmdline_list)

    # we can now try to import all the backends
    backend.import_backends()

    # parse_cmdline_options already verified that we got exactly 1 or 2
    # non-options arguments
    assert len(args) >= 1 and len(args) <= 2, "arg count should have been checked already"

    if len(args) == 1:
        if list_current:
            action = "list-current"
        elif collection_status:
            action = "collection-status"
        elif cleanup:
            action = "cleanup"
        elif globals.remove_time is not None:
            action = "remove-old"
        elif globals.remove_all_but_n_full_mode:
            action = "remove-all-but-n-full"
        elif globals.remove_all_inc_of_but_n_full_mode:
            action = "remove-all-inc-of-but-n-full"
        else:
            command_line_error("Too few arguments")
        globals.backend = backend.get_backend(args[0])
        if not globals.backend:
            log.FatalError(
                _("""Bad URL '%s'.
Examples of URL strings are "scp://[email protected]:1234/path" and
"file:///usr/local".  See the man page for more information.""") % (args[0], ),
                log.ErrorCode.bad_url)
    elif len(args) == 2:
        # Figure out whether backup or restore
        backup, local_pathname = set_backend(args[0], args[1])
        if backup:
            if full_backup:
                action = "full"
            else:
                action = "inc"
        else:
            if verify:
                action = "verify"
            else:
                action = "restore"

        process_local_dir(action, local_pathname)
        if action in ['full', 'inc', 'verify']:
            set_selection()
    elif len(args) > 2:
        raise AssertionError("this code should not be reachable")

    check_consistency(action)
    log.Info(_("Main action: ") + action)
    return action
Code example #14
def main():
    output = []

    def Log(s, verb_level, code=1, extra=None, force_print=False):
        if verb_level <= log.getverbosity():
            output.extend(s.split("\n"))

    # def PrintCollectionStatus(col_stats, force_print=False):
    #     # raise ValueError(type(col_stats.matched_chain_pair[1]))
    #     output.append({
    #         "num_backup_sets":
    #     })

    # log.PrintCollectionStatus = PrintCollectionStatus

    results = None
    try:
        settings = dict()
        Intersplunk.readResults(None, settings, True)

        dup_time.setcurtime()

        archive_dir = os.path.join(app_dir, "local", "data", "archive")

        try:
            os.makedirs(archive_dir)
        except:
            pass

        if sys.argv[1] == "splunk-last-backups":
            ap = argparse.ArgumentParser()
            ap.add_argument("--time", type=int)
            ap.add_argument("backend")
            args = ap.parse_args(sys.argv[2:])

            dup_globals.gpg_profile = gpg.GPGProfile()
            dup_globals.gpg_profile.passphrase = os.environ["PASSPHRASE"]

            backend.import_backends()

            dup_globals.backend = backend.get_backend(args.backend)

            if dup_globals.backup_name is None:
                dup_globals.backup_name = commandline.generate_default_backup_name(
                    args.backend)

            commandline.set_archive_dir(archive_dir)

            results = []
            time = args.time
            col_stats = dup_collections.CollectionsStatus(
                dup_globals.backend, dup_globals.archive_dir_path,
                "list-current").set_values()

            try:
                sig_chain = col_stats.get_backup_chain_at_time(time)
            except dup_collections.CollectionsError:
                results.append({
                    "last_full_backup_time": 0,
                    "last_incr_backup_time": 0,
                })
            else:
                if sig_chain.incset_list:
                    last_incr_backup_time = max(
                        [incset.end_time for incset in sig_chain.incset_list])
                else:
                    last_incr_backup_time = 0

                results.append({
                    "last_full_backup_time":
                    col_stats.get_last_full_backup_time(),
                    "last_incr_backup_time":
                    last_incr_backup_time
                })
        elif sys.argv[1] == "splunk-file-list":
            ap = argparse.ArgumentParser()
            ap.add_argument("--time")
            ap.add_argument("backend")
            args = ap.parse_args(sys.argv[2:])
            args.time = int(args.time.split(".")[0])

            dup_time.setcurtime(args.time)
            dup_globals.restore_time = args.time

            dup_globals.gpg_profile = gpg.GPGProfile()
            dup_globals.gpg_profile.passphrase = os.environ["PASSPHRASE"]

            backend.import_backends()

            dup_globals.backend = backend.get_backend(args.backend)

            if dup_globals.backup_name is None:
                dup_globals.backup_name = commandline.generate_default_backup_name(
                    args.backend)

            commandline.set_archive_dir(archive_dir)

            results = []
            col_stats = dup_collections.CollectionsStatus(
                dup_globals.backend, dup_globals.archive_dir_path,
                "list-current").set_values()

            time = args.time
            sig_chain = col_stats.get_signature_chain_at_time(time)

            path_iter = diffdir.get_combined_path_iter(
                sig_chain.get_fileobjs(time))
            for path in path_iter:
                if path.difftype != u"deleted" and path.index:
                    mode = bin(path.mode)[2:]

                    perms = ""
                    for p, val in enumerate(mode):
                        if p in (0, 3, 6):
                            c = "r"
                        elif p in (1, 4, 7):
                            c = "w"
                        elif p in (2, 5, 8):
                            c = "x"

                        perms += c if int(val) else "-"

                    if path.type == "dir":
                        perms = "d" + perms
                    elif path.type == "sym":
                        perms = "l" + perms
                    else:
                        perms = "-" + perms

                    results.append({
                        "perms": perms,
                        "owner": path.stat.st_uid,
                        "group": path.stat.st_gid,
                        "size": path.stat.st_size,
                        "modtime": path.stat.st_mtime,
                        "filename": os.path.join(*path.index),
                    })
        else:
            args = ["--archive-dir", archive_dir] + sys.argv[1:]
            action = commandline.ProcessCommandLine(args)

            log.Log = Log
            try:
                dup_main.do_backup(action)
            except dup_collections.CollectionsError:
                results = []
    except SystemExit:
        pass
    except Exception as e:
        import traceback
        # sys.stderr.write(traceback.format_exc())

        Intersplunk.generateErrorResults("Traceback: %s" %
                                         traceback.format_exc())

        return

    if output and not results:
        import time

        results = [{"_raw": "\n".join(output), "_time": time.time()}]

    if results:
        try:
            Intersplunk.outputResults(results)
        except Exception:
            import traceback
            sys.stderr.write(traceback.format_exc())
            results = Intersplunk.generateErrorResults("Traceback: %s" %
                                                       traceback.format_exc())
            Intersplunk.outputResults(results)
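The permission-string loop in the splunk-file-list branch above can also be written as a standalone function. The sketch below reproduces the same rwx mapping for a plain integer mode; unlike the loop above it zero-pads to nine bits, so modes with a leading 0 permission bit still render nine characters.

# Standalone sketch of the rwx rendering done in the splunk-file-list branch above.
def mode_to_perms(mode, ftype="reg"):
    bits = format(mode & 0o777, "09b")                  # nine permission bits, zero-padded
    perms = "".join(letter if bit == "1" else "-"
                    for bit, letter in zip(bits, "rwxrwxrwx"))
    prefix = {"dir": "d", "sym": "l"}.get(ftype, "-")   # same type prefixes as above
    return prefix + perms

print(mode_to_perms(0o755, "dir"))   # drwxr-xr-x
print(mode_to_perms(0o640))          # -rw-r-----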
Code example #15
File: collectionstest.py  Project: AZed/duplicity
                  "duplicity-full.2002-08-17T16:17:01-07:00.vol6.difftar.gpg",
                  "duplicity-inc.2002-08-17T16:17:01-07:00.to.2002-08-18T00:04:30-07:00.manifest.gpg",
                  "duplicity-inc.2002-08-17T16:17:01-07:00.to.2002-08-18T00:04:30-07:00.vol1.difftar.gpg",
                  "The following are extraneous duplicity files",
                  "duplicity-new-signatures.2001-08-17T02:05:13-05:00.to.2002-08-17T05:05:14-05:00.sigtar.gpg",
                  "duplicity-full.2002-08-15T01:01:01-07:00.vol1.difftar.gpg",
                  "duplicity-inc.2000-08-17T16:17:01-07:00.to.2000-08-18T00:04:30-07:00.manifest.gpg",
                  "duplicity-inc.2000-08-17T16:17:01-07:00.to.2000-08-18T00:04:30-07:00.vol1.difftar.gpg",
                  "Extra stuff to be ignored"]

assert not os.system("tar xzf testfiles.tar.gz > /dev/null 2>&1")

col_test_dir = path.Path("testfiles/collectionstest")
archive_dir = col_test_dir.append("archive_dir")
globals.archive_dir = archive_dir
archive_dir_backend = backend.get_backend("file://testfiles/collectionstest"
                                           "/archive_dir")

dummy_backend = None
real_backend = backend.get_backend("file://%s/%s" %
                                   (col_test_dir.name, "remote_dir"))
output_dir = path.Path("testfiles/output") # used as a temp directory
output_dir_backend = backend.get_backend("file://testfiles/output")


class CollectionTest(unittest.TestCase):
    """Test collections"""
    def setUp(self):
        assert not os.system("tar xzf testfiles.tar.gz > /dev/null 2>&1")
        assert not os.system("mkdir testfiles/output")

    def tearDown(self):
Code example #16
    def get_backend_files(self):
        backend_inst = backend.get_backend(self.backend_url)
        bl = backend_inst.list()
        backend_inst.close()
        return set(bl)
Code example #17
    "duplicity-inc.2002-08-17T16:17:01-07:00.to.2002-08-18T00:04:30-07:00.manifest.gpg",
    "duplicity-inc.2002-08-17T16:17:01-07:00.to.2002-08-18T00:04:30-07:00.vol1.difftar.gpg",
    "The following are extraneous duplicity files",
    "duplicity-new-signatures.2001-08-17T02:05:13-05:00.to.2002-08-17T05:05:14-05:00.sigtar.gpg",
    "duplicity-full.2002-08-15T01:01:01-07:00.vol1.difftar.gpg",
    "duplicity-inc.2000-08-17T16:17:01-07:00.to.2000-08-18T00:04:30-07:00.manifest.gpg",
    "duplicity-inc.2000-08-17T16:17:01-07:00.to.2000-08-18T00:04:30-07:00.vol1.difftar.gpg",
    "Extra stuff to be ignored"
]

assert not os.system("tar xzf testfiles.tar.gz > /dev/null 2>&1")

col_test_dir = path.Path("testfiles/collectionstest")
archive_dir = col_test_dir.append("archive_dir")
globals.archive_dir = archive_dir
archive_dir_backend = backend.get_backend("file://testfiles/collectionstest"
                                          "/archive_dir")

dummy_backend = None
real_backend = backend.get_backend("file://%s/%s" %
                                   (col_test_dir.name, "remote_dir"))
output_dir = path.Path("testfiles/output")  # used as a temp directory
output_dir_backend = backend.get_backend("file://testfiles/output")


class CollectionTest(unittest.TestCase):
    """Test collections"""
    def setUp(self):
        assert not os.system("tar xzf testfiles.tar.gz > /dev/null 2>&1")
        assert not os.system("mkdir testfiles/output")

    def tearDown(self):