Example no. 1
def sync_one_way(src: Union[RecordStore, str, Path],
                 target: Union[RecordStore, str, Path],
                 project_name: str
    ) -> List[str]:
    """
    Merge the records from `src` into `target`.

    Equivalent to Sumatra's RecordStore.sync(), except that only the
    `target` store is updated.

    Where the two stores hold the same label (within a project) for
    different records, those records are not synced. Returns the list of
    such non-synchronizable labels (empty if the sync was complete).
    """
    from sumatra.recordstore import get_record_store
    # Accept either ready-made stores or path-like values naming one.
    if isinstance(src, (str, Path)):
        src = get_record_store(str(src))
    if isinstance(target, (str, Path)):
        target = get_record_store(str(target))

    # NB: logic adapted from sumatra.recordstore.base
    labels_src = set(src.labels(project_name))
    labels_target = set(target.labels(project_name))

    # Labels present in both stores but mapping to different records
    # cannot be merged automatically.
    conflicting = [
        label for label in labels_src & labels_target
        if src.get(project_name, label) != target.get(project_name, label)
    ]
    # Copy over every record that exists only in `src`.
    for label in labels_src - labels_target:
        target.save(project_name, src.get(project_name, label))
    return conflicting
Example no. 2
def sync(argv):
    """Command entry point: synchronize two record stores."""
    usage = "%(prog)s sync PATH1 [PATH2]"
    description = dedent("""\
        Synchronize two record stores. If both PATH1 and PATH2 are given, the
        record stores at those locations will be synchronized. If only PATH1 is
        given, and the command is run in a directory containing a Sumatra
        project, only that project's records be synchronized with the store at
        PATH1. Note that PATH1 and PATH2 may be either filesystem paths or URLs.
        """)  # TODO: document what happens when label collisions leave the sync incomplete
    parser = ArgumentParser(usage=usage, description=description)
    parser.add_argument('path1')
    parser.add_argument('path2', nargs='?')
    args = parser.parse_args(argv)

    first_store = get_record_store(args.path1)
    if args.path2:
        # Two explicit stores: synchronize every project they contain.
        collisions = first_store.sync_all(get_record_store(args.path2))
    else:
        # Single store: sync it against the current project's own store.
        project = load_project()
        collisions = first_store.sync(project.record_store, project.name)

    if collisions:
        # Non-empty result means some labels could not be merged.
        print("Synchronization incomplete: there are two records with the same name for the following: %s" % ", ".join(collisions))
        sys.exit(1)
Example no. 3
def sync(argv):
    """Synchronize two record stores (CLI command)."""
    usage = "%(prog)s sync PATH1 [PATH2]"
    description = dedent("""\
        Synchronize two record stores. If both PATH1 and PATH2 are given, the
        record stores at those locations will be synchronized. If only PATH1 is
        given, and the command is run in a directory containing a Sumatra
        project, only that project's records be synchronized with the store at
        PATH1. Note that PATH1 and PATH2 may be either filesystem paths or URLs.
        """)  # still to document: behaviour when the sync is incomplete due to label collisions
    parser = ArgumentParser(usage=usage,
                            description=description)
    parser.add_argument('path1')
    parser.add_argument('path2', nargs='?')
    args = parser.parse_args(argv)

    store_a = get_record_store(args.path1)
    if args.path2:
        # Both locations given explicitly: full two-store synchronization.
        store_b = get_record_store(args.path2)
        collisions = store_a.sync_all(store_b)
    else:
        # Otherwise sync against the store of the project in the cwd.
        current_project = load_project()
        collisions = store_a.sync(current_project.record_store,
                                  current_project.name)

    if not collisions:
        return
    print("Synchronization incomplete: there are two records with the same name for the following: %s" % ", ".join(collisions))
    sys.exit(1)
Example no. 4
 def test_get_record_store_shelve(self):
     # Persist a shelf to disk, drop the in-memory store, then check that
     # get_record_store() re-opens the file as a ShelveRecordStore.
     shelf_store = shelve_store.ShelveRecordStore(shelf_name="test_record_store.shelf")
     shelf_store.shelf[str("foo")] = "bar"
     shelf_store.shelf.sync()
     del shelf_store
     # Some dbm backends append a ".db" suffix to the shelf file.
     assert any(os.path.exists(p) for p in
                ("test_record_store.shelf", "test_record_store.shelf.db"))
     reopened = get_record_store("test_record_store.shelf")
     self.assertIsInstance(reopened, shelve_store.ShelveRecordStore)
Example no. 5
 def test_get_record_store_shelve(self):
     # A shelf written to disk should be re-opened by get_record_store()
     # as a ShelveRecordStore.
     store = shelve_store.ShelveRecordStore(shelf_name="test_record_store.shelf")
     key = str("foo")  # byte-string key: dumbdbm fails with unicode in Py2
     store.shelf[key] = "bar"
     store.shelf.sync()
     del store
     assert glob("test_record_store.shelf*")  # suffix varies by dbm backend
     self.assertIsInstance(get_record_store("test_record_store.shelf"),
                           shelve_store.ShelveRecordStore)
Example no. 6
def determine_record_store(prj, sumatra_options, err=Exception):
    """Return the record store selected by `sumatra_options`, falling back
    to the project's own store.

    Raises `err` when neither an explicit (truthy) 'record_store' option
    nor a project is available.
    """
    explicit = (sumatra_options["record_store"]
                if 'record_store' in sumatra_options else None)
    if explicit:
        return get_record_store(explicit)
    if prj is None:
        raise err('Neither project_dir nor record_store defined')
    return prj.record_store
Example no. 7
 def test_get_record_store_shelve(self):
     # Create a shelf with a single entry, flush and discard it, then make
     # sure get_record_store() maps the file back to a ShelveRecordStore.
     record_store = shelve_store.ShelveRecordStore(shelf_name="test_record_store.shelf")
     record_store.shelf[str("foo")] = "bar"  # str() keeps dumbdbm happy on Py2
     record_store.shelf.sync()
     del record_store
     shelf_files = glob("test_record_store.shelf*")
     assert shelf_files  # at least one file, whatever suffix the dbm used
     self.assertIsInstance(get_record_store("test_record_store.shelf"),
                           shelve_store.ShelveRecordStore)
Example no. 8
 def test_get_record_store_shelve(self):
     # Round-trip: write a shelf, discard the store object, and verify
     # that get_record_store() detects the file as a ShelveRecordStore.
     store = shelve_store.ShelveRecordStore(
         shelf_name="test_record_store.shelf")
     store.shelf[str("foo")] = "bar"
     store.shelf.sync()
     del store
     # The underlying dbm backend may or may not add a ".db" suffix.
     candidates = ("test_record_store.shelf", "test_record_store.shelf.db")
     assert any(map(os.path.exists, candidates))
     reopened = get_record_store("test_record_store.shelf")
     self.assertIsInstance(reopened, shelve_store.ShelveRecordStore)
Example no. 9
def determine_record_store(prj, sumatra_options, err=Exception):
    """Pick the record store to use.

    A truthy 'record_store' entry in `sumatra_options` wins; otherwise the
    project's own store is used. With no option and no project, raise `err`.
    """
    has_explicit = ('record_store' in sumatra_options
                    and sumatra_options["record_store"])
    if has_explicit:
        return get_record_store(sumatra_options["record_store"])
    if prj is None:
        raise err('Neither project_dir nor record_store defined')
    return prj.record_store
Example no. 10
def configure(argv):
    """Modify the settings for the current project."""
    usage = "%(prog)s configure [options]"
    description = "Modify the settings for the current project."
    parser = ArgumentParser(usage=usage,
                            description=description)
    # --- option definitions -------------------------------------------------
    parser.add_argument('-d', '--datapath', metavar='PATH', help="set the path to the directory in which smt will search for datafiles generated by the simulation or analysis.")
    parser.add_argument('-i', '--input', metavar='PATH', default=None, help="set the path to the directory in which smt will search for input datafiles.")
    parser.add_argument('-l', '--addlabel', choices=['cmdline', 'parameters', None], metavar='OPTION',
                        default=None, help="If this option is set, smt will append the record label either to the command line (option 'cmdline') or to the parameter file (option 'parameters'), and will add the label to the datapath when searching for datafiles. It is up to the user to make use of this label inside their program to ensure files are created in the appropriate location.")
    parser.add_argument('-e', '--executable', metavar='PATH', help="set the path to the executable.")
    parser.add_argument('-r', '--repository', help="the URL of a Subversion or Mercurial repository containing the code. This will be checked out/cloned into the current directory.")
    parser.add_argument('-m', '--main', help="the name of the script that would be supplied on the command line if running the simulator normally, e.g. init.hoc.")
    parser.add_argument('-c', '--on-changed', help="may be 'store-diff' or 'error': the action to take if the code in the repository or any of the dependencies has changed.", choices=['store-diff', 'error'])
    parser.add_argument('-g', '--labelgenerator', choices=['timestamp', 'uuid'], metavar='OPTION', help="specify which method Sumatra should use to generate labels (options: timestamp, uuid)")
    parser.add_argument('-t', '--timestamp_format', help="the timestamp format given to strftime")
    parser.add_argument('-L', '--launch_mode', choices=['serial', 'distributed', 'slurm-mpi'], help="how computations should be launched.")
    parser.add_argument('-o', '--launch_mode_options', help="extra options for the given launch mode, to be given in quotes with a leading space, e.g. ' --foo=3'")
    # --plain / --no-plain share dest='plain'; the last flag supplied wins.
    parser.add_argument('-p', '--plain', dest='plain', action='store_true', help="pass arguments to the 'run' command straight through to the program. Otherwise arguments of the form name=value can be used to overwrite default parameter values.")
    parser.add_argument('--no-plain', dest='plain', action='store_false', help="arguments to the 'run' command of the form name=value will overwrite default parameter values. This is the opposite of the --plain option.")
    parser.add_argument('-s', '--store', help="Change the record store to the specified path, URL or URI (must be specified). {0}".format(store_arg_help))

    # The three data-archiving destinations are mutually exclusive.
    datastore = parser.add_mutually_exclusive_group()
    datastore.add_argument('-W', '--webdav', metavar='URL', help="specify a webdav URL (with username@password: if needed) as the archiving location for data")
    datastore.add_argument('-A', '--archive', metavar='PATH', help="specify a directory in which to archive output datafiles. If not specified, or if 'false', datafiles are not archived.")
    datastore.add_argument('-M', '--mirror', metavar='URL', help="specify a URL at which your datafiles will be mirrored.")

    parser.add_argument('--add-plugin', help="name of a Python module containing one or more plug-ins.")
    parser.add_argument('--remove-plugin', help="name of a plug-in module to remove from the project.")

    args = parser.parse_args(argv)

    # --- apply each requested change to the loaded project ------------------
    project = load_project()
    if args.store:
        # Switch the project to a different record-store backend.
        new_store = get_record_store(args.store)
        project.change_record_store(new_store)
    if args.datapath:
        project.data_store.root = args.datapath
    if args.archive:
        # "true" is shorthand for the default archive location inside .smt
        if args.archive.lower() == "true":
            args.archive = ".smt/archive"
        if hasattr(project.data_store, 'archive_store'):  # current data store is archiving
            if args.archive.lower() == 'false':
                # Turn archiving off: revert to a plain filesystem store.
                project.data_store = get_data_store("FileSystemDataStore",
                                                    {"root": project.data_store.root})
            else:
                project.data_store.archive_store = args.archive
        else:  # current data store is not archiving
            if args.archive.lower() != 'false':
                project.data_store = get_data_store("ArchivingFileSystemDataStore",
                                                    {"root": project.data_store.root, "archive": args.archive})
    elif args.mirror:
        project.data_store = get_data_store("MirroredFileSystemDataStore",
                                            {"root": project.data_store.root, "mirror_base_url": args.mirror})
    elif args.webdav:
        # should we care about archive migration??
        project.data_store = get_data_store("DavFsDataStore",
                                            {"root": project.data_store.root, "dav_url": args.webdav})
        project.data_store.archive_store = '.smt/archive'
    if args.input:
        project.input_datastore.root = args.input
    if args.repository:
        # Check the code out locally and make it the project default.
        repository = get_repository(args.repository)
        repository.checkout()
        project.default_repository = repository
    if args.main:
        project.default_main_file = args.main
    if args.executable:
        # The executable string may carry trailing options, e.g. "python -u".
        executable_path, executable_options = parse_executable_str(args.executable)
        project.default_executable = get_executable(executable_path,
                                                    script_file=args.main or project.default_main_file)
        project.default_executable.options = executable_options

    if args.on_changed:
        project.on_changed = args.on_changed
    if args.addlabel:
        project.data_label = args.addlabel
    if args.labelgenerator:
        project.label_generator = args.labelgenerator
    if args.timestamp_format:
        project.timestamp_format = args.timestamp_format
    if args.launch_mode:
        project.default_launch_mode = get_launch_mode(args.launch_mode)()
    if args.launch_mode_options:
        project.default_launch_mode.options = args.launch_mode_options.strip()
    # NOTE(review): neither --plain nor --no-plain sets default=None, and with
    # both actions sharing dest='plain' argparse initialises it to False, so
    # this check looks always-true — confirm whether default=None was intended.
    if args.plain is not None:
        project.allow_command_line_parameters = not args.plain
    if args.add_plugin:
        project.load_plugins(args.add_plugin)
    if args.remove_plugin:
        project.remove_plugins(args.remove_plugin)
    # Persist all modifications to the on-disk project description.
    project.save()
Example no. 11
def init(argv):
    """Create a new project in the current directory."""
    usage = "%(prog)s init [options] NAME"
    description = "Create a new project called NAME in the current directory."
    parser = ArgumentParser(usage=usage,
                            description=description)
    # --- option definitions -------------------------------------------------
    parser.add_argument('project_name', metavar='NAME', help="a short name for the project; should not contain spaces.")
    parser.add_argument('-d', '--datapath', metavar='PATH', default='./Data', help="set the path to the directory in which smt will search for output datafiles generated by the simulation/analysis. Defaults to %(default)s.")
    parser.add_argument('-i', '--input', metavar='PATH', default='/', help="set the path to the directory relative to which input datafile paths will be given. Defaults to the filesystem root.")
    parser.add_argument('-l', '--addlabel', choices=['cmdline', 'parameters', None], metavar='OPTION',
                        default=None, help="If this option is set, smt will append the record label either to the command line (option 'cmdline') or to the parameter file (option 'parameters'), and will add the label to the datapath when searching for datafiles. It is up to the user to make use of this label inside their program to ensure files are created in the appropriate location.")
    parser.add_argument('-e', '--executable', metavar='PATH', help="set the path to the executable. If this is not set, smt will try to infer the executable from the value of the --main option, if supplied, and will try to find the executable from the PATH environment variable, then by searching various likely locations on the filesystem.")
    parser.add_argument('-r', '--repository', help="the URL of a Subversion or Mercurial repository containing the code. This will be checked out/cloned into the current directory.")
    parser.add_argument('-m', '--main', help="the name of the script that would be supplied on the command line if running the simulation or analysis normally, e.g. init.hoc.")
    parser.add_argument('-c', '--on-changed', default='error', help="the action to take if the code in the repository or any of the depdendencies has changed. Defaults to %(default)s")  # need to add list of allowed values
    parser.add_argument('-s', '--store', help="Specify the path, URL or URI to the record store (must be specified). This can either be an existing record store or one to be created. {0} Not using the `--store` argument defaults to a DjangoRecordStore with Sqlite in `.smt/records`".format(store_arg_help))
    parser.add_argument('-g', '--labelgenerator', choices=['timestamp', 'uuid'], default='timestamp', metavar='OPTION', help="specify which method Sumatra should use to generate labels (options: timestamp, uuid)")
    parser.add_argument('-t', '--timestamp_format', help="the timestamp format given to strftime", default=TIMESTAMP_FORMAT)
    parser.add_argument('-L', '--launch_mode', choices=['serial', 'distributed', 'slurm-mpi'], default='serial', help="how computations should be launched. Defaults to %(default)s")
    parser.add_argument('-o', '--launch_mode_options', help="extra options for the given launch mode")

    # The three data-archiving destinations are mutually exclusive.
    datastore = parser.add_mutually_exclusive_group()
    datastore.add_argument('-W', '--webdav', metavar='URL', help="specify a webdav URL (with username@password: if needed) as the archiving location for data")
    datastore.add_argument('-A', '--archive', metavar='PATH', help="specify a directory in which to archive output datafiles. If not specified, or if 'false', datafiles are not archived.")
    datastore.add_argument('-M', '--mirror', metavar='URL', help="specify a URL at which your datafiles will be mirrored.")

    args = parser.parse_args(argv)

    # Refuse to initialise twice: load_project() succeeding means a project
    # already exists here; load_project() raising is the normal path.
    # NOTE(review): the broad `except Exception` also swallows unrelated
    # failures from load_project — confirm this is acceptable.
    try:
        project = load_project()
        parser.error("A project already exists in directory '{0}'.".format(project.path))
    except Exception:
        pass

    if not os.path.exists(".smt"):
        os.mkdir(".smt")

    if args.repository:
        repository = get_repository(args.repository)
        repository.checkout()
    else:
        repository = get_working_copy().repository  # if no repository is specified, we assume there is a working copy in the current directory.

    if args.executable:
        # Split "path [options...]" into the executable and its options.
        executable_path, executable_options = parse_executable_str(args.executable)
        executable = get_executable(path=executable_path)
        executable.args = executable_options
    elif args.main:
        # Fall back to inferring the executable from the main script.
        try:
            executable = get_executable(script_file=args.main)
        except Exception:  # assume unrecognized extension - really need more specific exception type
            # should warn that extension unrecognized
            executable = None
    else:
        executable = None
    if args.store:
        record_store = get_record_store(args.store)
    else:
        record_store = 'default'  # sentinel: let Project build its default store

    # --- choose the output data store ---------------------------------------
    if args.webdav:
        # should we care about archive migration??
        output_datastore = get_data_store("DavFsDataStore", {"root": args.datapath, "dav_url": args.webdav})
        args.archive = '.smt/archive'
    elif args.archive and args.archive.lower() != 'false':
        # "true" is shorthand for the default archive location inside .smt
        if args.archive.lower() == "true":
            args.archive = ".smt/archive"
        args.archive = os.path.abspath(args.archive)
        output_datastore = get_data_store("ArchivingFileSystemDataStore", {"root": args.datapath, "archive": args.archive})
    elif args.mirror:
        output_datastore = get_data_store("MirroredFileSystemDataStore", {"root": args.datapath, "mirror_base_url": args.mirror})
    else:
        output_datastore = get_data_store("FileSystemDataStore", {"root": args.datapath})
    input_datastore = get_data_store("FileSystemDataStore", {"root": args.input})

    if args.launch_mode_options:
        args.launch_mode_options = args.launch_mode_options.strip()
    launch_mode = get_launch_mode(args.launch_mode)(options=args.launch_mode_options)

    project = Project(name=args.project_name,
                      default_executable=executable,
                      default_repository=repository,
                      default_main_file=args.main,  # what if incompatible with executable?
                      default_launch_mode=launch_mode,
                      data_store=output_datastore,
                      record_store=record_store,
                      on_changed=args.on_changed,
                      data_label=args.addlabel,
                      input_datastore=input_datastore,
                      label_generator=args.labelgenerator,
                      timestamp_format=args.timestamp_format)
    # If the chosen store already has records for this project name, cache
    # their labels locally.
    # NOTE(review): file opened without a context manager — if format_records
    # raises, the handle leaks; consider `with open(...)`.
    if os.path.exists('.smt') and project.record_store.has_project(project.name):
        f = open('.smt/labels', 'w')
        f.writelines(project.format_records(tags=None, mode='short', format='text', reverse=False))
        f.close()
    project.save()
Example no. 12
 def test_get_record_store_create_shelve(self):
     # With no pre-existing file at the path, get_record_store() should
     # fall back to creating a brand-new shelve-based store.
     assert os.path.exists("test_record_store.shelf") is False
     new_store = get_record_store("test_record_store.shelf")
     self.assertIsInstance(new_store, shelve_store.ShelveRecordStore)
Example no. 13
def configure(argv):
    """Modify the settings for the current project."""
    usage = "%(prog)s configure [options]"
    description = "Modify the settings for the current project."
    parser = ArgumentParser(usage=usage,
                            description=description)
    # --- option definitions -------------------------------------------------
    parser.add_argument('-d', '--datapath', metavar='PATH', help="set the path to the directory in which smt will search for datafiles generated by the simulation or analysis.")
    parser.add_argument('-i', '--input', metavar='PATH', default=None, help="set the path to the directory in which smt will search for input datafiles.")
    parser.add_argument('-l', '--addlabel', choices=['cmdline', 'parameters', None], metavar='OPTION',
                        default=None, help="If this option is set, smt will append the record label either to the command line (option 'cmdline') or to the parameter file (option 'parameters'), and will add the label to the datapath when searching for datafiles. It is up to the user to make use of this label inside their program to ensure files are created in the appropriate location.")
    parser.add_argument('-e', '--executable', metavar='PATH', help="set the path to the executable.")
    parser.add_argument('-r', '--repository', help="the URL of a Subversion or Mercurial repository containing the code. This will be checked out/cloned into the current directory.")
    parser.add_argument('-m', '--main', help="the name of the script that would be supplied on the command line if running the simulator normally, e.g. init.hoc.")
    parser.add_argument('-c', '--on-changed', help="may be 'store-diff' or 'error': the action to take if the code in the repository or any of the dependencies has changed.", choices=['store-diff', 'error'])
    parser.add_argument('-g', '--labelgenerator', choices=['timestamp', 'uuid'], metavar='OPTION', help="specify which method Sumatra should use to generate labels (options: timestamp, uuid)")
    parser.add_argument('-t', '--timestamp_format', help="the timestamp format given to strftime")
    parser.add_argument('-L', '--launch_mode', choices=['serial', 'distributed', 'slurm-mpi'], help="how computations should be launched.")
    parser.add_argument('-o', '--launch_mode_options', help="extra options for the given launch mode, to be given in quotes with a leading space, e.g. ' --foo=3'")
    parser.add_argument('-p', '--plain', action='store_true', help="pass arguments to the run command straight through to the program.")
    parser.add_argument('-s', '--store', help="Change the record store to the specified path, URL or URI (must be specified). {0}".format(store_arg_help))

    # The three data-archiving destinations are mutually exclusive.
    datastore = parser.add_mutually_exclusive_group()
    datastore.add_argument('-W', '--webdav', metavar='URL', help="specify a webdav URL (with username@password: if needed) as the archiving location for data")
    datastore.add_argument('-A', '--archive', metavar='PATH', help="specify a directory in which to archive output datafiles. If not specified, or if 'false', datafiles are not archived.")
    datastore.add_argument('-M', '--mirror', metavar='URL', help="specify a URL at which your datafiles will be mirrored.")

    args = parser.parse_args(argv)

    # --- apply each requested change to the loaded project ------------------
    project = load_project()
    if args.store:
        # Switch the project to a different record-store backend.
        new_store = get_record_store(args.store)
        project.change_record_store(new_store)

    if args.archive:
        # "true" is shorthand for the default archive location inside .smt
        if args.archive.lower() == "true":
            args.archive = ".smt/archive"
        if hasattr(project.data_store, 'archive_store'):  # current data store is archiving
            if args.archive.lower() == 'false':
                # Turn archiving off: revert to a plain filesystem store.
                project.data_store = get_data_store("FileSystemDataStore", {"root": project.data_store.root})
            else:
                project.data_store.archive_store = args.archive
        else:  # current data store is not archiving
            if args.archive.lower() != 'false':
                # NOTE(review): uses args.datapath (None when -d not given, and
                # only applied to the project further below) rather than
                # project.data_store.root — confirm intended.
                project.data_store = get_data_store("ArchivingFileSystemDataStore", {"root": args.datapath, "archive": args.archive})
    if args.webdav:
        # should we care about archive migration??
        # NOTE(review): same args.datapath concern as the archive branch above.
        project.data_store = get_data_store("DavFsDataStore", {"root": args.datapath, "dav_url": args.webdav})
        project.data_store.archive_store = '.smt/archive'
    if args.datapath:
        project.data_store.root = args.datapath
    if args.input:
        project.input_datastore.root = args.input
    if args.repository:
        # Check the code out locally and make it the project default.
        repository = get_repository(args.repository)
        repository.checkout()
        project.default_repository = repository
    if args.main:
        project.default_main_file = args.main
    if args.executable:
        # The executable string may carry trailing options, e.g. "python -u".
        executable_path, executable_options = parse_executable_str(args.executable)
        project.default_executable = get_executable(executable_path,
                                                    script_file=args.main or project.default_main_file)
        project.default_executable.options = executable_options

    if args.on_changed:
        project.on_changed = args.on_changed
    if args.addlabel:
        project.data_label = args.addlabel
    if args.labelgenerator:
        project.label_generator = args.labelgenerator
    if args.timestamp_format:
        project.timestamp_format = args.timestamp_format
    if args.launch_mode:
        project.default_launch_mode = get_launch_mode(args.launch_mode)()
    if args.launch_mode_options:
        project.default_launch_mode.options = args.launch_mode_options.strip()
    if args.plain:
        project.allow_command_line_parameters = False
    # Persist all modifications to the on-disk project description.
    project.save()
Example no. 14
def init(argv):
    """Create a new project in the current directory."""
    usage = "%(prog)s init [options] NAME"
    description = "Create a new project called NAME in the current directory."
    parser = ArgumentParser(usage=usage,
                            description=description)
    # --- option definitions -------------------------------------------------
    parser.add_argument('project_name', metavar='NAME', help="a short name for the project; should not contain spaces.")
    parser.add_argument('-d', '--datapath', metavar='PATH', default='./Data', help="set the path to the directory in which smt will search for output datafiles generated by the simulation/analysis. Defaults to %(default)s.")
    parser.add_argument('-i', '--input', metavar='PATH', default='/', help="set the path to the directory relative to which input datafile paths will be given. Defaults to the filesystem root.")
    parser.add_argument('-l', '--addlabel', choices=['cmdline', 'parameters', None], metavar='OPTION',
                        default=None, help="If this option is set, smt will append the record label either to the command line (option 'cmdline') or to the parameter file (option 'parameters'), and will add the label to the datapath when searching for datafiles. It is up to the user to make use of this label inside their program to ensure files are created in the appropriate location.")
    parser.add_argument('-e', '--executable', metavar='PATH', help="set the path to the executable. If this is not set, smt will try to infer the executable from the value of the --main option, if supplied, and will try to find the executable from the PATH environment variable, then by searching various likely locations on the filesystem.")
    parser.add_argument('-r', '--repository', help="the URL of a Subversion or Mercurial repository containing the code. This will be checked out/cloned into the current directory.")
    parser.add_argument('-m', '--main', help="the name of the script that would be supplied on the command line if running the simulation or analysis normally, e.g. init.hoc.")
    parser.add_argument('-c', '--on-changed', default='error', help="the action to take if the code in the repository or any of the depdendencies has changed. Defaults to %(default)s")  # need to add list of allowed values
    parser.add_argument('-s', '--store', help="Specify the path, URL or URI to the record store (must be specified). This can either be an existing record store or one to be created. {0} Not using the `--store` argument defaults to a DjangoRecordStore with Sqlite in `.smt/records`".format(store_arg_help))
    parser.add_argument('-g', '--labelgenerator', choices=['timestamp', 'uuid'], default='timestamp', metavar='OPTION', help="specify which method Sumatra should use to generate labels (options: timestamp, uuid)")
    parser.add_argument('-t', '--timestamp_format', help="the timestamp format given to strftime", default=TIMESTAMP_FORMAT)
    parser.add_argument('-L', '--launch_mode', choices=['serial', 'distributed', 'slurm-mpi'], default='serial', help="how computations should be launched. Defaults to %(default)s")
    parser.add_argument('-o', '--launch_mode_options', help="extra options for the given launch mode")

    # The three data-archiving destinations are mutually exclusive.
    datastore = parser.add_mutually_exclusive_group()
    datastore.add_argument('-W', '--webdav', metavar='URL', help="specify a webdav URL (with username@password: if needed) as the archiving location for data")
    datastore.add_argument('-A', '--archive', metavar='PATH', help="specify a directory in which to archive output datafiles. If not specified, or if 'false', datafiles are not archived.")
    datastore.add_argument('-M', '--mirror', metavar='URL', help="specify a URL at which your datafiles will be mirrored.")

    args = parser.parse_args(argv)

    # Refuse to initialise twice: load_project() succeeding means a project
    # already exists; raising is the normal "no project yet" path.
    # NOTE(review): the broad `except Exception` also hides unrelated errors.
    try:
        project = load_project()
        parser.error("A project already exists in this directory.")
    except Exception:
        pass

    if not os.path.exists(".smt"):
        os.mkdir(".smt")

    if args.repository:
        repository = get_repository(args.repository)
        repository.checkout()
    else:
        repository = get_working_copy().repository  # if no repository is specified, we assume there is a working copy in the current directory.

    if args.executable:
        # Split "path [options...]" into the executable and its options.
        executable_path, executable_options = parse_executable_str(args.executable)
        executable = get_executable(path=executable_path)
        executable.args = executable_options
    elif args.main:
        # Fall back to inferring the executable from the main script.
        try:
            executable = get_executable(script_file=args.main)
        except Exception:  # assume unrecognized extension - really need more specific exception type
            # should warn that extension unrecognized
            executable = None
    else:
        executable = None
    if args.store:
        record_store = get_record_store(args.store)
    else:
        record_store = 'default'  # sentinel: let Project build its default store

    # --- choose the output data store ---------------------------------------
    if args.webdav:
        # should we care about archive migration??
        output_datastore = get_data_store("DavFsDataStore", {"root": args.datapath, "dav_url": args.webdav})
        args.archive = '.smt/archive'
    elif args.archive and args.archive.lower() != 'false':
        # "true" is shorthand for the default archive location inside .smt
        if args.archive.lower() == "true":
            args.archive = ".smt/archive"
        args.archive = os.path.abspath(args.archive)
        output_datastore = get_data_store("ArchivingFileSystemDataStore", {"root": args.datapath, "archive": args.archive})
    elif args.mirror:
        output_datastore = get_data_store("MirroredFileSystemDataStore", {"root": args.datapath, "mirror_base_url": args.mirror})
    else:
        output_datastore = get_data_store("FileSystemDataStore", {"root": args.datapath})
    input_datastore = get_data_store("FileSystemDataStore", {"root": args.input})

    if args.launch_mode_options:
        args.launch_mode_options = args.launch_mode_options.strip()
    launch_mode = get_launch_mode(args.launch_mode)(options=args.launch_mode_options)

    # Assemble the project description and write it to disk.
    project = Project(name=args.project_name,
                      default_executable=executable,
                      default_repository=repository,
                      default_main_file=args.main,  # what if incompatible with executable?
                      default_launch_mode=launch_mode,
                      data_store=output_datastore,
                      record_store=record_store,
                      on_changed=args.on_changed,
                      data_label=args.addlabel,
                      input_datastore=input_datastore,
                      label_generator=args.labelgenerator,
                      timestamp_format=args.timestamp_format)
    project.save()
Example no. 15
 def test_get_record_store_create_shelve(self):
     # No shelf files may pre-exist; get_record_store() should then create
     # a fresh ShelveRecordStore at the requested path.
     assert glob("test_record_store.shelf*") == []
     self.assertIsInstance(get_record_store("test_record_store.shelf"),
                           shelve_store.ShelveRecordStore)
Example no. 16
def merge(sources, target, keep, backup, verbose):
    """
    Merge entries from multiple record stores.

    SOURCES may be either record store files or directories; directories are
    recursed into. If directories, they should only contain record store files.
    Hidden files and directories (those starting with '.') are skipped.

    Intended usage is for combining run data that was recorded in separate
    record stores with the --record-store option of `smttask run`.
    E.g., if multiple runs used all different stores and placed them under
    the directory 'run/tmp_stores', they can be merged into the current project:

        smttask store merge run/tmp_stores

    To merge into a record store at a different location:

        smttask store merge run/tmp_stores --target path/to/record_store
    """
    # Normalize all source arguments to Path objects up front so the
    # is_dir()/rename() calls below work uniformly.
    sources = tuple(
        Path(p) for p in sources
    )  # With v8, we could do this by passing a 'path_type' argument to click.Path

    # Reference: sumatra.commands:sync()
    # Imports are local so that Django/Sumatra are only loaded when this
    # command actually runs.
    import shutil
    import textwrap
    from django.db import connections
    from sumatra.recordstore import get_record_store
    from .utils import sync_one_way

    # Concatenate source files, recursing into directories
    source_files = []
    for store_path in sources:
        if store_path.is_dir():
            for dirpath, dirnames, filenames in os.walk(store_path):
                # Skip hidden files and directories
                # (mutating `dirnames` in place prunes os.walk's recursion;
                # iterating over slices avoids skipping entries while removing)
                for dirname in dirnames[:]:
                    if dirname.startswith('.'):
                        dirnames.remove(dirname)
                for filename in filenames[:]:
                    if filename.startswith('.'):
                        filenames.remove(filename)
                # Add non-hidden files to the list of sources
                # (sorted per directory for a deterministic merge order)
                source_files.extend(
                    sorted(Path(dirpath) / filename for filename in filenames))
        else:
            source_files.append(store_path)

    if len(source_files) == 0:
        print("No files were found at the given location. Exiting.")
        return

    # Default to the current project's record store when no explicit
    # target is given. NOTE(review): `config` is a module-level name not
    # visible in this chunk — presumably the smttask config singleton.
    if target is None:
        target_store = config.project.record_store
    else:
        target_store = get_record_store(str(target))

    # NOTE(review): `backup` plays a double role: here it backs up the
    # *target* store before merging, and below it decides whether cleaned-up
    # *source* files are moved to a .backup dir instead of deleted.
    if backup:
        target_store.backup()

    all_collisions = {}  # maps source path -> list of colliding record labels
    # NB: Django requires that all record stores be loaded before using any of them
    src_stores = [
        get_record_store(str(src_path))
        for src_path in tqdm(source_files, desc="Loading record stores")
    ]
    for src_path, src_store in tqdm(zip(source_files, src_stores),
                                    desc="Merging record stores",
                                    total=len(src_stores)):
        # sync_one_way copies records into the target only; it returns the
        # labels present in both stores whose records differ (not synced).
        collisions = sync_one_way(src_store, target_store, config.project.name)
        # Before moving or deleting the file, we need to close the DB connection
        connections[src_store._db_label].close()
        # If the sync worked without collisions, now clean up the store file
        # Otherwise, add to the list of collisions to be printed once all stores are merged
        if collisions:
            all_collisions[src_path] = collisions
        elif not keep and backup:
            # Move the merged source file into a sibling ".backup" directory.
            backupdir = Path(src_path).parent / ".backup"
            backupdir.mkdir(parents=True, exist_ok=True)
            backuppath = backupdir / Path(src_path).name
            src_path.rename(backuppath)
            if verbose:
                tqdm.write(
                    f"Moved record store to backup location {backuppath}")
        elif not keep and not backup:
            # No backup requested: remove the merged source file outright.
            os.remove(src_path)
            if verbose:
                tqdm.write(f"Removed record store at location {src_path}.")
        elif verbose:
            # NOTE(review): the message mentions --clean while the parameter
            # is `keep` — presumably the CLI flag is --clean/--keep; confirm
            # against the click option declaration.
            tqdm.write(
                f"The record store at location {src_path} can be removed. "
                "(use --clean to do this automatically).")

    # Report all collisions at the end, wrapped to the terminal width.
    if all_collisions:
        print()
        print("Merge incomplete: the record names listed below occur in both "
              "the indicated source store and the target store, and the "
              "corresponding records in each store differ.")
        termcols = shutil.get_terminal_size().columns
        for src_path, collisions in all_collisions.items():
            print(src_path)
            print("  " + "\n  ".join(
                textwrap.wrap(", ".join(collisions), termcols - 5)))
            print()
Ejemplo n.º 17
0
 def test_get_record_store_http(self):
     """An http:// URL should be resolved to an HttpRecordStore."""
     # Fix: removed the spurious trailing comma in the parameter list (self,).
     self.assertIsInstance(get_record_store("http://records.example.com/"),
                           http_store.HttpRecordStore)
Ejemplo n.º 18
0
 def test_get_record_store_create_shelve(self):
     """get_record_store() on a nonexistent ``.shelf`` path creates a ShelveRecordStore."""
     # No leftover shelf files may exist before the call.
     assert not glob("test_record_store.shelf*")
     store = get_record_store("test_record_store.shelf")
     self.assertIsInstance(store, shelve_store.ShelveRecordStore)
Ejemplo n.º 19
0
 def test_get_record_store_http(self):
     """An http:// URL should be resolved to an HttpRecordStore."""
     # Fix: removed the spurious trailing comma in the parameter list (self, ).
     self.assertIsInstance(get_record_store("http://records.example.com/"),
                           http_store.HttpRecordStore)
Ejemplo n.º 20
0
 def test_get_record_store_create_shelve(self):
     """A missing ``.shelf`` file should result in a new ShelveRecordStore."""
     shelf_path = "test_record_store.shelf"
     # The shelf file must not already exist.
     assert not os.path.exists(shelf_path)
     store = get_record_store(shelf_path)
     self.assertIsInstance(store, shelve_store.ShelveRecordStore)