Example #1
0
 def add_repo(self, name, uri, repo_type="rpm-md", prio=None):
     """Register a zypper repository inside the image root tree.

     :param name: repository name, also used for the ``<name>.repo`` file
     :param uri: repository URI
     :param repo_type: repository type, translated to zypper's type string
     :param prio: optional repository priority applied via ``modifyrepo -p``
     """
     repo_file = self.shared_zypper_dir["reposd-dir"] + "/" + name + ".repo"
     self.repo_names.append(name + ".repo")
     if "iso-mount" in uri:
         # iso mount point is a tmpdir, thus different each time we build
         Path.wipe(repo_file)
     if not os.path.exists(repo_file):
         Command.run(
             ["zypper"]
             + self.zypper_args
             + [
                 "--root",
                 self.root_dir,
                 "addrepo",
                 "-f",
                 "--type",
                 self.__translate_repo_type(repo_type),
                 "--keep-packages",
                 uri,
                 name,
             ],
             self.command_env,
         )
         if prio:
             # priority can only be set once the repository exists
             Command.run(
                 ["zypper"] + self.zypper_args + ["--root", self.root_dir, "modifyrepo", "-p", format(prio), name],
                 self.command_env,
             )
Example #2
0
    def get_entries_from_path(self, path_list, match, recursion, test_mode, get_files, get_dirs, get_symlinks):
        """Collect entries for matching filesystem objects under the given folders.

        :param path_list: folders to scan
        :param match: predicate deciding whether a path qualifies
        :param recursion: recursion setting forwarded to the depth/listing helpers
        :param test_mode: forwarded to create_entry
        :param get_files: include regular (non-symlink) files
        :param get_dirs: include directories
        :param get_symlinks: include symlinks
        :returns: list of unique entries
        """
        entries = []

        for folder in path_list:
            log.verbose('Scanning folder %s. Recursion is set to %s.' % (folder, recursion))
            folder = Path(folder).expanduser()
            log.debug('Scanning %s' % folder)
            base_depth = len(folder.splitall())
            max_depth = self.get_max_depth(recursion, base_depth)
            folder_objects = self.get_folder_objects(folder, recursion)
            for path_object in folder_objects:
                log.debug('Checking if %s qualifies to be added as an entry.' % path_object)
                try:
                    # result intentionally ignored: the call only serves to
                    # surface UnicodeError for undecodable file names
                    path_object.exists()
                except UnicodeError:
                    log.error('File %s not decodable with filesystem encoding: %s' % (
                        path_object, sys.getfilesystemencoding()))
                    continue
                entry = None
                object_depth = len(path_object.splitall())
                if object_depth <= max_depth:
                    if match(path_object):
                        # keep only the requested object types
                        if (path_object.isdir() and get_dirs) or (
                                path_object.islink() and get_symlinks) or (
                                path_object.isfile() and not path_object.islink() and get_files):
                            entry = self.create_entry(path_object, test_mode)
                        else:
                            log.debug("Path object's %s type doesn't match requested object types." % path_object)
                        if entry and entry not in entries:
                            entries.append(entry)

        return entries
def do_mount(path, name):
    """Ensure the workspace entry *name* is a symlink to *path*.

    Returns the workspace path whether or not a link was created.
    """
    target = workspace_dir(name)
    source = Path(path).abspath()
    if not target.exists() and source.exists():
        source.symlink(target)
    return target
def load(database_name):
    """Load data, preferring the sqlite cache unless the JSON source is newer.

    Falls back to the JSON file whenever the sqlite file is missing or stale.
    """
    base = Path(database_name)
    json_path = Path(base.namebase + '.json')
    sqlite_path = Path(base.namebase + '.sqlite')
    sqlite_is_fresh = sqlite_path.exists() and json_path.mtime <= sqlite_path.mtime
    if sqlite_is_fresh:
        return load_from_database(sqlite_path)
    return load_from_json(json_path)
Example #5
0
 def on_task_filter(self, task, config):
     """Reject accepted entries whose file name already exists under the
     configured paths.

     Builds a name -> path index of every file below each configured folder,
     then rejects accepted entries whose name is found in the index.
     """
     if not task.accepted:
         log.debug('No accepted entries, not scanning for existing.')
         return
     log.verbose('Scanning path(s) for existing files.')
     config = self.prepare_config(config)
     filenames = {}
     for folder in config:
         folder = Path(folder).expanduser()
         if not folder.exists():
             raise plugin.PluginWarning('Path %s does not exist' % folder, log)
         for p in folder.walk(errors='ignore'):
             key = p.name
             # windows file system is not case sensitive
             if platform.system() == 'Windows':
                 key = key.lower()
             filenames[key] = p
     for entry in task.accepted:
         # priority is: filename, location (filename only), title
         name = Path(entry.get('filename', entry.get('location', entry['title']))).name
         if platform.system() == 'Windows':
             name = name.lower()
         if name in filenames:
             log.debug('Found %s in %s' % (name, filenames[name]))
             entry.reject('exists in %s' % filenames[name])
Example #6
0
def cached_function(inputs, outputs):
    """Compile a theano function for (inputs, outputs), caching the pickled
    result on disk under ~/.hierctrl_cache.

    The cache key is derived from the pretty-printed output expressions, so
    structurally identical graphs reuse the same compiled function.
    """
    import theano

    with Message("Hashing theano fn"):
        if hasattr(outputs, "__len__"):
            hash_content = tuple(map(theano.pp, outputs))
        else:
            hash_content = theano.pp(outputs)
    # NOTE(review): the trailing [:-1] looks like a Python 2 idiom for
    # stripping the 'L' suffix of a long; on Python 3 it drops the last hex
    # digit of the key instead -- confirm which is intended.
    cache_key = hex(hash(hash_content) & (2 ** 64 - 1))[:-1]
    cache_dir = Path("~/.hierctrl_cache")
    cache_dir = cache_dir.expanduser()
    cache_dir.mkdir_p()
    cache_file = cache_dir / ("%s.pkl" % cache_key)
    if cache_file.exists():
        with Message("unpickling"):
            with open(cache_file, "rb") as f:
                try:
                    return pickle.load(f)
                except Exception:
                    # corrupt cache entry: fall through and recompile
                    pass
    with Message("compiling"):
        fun = compile_function(inputs, outputs)
    with Message("picking"):
        with open(cache_file, "wb") as f:
            pickle.dump(fun, f, protocol=pickle.HIGHEST_PROTOCOL)
    return fun
def do_command(args, path):
    """Run *args* as a shell command inside the 'workspace' directory
    (optionally under the sub-path *path*).

    :returns: JSON string describing path, stdout/stderr lines, cmd and retcode
    :raises Exception: when the directory is missing or the command exits non-zero
    """
    cwd = Path('workspace')
    if path:
        cwd = cwd / path
    print("going into {0}".format(cwd))
    if not cwd.exists():
        # bug fix: the original raised Exception(d3), an undefined name
        raise Exception(cwd)

    p = subprocess.Popen(args,
                         shell=True,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE,
                         cwd=cwd)
    # communicate() avoids the pipe-buffer deadlock that sequential
    # stdout.read() / stderr.read() can cause when one pipe fills first
    out, err = p.communicate()
    stdout_o = out.decode("utf-8").split("\n")
    stderr_o = err.decode("utf-8").split("\n")
    retcode = p.returncode
    data = json.dumps({
        'path': cwd,
        'stdout': stdout_o,
        'stderr': stderr_o,
        'cmd': args,
        'retcode': retcode,
    })

    print(data)
    if not retcode == 0:
        raise Exception("bad return")
    return data
Example #8
0
def mf_list(app, args):
    """ list known makefiles """
    # makefiles ship in <package root>/etc/makefiles as *.mk files
    makefile_dir = Path(__file__).dirname().dirname() / 'etc' / 'makefiles'
    names = [candidate.basename().replace('.mk', '')
             for candidate in makefile_dir.glob('*.mk')]
    for name in names:
        print(name)
Example #9
0
 def _set_tuple_marker(self):
     """Configure path, joinstyle and transform from a tuple-style marker.

     A numeric first element means (numsides, symstyle[, rotation]) where
     symstyle selects polygon (0), star (1), asterisk (2) or circle (3);
     any other first element is treated as a vertex array for a custom path.
     """
     marker = self._marker
     if is_numlike(marker[0]):
         if len(marker) == 2:
             numsides, rotation = marker[0], 0.0
         elif len(marker) == 3:
             numsides, rotation = marker[0], marker[2]
         # NOTE(review): a numeric tuple of any other length leaves
         # numsides/rotation unbound (NameError below) -- confirm callers
         # always pass 2- or 3-tuples.
         symstyle = marker[1]
         if symstyle == 0:
             self._path = Path.unit_regular_polygon(numsides)
             self._joinstyle = 'miter'
         elif symstyle == 1:
             self._path = Path.unit_regular_star(numsides)
             self._joinstyle = 'bevel'
         elif symstyle == 2:
             # asterisk markers are stroke-only, so disable filling
             self._path = Path.unit_regular_asterisk(numsides)
             self._filled = False
             self._joinstyle = 'bevel'
         elif symstyle == 3:
             self._path = Path.unit_circle()
         self._transform = Affine2D().scale(0.5).rotate_deg(rotation)
     else:
         verts = np.asarray(marker[0])
         path = Path(verts)
         self._set_custom_marker(path)
Example #10
0
    def testMakeDirs(self):
        """makedirs()/removedirs() round-trip, with and without explicit modes."""
        d = Path(self.tempdir)

        # Placeholder file so that when removedirs() is called,
        # it doesn't remove the temporary directory itself.
        tempf = d / 'temp.txt'
        tempf.touch()
        try:
            foo = d / 'foo'
            boz = foo / 'bar' / 'baz' / 'boz'
            boz.makedirs()
            try:
                self.assert_(boz.isdir())
            finally:
                boz.removedirs()
            # removedirs() must prune up to (but not including) the tempdir
            self.failIf(foo.exists())
            self.assert_(d.exists())

            # same round-trip again, this time with explicit permission modes
            foo.mkdir(0o750)
            boz.makedirs(0o700)
            try:
                self.assert_(boz.isdir())
            finally:
                boz.removedirs()
            self.failIf(foo.exists())
            self.assert_(d.exists())
        finally:
            os.remove(tempf)
Example #11
0
    def __init__(
        self,
        driver,
        selector_translator,
        default_timeout=5,
        screenshot_directory=None,
        screenshot_fix_directory=None,
        screenshot_diff_directory=None,
        simex=None,
    ):
        """Wrap *driver* with selector translation, timeout defaults and
        screenshot directory bookkeeping.
        """
        self._driver = driver
        self._selector_translator = selector_translator
        self._default_timeout = default_timeout
        if simex is None:
            simex = DefaultSimex(flexible_whitespace=True)
        self._simex = simex

        # screenshot and diff directories are created on demand; the "fix"
        # directory must already exist
        if screenshot_directory is None:
            self._screenshot_directory = None
        else:
            self._screenshot_directory = Path(screenshot_directory)
            if not self._screenshot_directory.exists():
                self._screenshot_directory.mkdir()

        if screenshot_diff_directory is None:
            self._screenshot_diff_directory = None
        else:
            self._screenshot_diff_directory = Path(screenshot_diff_directory)
            if not self._screenshot_diff_directory.exists():
                self._screenshot_diff_directory.mkdir()

        if screenshot_fix_directory is None:
            self._screenshot_fix_directory = None
        else:
            self._screenshot_fix_directory = Path(screenshot_fix_directory)
            assert self._screenshot_fix_directory.exists()
Example #12
0
 def testExplicitModuleClasses(self):
     """
     Multiple calls to path.using_module should produce the same class.
     """
     nt_path = Path.using_module(ntpath)
     # identity (not just equality): the generated class must be cached
     self.assert_(nt_path is Path.using_module(ntpath))
     self.assertEqual(nt_path.__name__, 'Path_ntpath')
Example #13
0
 def testMakedirs_pReturnsSelf(self):
     """
     Path('foo').makedirs_p() == Path('foo')
     """
     p = Path(self.tempdir) / "newpath"
     ret = p.makedirs_p()
     # makedirs_p is chainable: it must return the path it was called on
     self.assertEquals(p, ret)
Example #14
0
def memoryExecuteCommand(pathToConfigFile):
    """Execute a configuration file that may live outside ../cfg/.

    Valve's exec command only reads files under ../cfg/, so paths starting
    with '..' are copied to a uniquely named temporary file inside ../cfg/,
    executed, and removed again.  This preserves the pre-OB-update behaviour
    of es_(x)mexec, which allowed configuration files outside ../cfg/.
    """
    pathToConfigFile = pathToConfigFile.strip('"')
    if not pathToConfigFile.startswith('..'):
        # already inside ../cfg/ -- the engine can exec it directly
        es.server.cmd('exec "%s"' % pathToConfigFile)
        return

    cfg_dir = Path(str(es.ServerVar('eventscripts_gamedir'))).joinpath('cfg')
    source_file = cfg_dir.joinpath(pathToConfigFile)

    # unique temporary name so concurrent executions never collide
    token = hashlib.md5(str(time.time()).encode('utf-8')).hexdigest()
    temp_name = '%s.%s.mexec.cfg' % (source_file.namebase, token)
    temp_file = cfg_dir.joinpath(temp_name)

    try:
        source_file.copyfile(temp_file)
        es.server.cmd('exec "%s"' % temp_name)
        temp_file.remove()
    except IOError:
        es.dbgmsg(0, "ERROR: es_mexec cannot find the file path %s" % pathToConfigFile)
Example #15
0
def install():
    """Install packages and Go, fetch the kubernetes source, and open ports."""
    install_packages()
    hookenv.log('Installing go')
    download_go()

    hookenv.log('Adding kubernetes and go to the path')
    address = hookenv.unit_private_ip()
    # environment exported for every shell via the rc files
    strings = [
        'export GOROOT=/usr/local/go\n',
        'export PATH=$PATH:$GOROOT/bin\n',
        'export KUBERNETES_MASTER=http://{0}:8080\n'.format(address),
    ]
    update_rc_files(strings)
    hookenv.log('Downloading kubernetes code')
    clone_repository()

    # Create the directory to store the keys and auth files.
    srv = Path('/srv/kubernetes')
    if not srv.isdir():
        srv.makedirs_p()

    # expose the service ports
    hookenv.open_port(8080)
    hookenv.open_port(6443)
    hookenv.open_port(443)

    hookenv.log('Install complete')
Example #16
0
def enter_sandbox(request, pytestconfig):
    """Chdir into a fresh per-test sandbox directory mirroring the test nodeid.

    The sandbox root comes from the 'sandbox_path' ini option (default
    '.sandbox/'); the previous working directory is restored by a finalizer.
    No-op unless the --sandbox option is set.
    """
    if not pytestconfig.option.sandbox:
        return
    if 'sandbox_path' in pytestconfig.inicfg:
        sandbox_prefix = pytestconfig.inicfg['sandbox_path']
    else:
        sandbox_prefix = '.sandbox/'

    pytestconfig._sandbox_old_dir = Path.getcwd()

    # nodeid is of the form "pkg/test_mod.py::test_func"; map it to a path
    mod_dir, _, func = request.node.nodeid.rpartition('::')
    mod_dir = mod_dir.replace('::', '/')
    sand_path = Path(Path(sandbox_prefix).joinpath(Path(mod_dir).joinpath(func))).abspath()

    # NOTE(review): removedirs_p only removes empty directories, so a
    # sandbox containing leftover files survives this cleanup -- confirm
    # whether rmtree semantics were intended.
    if sand_path.isdir():
        sand_path.removedirs_p()
    elif sand_path.isfile():
        sand_path.remove_p()
    sand_path.makedirs_p()

    os.chdir(sand_path)
    print('sandbox cwd: %s' % os.getcwd())

    def exit_sandbox():
        # restore the caller's working directory after the test ends
        os.chdir(pytestconfig._sandbox_old_dir)
        # print('teardown cwd: %s' % os.getcwd())
        pytestconfig._sandbox_old_dir = None
    request.addfinalizer(exit_sandbox)
def detect_format(config):
    """Detect the sequence file format of config['filename'] and annotate *config*.

    Sets config['format'] to 'genbank', 'fasta' or 'raw' (None on error) and,
    on success, config['length'] and config['ddna']; parsed records also get
    'id' and 'description'.  Does nothing when 'format' is already present.
    """
    if 'format' not in config:
        try:
            filename = Path(config['filename'])
            # a 5-byte sniff is enough to recognise 'LOCUS' or '>'
            with filename.open('r') as f:
                header = f.read(5)
            if filename.ext in ('.gb', '.gbk') or header.startswith('LOCUS'):
                log.debug("Attempting %s as genbank", filename)
                seqrec = genbank.parse_seq_rec(config['filename'])
                config['format'] = 'genbank'
                config['id'] = seqrec.id
                config['description'] = seqrec.description
                seq = str(seqrec.seq)
            elif filename.ext in ('.fna', '.fasta') or header.startswith('>'):
                seqrec = SeqIO.read(filename, 'fasta')
                config['format'] = 'fasta'
                config['id'] = seqrec.id
                config['description'] = seqrec.description
                seq = str(seqrec.seq)
            else:
                # unrecognised: treat the whole file as a raw sequence
                with filename.open('r') as f:
                    seq = f.read()
                seq = re.sub(r'\s', '', seq)
                config['format'] = 'raw'
            config['length'] = len(seq)
            ddna = derive_filename(config, filename.getmtime(), 'ddna')
            if not ddna.exists():
                # mkstemp_rename presumably writes via a temp file then
                # renames -- TODO confirm atomicity guarantee
                with mkstemp_rename(ddna) as f:
                    f.write(seq.upper())
            config['ddna'] = ddna
        except Exception:
            # was a bare `except:`; narrowed so KeyboardInterrupt/SystemExit
            # are no longer swallowed
            log.exception("Error detecting format")
            config['format'] = None
Example #18
0
def compile_locales():
    """Compile all messages.pot files."""
    base_path = Path(__file__).parent / 'locale'
    for locale in base_path.dirs():
        # NOTE(review): entering a path.py Path chdirs into it, so msgfmt
        # runs inside each locale's LC_MESSAGES directory -- confirm the
        # installed path.py version supports context-manager use.
        with (locale / 'LC_MESSAGES'):
            print('* translating', locale)
            check_output(['msgfmt', 'messages.pot'])
Example #19
0
def install_java():
    """Install java just like we do for Hadoop Base.

    Same approach as HadoopBase:
    https://github.com/juju-solutions/jujubigdata/blob/master/jujubigdata/handlers.py#L134

    Lets us run Pig in local mode (which requires Java) without any Hadoop;
    if Hadoop comes along later, java is already installed in a way that is
    compatible with the plugin.

    NOTE: this will go away if/when we support the java interface.
    """
    env = utils.read_etc_env()
    installer = Path(jujuresources.resource_path('java-installer'))
    installer.chmod(0o755)

    output = check_output([installer], env=env).decode('utf8')
    lines = output.strip().splitlines()
    if len(lines) != 2:
        raise ValueError('Unexpected output from java-installer: %s' % output)

    # installer prints the java home on one line and the version on the next
    java_home, java_version = lines
    if '_' not in java_version:
        java_major, java_release = java_version, ''
    else:
        java_major, java_release = java_version.split("_")
    unitdata.kv().set('java.home', java_home)
    unitdata.kv().set('java.version', java_major)
    unitdata.kv().set('java.version.release', java_release)
Example #20
0
def backlight_set():
    '''dim the backlight after a set number of minutes'''
    # sysfs backlight controls; we must be able to write the brightness node
    bl_path = Path('/sys/class/backlight/intel_backlight/brightness')
    assert bl_path.access(W_OK)
    maxb_path = Path('/sys/class/backlight/intel_backlight/max_brightness')
    maxb = maxb_path.text().strip()
    # run slightly below hardware max (// 1.2 yields a float; '%d' truncates)
    maxb = '%d' % (int(maxb) // 1.2)
    while True:
        try:
            xset('dpms', 'force', 'on')
            bl_path.write_text(maxb)
            sleep(60*18)
            # dim to 300 for two minutes before the screen-off countdown
            bl_path.write_text('300')
            sleep(60*2)
            cnt = 0
            step = 15
            minutes = 10
            while cnt < minutes*60:
                # re-assert screen-off every `step` seconds; xmessage shows
                # the remaining seconds and auto-dismisses
                xmessage('-timeout', str(step-1), str(minutes*60-cnt))
                xset('dpms', 'force', 'off')
                sleep(step)
                cnt += step
        finally:
            # restore brightness even when interrupted
            bl_path.write_text(maxb)
    # NOTE(review): unreachable -- the `while True` above never exits normally
    xset('dpms', 'force', 'on')
Example #21
0
    def retrieve(path, isel='all', lazy=True):
        """Load simulation fields and metadata stored under *path*.

        :param path: directory containing data.nc (or sharded data*.nc files)
        :param isel: 'all', 'last', a dict of dimension selections, or an
            index/slice applied to the t axis
        :param lazy: when False, force-load the dataset into memory
        :returns: FieldsData with the (possibly subset) data and metadata
        """
        path = Path(path)
        try:
            data = open_dataset(path / "data.nc")
            # NOTE(review): this overrides the caller's lazy=False whenever a
            # single data.nc exists -- confirm that is intended.
            lazy = True
        except FileNotFoundError:
            # sharded output: concatenate the pieces and sort along time
            data = open_mfdataset(path / "data*.nc",
                                  concat_dim="t").sortby("t")
        try:
            # NOTE(review): yaml.load without an explicit Loader is unsafe on
            # untrusted input -- confirm metadata.yml is always trusted.
            with open(Path(path) / 'metadata.yml', 'r') as yaml_file:
                metadata = yaml.load(yaml_file)
        except FileNotFoundError:
            # Ensure retro-compatibility with older version
            with open(path.glob("Treant.*.json")[0]) as f:
                metadata = json.load(f)["categories"]

        if isel == 'last':
            data = data.isel(t=-1)
        elif isel == 'all':
            pass
        elif isinstance(isel, dict):
            data = data.isel(**isel)
        else:
            data = data.isel(t=isel)

        if not lazy:
            return FieldsData(data=data.load(),
                              metadata=AttrDict(**metadata))

        return FieldsData(data=data,
                          metadata=AttrDict(**metadata))
Example #22
0
 def setup(self, name=None):
     """Create a btrfs filesystem on the device and set up its volume layout.

     Creates the @ root subvolume and, when root_is_snapshot is set, a
     @/.snapshots/1/snapshot hierarchy which becomes the default volume.
     """
     filesystem = FileSystem(
         'btrfs', MappedDevice(device=self.device, device_provider=self)
     )
     filesystem.create_on_device(
         label=self.custom_args['root_label']
     )
     self.setup_mountpoint()
     Command.run(
         ['mount', self.device, self.mountpoint]
     )
     root_volume = self.mountpoint + '/@'
     Command.run(
         ['btrfs', 'subvolume', 'create', root_volume]
     )
     if self.custom_args['root_is_snapshot']:
         # the first snapshot of @ lives at @/.snapshots/1/snapshot
         snapshot_volume = self.mountpoint + '/@/.snapshots'
         Command.run(
             ['btrfs', 'subvolume', 'create', snapshot_volume]
         )
         Path.create(snapshot_volume + '/1')
         snapshot = self.mountpoint + '/@/.snapshots/1/snapshot'
         Command.run(
             ['btrfs', 'subvolume', 'snapshot', root_volume, snapshot]
         )
         self.__set_default_volume('@/.snapshots/1/snapshot')
     else:
         self.__set_default_volume('@')
Example #23
0
def setup_demo(location, force=False):
    """Copy demo files to a directory.

    \b
    LOCATION: directory to add demofiles to (default: ./chanjo-demo)
    """
    target_dir = Path(location)
    pkg_dir = __name__.rpartition('.')[0]
    demo_dir = Path(resource_filename(pkg_dir, 'demo-files'))

    # refuse to clobber anything already present at the target
    for demo_file in resource_listdir(pkg_dir, 'demo-files'):
        destination = target_dir.joinpath(demo_file)
        if not force and destination.exists():
            log.error("%s exists, pick a different location", destination)
            raise OSError(EEXIST, 'file already exists', destination)

    try:
        # we can copy the directory(tree)
        demo_dir.copytree(target_dir)
    except OSError as error:
        log.warn('location must be a non-existing directory')
        raise error

    # inform the user
    log.info("successfully copied demo files to %s", target_dir)
Example #24
0
File: uri.py Project: k0da/kiwi-1
 def __del__(self):
     """Best-effort teardown: unmount and remove every recorded mount point.

     The stack is unwound in reverse mount order; all errors are swallowed
     because destructors must not raise.
     """
     try:
         for mount in reversed(self.mount_stack):
             Command.run(['umount', mount])
             Path.remove(mount)
     except Exception:
         pass
Example #25
0
def load_pa11y_ignore_rules(file=None, url=None):  # pylint: disable=redefined-builtin
    """
    Load the pa11y ignore rules from the given file or URL.
    """
    if not file and not url:
        return None

    if file:
        rules_path = Path(file)
        if not rules_path.isfile():
            msg = (
                u"pa11y_ignore_rules_file specified, but file does not exist! {file}"
            ).format(file=rules_path)
            raise ValueError(msg)
        return yaml.safe_load(rules_path.text())

    # must be URL
    resp = requests.get(url)
    if resp.ok:
        return yaml.safe_load(resp.text)
    msg = (
        u"pa11y_ignore_rules_url specified, but failed to fetch URL. status={status}"
    ).format(status=resp.status_code)
    err = RuntimeError(msg)
    # attach the response so callers can inspect the failure
    err.response = resp
    raise err
Example #26
0
def get_ssh_key(user):
    """Return *user*'s public SSH key text, generating and authorizing it first.

    The public key is copied into authorized_keys when not already present.
    """
    generate_ssh_key(user)
    # allow ssh'ing to localhost; useful for things like start_dfs.sh
    authfile = ssh_key_dir(user) / 'authorized_keys'
    if not authfile.exists():
        Path.copy(ssh_pub_key(user), authfile)
    return ssh_pub_key(user).text()
Example #27
0
def download_comics(item):
    """Download the pages of one comic described by *item*.

    Updates item['page_current'] while iterating the download range and
    increments item['downloaded_in_this_session'] per processed image.
    """
    # Set directory for saving comics images
    path = Path("{domain}/{name}/".format(domain = item['domain'],
                                          name   = item['name']))
    # NOTE(review): set() is not a standard path.py method -- presumably a
    # project extension that prepares the save directory; confirm.
    if not path.set():
        ERROR("Can't set save path for comics \"{name}\"".format(name=item['name']))
        return

    # Set first and last pages of download range
    if not check_last(item):
        ERROR("Can't check last issue in comics \"{name}\"".format(name=item['name']))

    start = get_start_page(item)
    stop  = get_stop_page(item)

    if item['page_current'] == stop:
        # already up to date
        return

    # TODO: Move all regex dependent from domain into separate function
    regex_Image = re.compile(r"id=\"mainImage\" src=\"(\S+\.(\w+))\"")

    # Start downloading comics
    for item['page_current'] in range(start, stop + 1):
        url = "https://{domain}/{name}/{page}".format(domain=item['domain'],
                                                      name  =item['relative_URL'],
                                                      page  =item['page_current'])

        try:
            page = request.urlopen(url).read().decode('utf-8')
        except Exception:
            # was a bare `except:`; narrowed so Ctrl-C is not swallowed
            ERROR("Can't load page \"{url}\"".format(url=url))
            return

        if process_image(page, regex_Image, item):
            item['downloaded_in_this_session'] += 1
Example #28
0
 def set_file(self, phrase, name, content, locked, owner):
     """Store *content* under a fresh downloads/<uuid>.cuckload file and
     register it in the files table for *phrase*.

     :param phrase: trigger phrase (casefolded); empty args make this a no-op
     :param name: display file name stored in the table
     :param content: object with read() yielding bytes, or a str
     :param locked: lock flag stored alongside the record
     :param owner: owner stored alongside the record
     :returns: None
     """
     # pylint: disable=too-many-arguments
     phrase = phrase.casefold()
     if not phrase or not name or not content:
         return None
     newid = None
     # loop until an unused random file name is found
     while not newid or newid.exists():
         newid = Path("downloads") / "{!s}.cuckload".format(uuid4())
     newid.parent.mkdir_p()
     with open(newid, "wb") as outp:
         outp.write(content.read() if hasattr(content, "read") else content.encode("utf-8"))
     try:
         cur = self.conn.cursor()
         cur.execute("INSERT OR REPLACE INTO files (phrase, id, name, locked, owner) "
                     "VALUES(?, ?, ?, ?, ?)",
                     (phrase, newid, name, locked, owner))
         UploadDownloadCommand.changed = time()
         LOGGER.debug("changed %d", self.changed)
     except Exception:
         LOGGER.exception("Failed to add file")
         # best effort: remove the orphaned file when the DB insert failed
         try:
             if not newid:
                 raise Exception("huh?")
             newid.unlink()
         except Exception:
             pass
Example #29
0
def update_etc_hosts(ips_to_names):
    '''
    Update /etc/hosts given a mapping of managed IP / hostname pairs.

    Note, you should not use this directly.  Instead, use :func:`update_kv_hosts`
    and :func:`manage_etc_hosts`.

    :param dict ips_to_names: mapping of IPs to hostnames (must be one-to-one)
    '''
    etc_hosts = Path('/etc/hosts')
    ip_re = re.compile(r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}')

    # keep every unmanaged line, dropping previously managed entries
    new_lines = [line for line in etc_hosts.lines()
                 if '# JUJU MANAGED' not in line]

    # invert the mapping: one managed line per hostname
    managed = {name: ip for ip, name in ips_to_names.items()}
    for name, ip in managed.items():
        entry = '%s %s  # JUJU MANAGED' % (ip, name)
        if not ip_re.match(ip):
            # comment out entries whose IP fails the sanity pattern
            entry = '# %s (INVALID IP)' % entry
        new_lines.append(entry)

    # rewrite /etc/hosts in one shot
    etc_hosts.write_lines(new_lines, append=False)
Example #30
0
def write_examples(maker):
    """Write the "Examples" chapter from the pages/examples .rst files, then a
    "For more information" footer."""
    maker.set_indent_level(0)
    maker.write_chapter("Examples")

    examples_dir = Path(os.path.join("pages", "examples"))
    for page in (
        "Create_and_convert_to_string.rst",
        "Get_iterator.rst",
        "Test_whether_a_value_within_the_time_range.rst",
        "Test_whether_a_value_intersect_the_time_range.rst",
        "Make_an_intersected_time_range.rst",
        "Make_an_encompassed_time_range.rst",
        "Truncate_time_range.rst",
    ):
        maker.write_file(examples_dir.joinpath(page))

    maker.inc_indent_level()
    maker.write_chapter("For more information")
    maker.write_lines(
        [
            "More examples are available at ",
            "https://datetimerange.rtfd.io/en/latest/pages/examples/index.html",
            "",
            "Examples with Jupyter Notebook is also available at "
            "`DateTimeRange.ipynb <https://nbviewer.jupyter.org/github/thombashi/DateTimeRange/tree/master/examples/DateTimeRange.ipynb>`__",
        ]
    )
 def project_headers_path(self):
     """Return the absolute path of this project's public headers folder
     inside the configured build directory."""
     return Path.path(
         Configuration.current.build_directory.path_by_appending(
             self.name).absolute() + "/" + self.PROJECT_HEADERS_FOLDER_PATH)
def main():
    """Evaluate a pretrained PoseNet on test sequences and report ATE/RE errors."""
    args = parser.parse_args()

    weights = torch.load(args.pretrained_posenet)
    pose_net = models.PoseNet().to(device)
    # strict=False: tolerate missing/extra keys in the checkpoint
    pose_net.load_state_dict(weights['state_dict'], strict=False)
    pose_net.eval()

    seq_length = 5
    dataset_dir = Path(args.dataset_dir)
    framework = test_framework(dataset_dir, args.sequences, seq_length)
    print('{} snippets to test'.format(len(framework)))

    # one (ATE, RE) row per snippet
    errors = np.zeros((len(framework), 2), np.float32)
    if args.output_dir is not None:
        output_dir = Path(args.output_dir)
        output_dir.makedirs_p()
        predictions_array = np.zeros((len(framework), seq_length, 3, 4))

    for j, sample in enumerate(tqdm(framework)):
        imgs = sample['imgs']

        # resize to the network's expected input resolution if needed
        h, w, _ = imgs[0].shape
        if (h != args.img_height or w != args.img_width):
            imgs = [imresize(img, (args.img_height, args.img_width)).astype(
                np.float32) for img in imgs]

        # HWC -> CHW layout for torch
        imgs = [np.transpose(img, (2, 0, 1)) for img in imgs]

        tensor_imgs = []
        for i, img in enumerate(imgs):
            # (x/255 - 0.5)/0.5 maps 0..255 pixel values to -1..1
            img = ((torch.from_numpy(img).unsqueeze(
                0) / 255 - 0.5) / 0.5).to(device)
            tensor_imgs.append(img)

        # chain per-pair relative poses into 3x4 global transforms,
        # starting from the identity, via inverse composition
        global_pose = np.identity(4)
        poses = []
        poses.append(global_pose[0:3, :])

        for iter in range(seq_length - 1):
            pose = pose_net(tensor_imgs[iter], tensor_imgs[iter + 1])
            pose_mat = pose_vec2mat(pose).squeeze(0).cpu().numpy()
            # promote the 3x4 pose to a homogeneous 4x4 matrix
            pose_mat = np.vstack([pose_mat, np.array([0, 0, 0, 1])])

            global_pose = global_pose @ np.linalg.inv(pose_mat)
            poses.append(global_pose[0:3, :])

        final_poses = np.stack(poses, axis=0)

        if args.output_dir is not None:
            predictions_array[j] = final_poses

        ATE, RE = compute_pose_error(sample['poses'], final_poses)
        errors[j] = ATE, RE

    # aggregate statistics over all snippets
    mean_errors = errors.mean(0)
    std_errors = errors.std(0)
    error_names = ['ATE', 'RE']
    print('')
    print("Results")
    print("\t {:>10}, {:>10}".format(*error_names))
    print("mean \t {:10.4f}, {:10.4f}".format(*mean_errors))
    print("std \t {:10.4f}, {:10.4f}".format(*std_errors))

    if args.output_dir is not None:
        np.save(output_dir/'predictions.npy', predictions_array)
Example #33
0
  <configuration>: the AWS client must already be configured using the awscli through the anaconda prompt.
                   To do this, run `pip install awscli`, then from the anaconda (or other Python) prompt run `aws configure` and follow the prompts.

References: 
  Part of this project is a direct update for use with boto3 of https://peteris.rocks/blog/script-to-launch-amazon-ec2-spot-instances/ 
    
**Imports: Script will install non-native requirements automatically 

MIT License
"""

from path import Path
import argparse, sys, time, os

# Absolute directory containing this script.
root = Path(os.path.dirname(os.path.abspath(__file__)))

from spot_connect import sutils, instances, methods, elastic_file_systems


def main():  # Main execution

    profiles = sutils.load_profiles()

    parser = argparse.ArgumentParser(description='Launch spot instance')
    parser.add_argument('-n',
                        '--name',
                        help='Name of the spot instance',
                        required=True)
    parser.add_argument('-p',
                        '--profile',
Example #34
0
    def _to_regular_bin_path(file_path):
        """Resolve *file_path* to its absolute symlink target, or return it
        unchanged when it is not a symlink."""
        candidate = Path(file_path)
        if not candidate.islink():
            return file_path
        return candidate.readlinkabs()
Example #35
0

def save_to_word(path_holders):
    """Write the script and verification-script listings of every path
    holder into ``test.docx``."""
    document = Document()
    document.add_heading('Document Title', level=0)

    def _add_section(heading, names_of):
        # One numbered entry per holder, its scripts as level-2 bullets.
        document.add_heading(heading, level=1)
        for holder in path_holders:
            document.add_paragraph(holder.path.name, style='List Number')
            for script in names_of(holder):
                document.add_paragraph(parent_and_basename(script),
                                       style='List Bullet 2')

    _add_section('Scripts', lambda holder: holder.script_names)
    _add_section('VERIFICATION',
                 lambda holder: holder.verification_script_names)

    document.save('test.docx')


# Collect every immediate sub-directory of the root folder and export a
# Word document listing the scripts found in each one.
path = Path(r"D:\path")
path_holders = [
    PathHolder(sub_path) for sub_path in path.listdir() if sub_path.isdir()
]
save_to_word(path_holders)
Example #36
0
examplePath = Path(roads, [
    2744, 2745, 2746, 2747, 85561, 62583, 46937, 42405, 19096, 17273, 46582,
    43933, 465367, 57190, 819204, 819205, 47816, 16620, 819206, 465324, 3421,
    819207, 19950, 819208, 529485, 646688, 646689, 646690, 646691, 646692,
    646693, 646694, 47335, 646695, 646696, 522500, 646680, 646681, 646682,
    646683, 7372, 867063, 867064, 867065, 867066, 867067, 867068, 867069,
    705491, 49950, 49921, 705498, 926772, 926773, 926774, 926775, 926776,
    870022, 593302, 921858, 926777, 926778, 926779, 43226, 926780, 811196,
    867037, 926781, 926782, 926783, 867045, 580849, 926784, 580855, 66143,
    867051, 880273, 926785, 880271, 926786, 926787, 926788, 926789, 926790,
    870232, 926791, 926792, 926793, 926794, 926795, 926796, 926797, 926798,
    867080, 867072, 926799, 926800, 863957, 11454, 603929, 866456, 50865,
    866457, 866458, 867002, 867001, 867000, 612734, 612735, 612736, 612737,
    612738, 612739, 870148, 96491, 96492, 612710, 542941, 870147, 542938,
    870146, 870141, 612704, 870145, 464552, 464553, 565107, 466517, 466518,
    466519, 466520, 466521, 466522, 466523, 466524, 466525, 754541, 754542,
    754543, 754544, 96365, 96364, 96363, 96362, 96361, 96360, 57033, 57032,
    57031, 57030, 57029, 50831, 50832, 12692, 50833, 50834, 12343, 50835,
    50836, 50837, 50838, 50839, 50840, 35506, 35505, 35504, 35503, 35502,
    35501, 35500, 35499, 35498, 35497, 35496, 35495, 35494, 35493, 866647,
    718703, 866648, 851323, 866649, 866650, 866651, 866652, 866653, 866717,
    851297, 866716, 851276, 737517, 851285, 866715, 73340, 866714, 866681,
    866688, 866713, 851302, 851301, 851300, 851288, 851271, 851272, 866687,
    866708, 866709, 737477, 737478, 73566, 737479, 605205, 605204, 724105,
    724107, 724106, 724089, 724088, 724087, 724086, 724085, 724084, 724083,
    724082, 724076, 724075, 724074, 724073, 724092, 724072, 871058, 871057,
    871045, 871056, 871055, 871054, 871053, 871052, 871074, 871103, 871102,
    871101, 871100, 871090, 871093, 871092, 866447, 603943, 870222, 870223,
    871091, 863947, 871118, 59648, 66173, 871124, 871123, 870175, 870176,
    870177, 870178, 870179, 580848, 626555, 870180, 870181, 870182, 66148,
    66149, 880269, 880272, 880273, 50879, 50880, 50881, 50882, 50883, 50884,
    50885, 50886, 50887, 867003, 867004, 867005, 867006, 867007, 867008,
    867009, 867010, 49926, 49953, 653054, 867011, 867012, 867013, 40970,
    867014, 867015, 866490, 867016, 32037, 32038, 32039, 11442, 32040, 32041,
    32029, 32030, 32031, 32032, 32033, 32034, 32035, 32036, 32042, 32048,
    32049, 32050, 32051, 32052, 32053, 32054, 32056, 32057, 32058, 32059,
    50692, 50693, 50694, 50695, 50698, 50699, 50700, 50701, 50702, 84369,
    84370, 84371, 84355, 12851, 12852, 84368, 17872, 12839, 12840, 84372,
    84373, 84374, 84375, 833224, 833225, 833226, 833227, 833228, 47377, 47378,
    47379, 47380, 47381, 47382, 47383, 47384, 47385, 47386, 47387, 50315,
    50316, 50317, 50264, 50265, 50266, 50267, 50268, 50269, 50270, 32847,
    32848, 32849, 32850, 32867, 32868, 32869, 32854, 893090, 691982, 893091,
    39521, 39522, 66404, 66405, 66406, 3362, 3361, 47519, 47520, 47521, 47520,
    47519, 3361, 3362, 645213, 645214, 465378, 49012, 3445, 869993, 46565,
    869994, 528502, 869995, 724326, 542442, 819349, 49976, 819350, 49003,
    49004, 49005, 48999, 544019, 867017, 866489, 867018, 867019, 867020,
    867021, 867022, 867023, 40969, 867024, 867025, 867026, 867027, 653049,
    49951, 49928, 867028, 867029, 867030, 867031, 867032, 867033, 867034,
    867035, 867036, 50888, 867037, 926781, 926782, 926783, 867045, 580849,
    926784, 580855, 66143, 867051, 880273, 926785, 880271, 926786, 926787,
    926788, 926789, 926790, 870232, 926791, 926792, 926793, 926794, 926795,
    926796, 926797, 926798, 867080, 867072, 926799, 926800, 863957, 11454,
    603929, 866456, 926801, 926802, 926803, 866737, 926804, 926805, 926806,
    926807, 926808, 926809, 926810, 866741, 926811, 73496, 926812, 35505,
    926813, 464545, 926814, 5221, 926815, 12344, 612832, 926816, 12691, 926817,
    47327, 57027, 926818, 46436, 866627, 465389, 819446, 819393, 599575,
    819437, 46383, 912048, 926819, 7408, 65683, 912061, 46314, 926820, 926821,
    926822, 25610, 11452, 46351, 528640, 529456, 65737, 65738, 65739, 65740,
    65741, 65742, 65693, 65743, 65744, 65745, 530546, 715860, 715861, 715862,
    715863, 525089, 715859, 530549, 715858, 715857, 25611, 25612, 25613, 25614,
    25615, 25616, 25617, 25618, 25619, 25620, 25621, 25622, 25623, 25624,
    654900, 654901, 654902, 654903, 654904, 654905, 525405, 525406, 525407,
    525408, 525409, 525410, 525413, 525414, 10201, 54744, 67029, 67030, 67031,
    67032, 67033, 67034, 67035, 67036, 67037, 67038, 67039, 67040, 67041,
    67042, 67043, 67044, 578769, 578762, 578761, 578760, 578759, 578758,
    578757, 578756, 578755, 742158, 742159, 742160, 742161, 742162, 742163,
    742164, 742165, 742166, 742167, 742168, 742169, 742170, 742171, 742172,
    742173, 742174, 742175, 742176, 742177, 742178, 524873, 742179, 742180,
    742181, 742182, 742183, 742184, 742185, 67045, 67046, 67047, 742186,
    742187, 742188, 742189, 742190, 742191, 742192, 742193, 742194, 742195,
    742196, 742197, 742198, 742199, 742200, 742201, 742202, 742203, 742204,
    67053, 67054, 67055, 67056, 67057, 929419, 929420, 608084, 80523, 80524,
    80525, 80526, 80527, 80528, 80513, 42180, 530737, 530760, 530759, 530728,
    530758, 530757, 22609, 22533, 22534, 22535, 22536, 22537, 22538, 22539,
    461074, 80364, 80365, 80366, 80367, 80368, 80369, 80370, 80371, 80372,
    80373, 80374, 80375, 80376, 80377, 80378, 42100, 22556, 22557, 33470,
    33471, 33472, 33473, 33474, 33475, 33476, 33477, 33478, 33479, 33480,
    33481, 33482, 33483, 33484, 33485, 33531, 33506, 33530, 33529, 33528,
    33542, 33541, 33507, 33508, 33509, 33510, 33511, 33512, 33513, 33514,
    33515, 33516, 33517, 33518, 33519, 33520, 33521, 33522, 33523, 33524,
    33525, 33526, 33527, 33344, 33345, 33346, 33347, 33348, 816680, 816681,
    33373, 33374, 33375, 33376, 33377, 23686, 23687, 23688, 23689, 23690,
    23691, 23692, 23693, 23694, 23695, 23696, 23697, 23698, 23699, 23700,
    23701, 23702, 23703, 595927, 595962, 595963, 595964, 595965, 595966,
    595967, 595968, 595969, 595970, 595971, 595972, 631017, 631018, 631019,
    631020, 631021, 631022, 631023, 631024, 631025, 631026, 631027, 631028,
    813481, 813482, 813483, 813484, 813485, 813486, 813487, 813488, 813489,
    813490, 813491, 813492, 813493, 813494, 33320, 33321, 33322, 33323, 33324,
    33325, 20735, 20734, 20733, 20732, 20731, 20730, 20729, 20728, 20727,
    20726, 20725, 20724, 20723, 20722, 20721, 20720, 33309, 33319, 822028,
    822029, 822030, 822031, 822032, 822033, 822034, 822035, 822036, 810182,
    810183, 810184, 810185, 810186, 810187, 810188, 810189, 810190, 810191,
    810192, 810193, 810194, 810195, 810196, 813469, 813470, 813471, 813472,
    813473, 813474, 813475, 813476, 813477, 813478, 813479, 813480, 516637,
    516638, 516639, 516640, 516641, 516642, 516643, 516644, 516645, 516646,
    516647, 516648, 516649, 516650, 516651, 516652, 595928, 595929, 816784,
    816785, 816786, 816787, 816788, 816789, 816790, 816791, 816792, 816793,
    816754, 816755, 816756, 816757, 816758, 816759, 816760, 816761, 816762,
    816763, 816764, 816765, 816766, 816767, 816768, 816769, 595931, 595932,
    595933, 595934, 595935, 595936, 595937, 595938, 595939, 595940, 595941,
    595942, 595943, 595944, 595945, 595946, 595947, 595948, 595949, 595950,
    595951, 595952, 33378, 33379, 33380, 33381, 33382, 33383, 33384, 33385,
    33386, 33387, 33388, 33389, 595955, 595956, 595957, 595958, 590266, 595959,
    33578, 595960, 595961, 23704, 23705, 23706, 23707, 23708, 8521, 23709,
    33551, 33552, 29805, 33553, 33554, 33555, 33556, 33557, 33558, 33559,
    25572, 33560, 33561, 33562, 33563, 523272, 523273, 25593, 523274, 523275,
    523276, 523277, 523278, 523279, 523280, 67126, 67127, 67128, 67129, 67130,
    67131, 67132, 67133, 67134, 67135, 67136, 67137, 67138, 67139, 67140,
    67141, 67142, 67143, 67144, 67180, 67181, 67182, 67183, 46521, 46500,
    702128, 906344, 906345, 906346, 906347, 906348, 29806, 29807, 29808, 96351,
    96352, 96353, 78130, 78153, 96354, 96288, 96293, 96355, 96356, 56155,
    96357, 96358, 96359, 754553, 542942, 542943, 542944, 542945, 542946,
    542947, 542948, 542949, 542950, 542951, 542952, 542953, 47436, 47433,
    869934, 869935, 869936, 869937, 869938, 869939, 869940, 869941, 866462,
    603921, 869942, 869943, 869891, 509508, 869944, 869945, 509497, 869946,
    869947, 869924, 509488, 869948, 49922, 49923, 49958, 49929, 49930, 49931,
    49932, 49933, 49934, 20249, 49935, 49936, 49937, 49938, 49939, 41000,
    49940, 49941, 49942, 49943, 49944, 49945, 49946, 49947, 705488, 705489,
    705490, 926823, 926824, 556247, 926825, 866512, 926826, 926827, 17515,
    17516, 522499, 522500, 522501, 522502, 522503, 522504, 522505, 522506,
    522507, 522508, 46249, 46250, 692854, 869872, 893018, 893019, 46251, 46252,
    46253, 6638, 46254, 46255, 46256, 46257, 46258, 46259, 46260, 46261, 16476,
    880299, 880278, 880300, 880301, 880302, 861849, 880303, 880304, 880305,
    880306, 864020, 893030, 893031, 85748, 893032, 893033, 19074, 19063,
    893023, 893024, 720919, 17834, 17835, 2789, 711278, 931081, 931082, 931083,
    893020, 893021, 893022, 17811, 17812, 893048, 893049, 893050, 893051,
    893037, 893038, 893039, 893040, 893041, 17853, 893042, 893043, 893044,
    893045, 893046, 893047, 544062, 741931, 741932, 544077, 17810, 936406,
    936407, 936374, 936375, 3324, 3325, 3326, 3327, 3328, 3329, 3330, 3331,
    3332, 3333, 35191, 35192, 35193, 35194, 35195, 35230, 35231, 35232, 35233,
    35234, 35235, 35227, 705712, 831914, 831915, 831916, 831917, 831918,
    831919, 826832, 826835, 699045, 546953, 546954, 546955, 35402, 546956,
    546957, 546958, 546959, 546960, 534808, 534809, 534810, 534811, 534812,
    534813, 534814, 534815, 534816, 534817, 534818, 534819, 534820, 534821,
    534822, 534823, 534824, 534825, 534826, 546980, 546981, 546982, 546983,
    546984, 546985, 546986, 546987, 546988, 546989, 546990, 890626, 890627,
    890628, 890629, 815067, 725581, 710786, 725586, 715774, 816462, 882176,
    882177, 882178, 57399, 57463, 57464, 57465, 57466, 57467, 882175, 882174,
    882181, 563902, 563697, 750181, 750182, 563995, 726443, 57444, 566062,
    866421, 866422, 866423, 866418, 564022, 866424, 866425, 866426, 866431,
    866432, 866435, 822264, 822265, 866436, 866437, 866438, 533411, 533456,
    35837, 35838, 617934, 899660, 533440, 912243, 854085, 854086, 30096, 30097,
    31814, 31815, 31816, 31817, 31818, 31819, 31820, 31821, 31822, 31823,
    31824, 31825, 31826, 31827, 31828, 31829, 533395, 533396, 533397, 533398,
    36264, 36265, 608284, 608285, 608286, 608287, 533359, 533360, 533361,
    533362, 533363, 533364, 533365, 533366, 533367, 533368, 533369, 31813,
    533370, 533371, 703425, 703426, 703427, 703405, 703428, 30112, 31081,
    31082, 31083, 703413, 31162, 31163, 31164, 30104, 31165, 31166, 31200,
    533383, 533384, 533385, 533386, 533387, 533388, 533389, 533390, 533391,
    533392, 533393, 533394, 703420, 703421, 703422, 703423, 703424, 703414,
    703415, 703416, 703417, 703418, 703419, 854089, 854090, 854091, 854092,
    854093, 854094, 31837, 31838, 31839, 31840, 531413, 531414, 531415, 531416,
    531417, 531418, 531419, 531420, 531421, 531422, 531423, 531424, 531425,
    942897, 942898, 942899, 942900, 942901, 942902, 531283, 531284, 531285,
    531286, 531287, 531288, 531289, 531290, 531291, 531292, 531293, 531294,
    531295, 531296, 531297, 531298, 531299, 531300, 531301, 531302, 854102,
    854103, 854104, 854105, 854106, 854107, 854108, 854109, 854110, 854111,
    854112, 854113, 854114, 854115, 854116, 854117, 854118, 854119, 854120,
    854121, 854122, 854123, 854124, 854125, 532541, 532542, 532543, 532544,
    532545, 532546, 532547, 532548, 532549, 532550, 532551, 532552, 532553,
    532554, 532555, 532556, 532557, 532558, 532559, 532560, 532561, 532562,
    532563, 532564, 532565, 532566, 97071, 97072, 97073, 97074, 606425, 606429,
    606430, 606431, 606432, 606433, 606434, 820994, 820995, 820996, 820997,
    820998, 820999, 821000, 821001, 821002, 821003, 821004, 821005, 821006,
    821007, 820984, 820985, 820986, 820987, 820988, 820989, 820990, 820991,
    820992, 820993, 821034, 821035, 821036, 737560, 723902, 36275, 723897,
    723903, 723904, 696490, 696491, 696492, 696493, 696494, 696495, 696496,
    696497, 606464, 606465, 606472, 97069, 97070, 740240, 740241, 740242,
    740243, 740244, 740245, 740246, 740247, 740248, 740249, 740250, 740251,
    740252, 740253, 740254, 740255, 740256, 740257, 740258, 740259, 740260,
    740261, 740262, 740263, 740264, 2942, 2943, 2944, 2945, 2946, 2947, 2948,
    2949, 2950, 2951, 2952, 36266, 36267, 36268, 36269, 36270, 727048, 727049,
    727050, 727051, 727052, 727053, 727054, 727055, 727056, 727057, 727058,
    727059, 727060, 727061, 727062, 531360, 727063, 727064, 531337, 727065,
    727066, 727067, 727068, 727069, 727070, 727071, 31811, 31812, 31813,
    533370, 533371, 703425, 703426, 703427, 703405, 703428, 30112, 31081,
    703429, 703430, 703431, 533382, 703432, 703433, 703434, 703435, 703436,
    727075, 533441, 727076, 727077, 727078, 727079, 727080, 727081, 866440,
    866441, 866442, 68315, 68316, 68317, 68318, 68319, 68320, 533541, 533542,
    533543, 533544, 533545, 30634, 30635, 30636, 30637, 57394, 866394, 816461,
    866395, 866396, 585642, 866397, 724118, 724224, 724223, 724222, 724225,
    724226, 585635, 724227, 864088, 866402, 724218, 724221, 546936, 585633,
    724128, 724228, 724229, 724230, 699034, 724113, 68369, 724114, 588427,
    727039, 727040, 727036, 727035, 727020, 727021, 568004, 864047, 727019,
    727018, 727017, 727016, 727015, 727014, 588429, 568010, 727013, 727012,
    534835, 727028, 727029, 727030, 727022, 727031, 35413, 35414, 35415, 35416,
    35417, 890659, 890660, 513554, 34964, 890662, 818105, 818104, 35428, 35429,
    35430, 35431, 35432, 35433, 35434, 35435, 35436, 35437, 846847, 846848,
    846849, 846820, 846821, 846822, 846823, 846824, 846825, 846826, 846827,
    846828, 846829, 846830, 846814, 846831, 846832, 846815, 513525, 56356,
    513526, 513527, 513528, 513529, 513530, 513531, 513532, 513533, 513534,
    513535, 2821, 2822, 2823, 2824, 2825, 2826, 2827, 2828, 2829, 2830, 2831,
    2832, 2833, 2834, 2835, 2836, 2837, 2838, 2839, 2840, 2841, 2842, 2843,
    2844, 2845, 944576, 3311, 944577, 846453, 640480, 846454, 846455, 640485,
    846456, 572770, 846457, 846458, 846459, 711264, 85762, 944595, 640469,
    846449, 846450, 846451, 846452, 880292, 85764, 880293, 880294, 712626,
    880295, 880296, 880297, 864021, 44111, 44110, 44131, 944568, 893115,
    900025, 881184, 820608, 944569, 880274, 944570, 16476, 881575, 881576,
    861880, 881577, 881578, 881579, 46237, 46238, 46239, 46240, 46241, 32067,
    46242, 46243, 46244, 46245, 46246, 46247, 46248, 869839, 869849, 893055,
    893056, 646684, 893057, 646674, 522508, 529478, 529479, 529480, 529481,
    529482, 529483, 529484, 529485, 6649, 46570, 899752, 899753, 40962, 40954,
    471686, 528478, 528479, 528480, 528481, 528474, 528475, 528476, 528477,
    528482, 528483, 528484, 528498, 528499, 528500, 528501, 724344, 866496,
    866497, 866498, 866499, 49973, 49974, 49975, 49976, 819350, 49003, 49004,
    49005, 48999, 11440, 11441, 11442, 32040, 32041, 32029, 32030, 32031,
    32032, 32033, 32034, 32035, 32036, 32042, 32048, 32049, 32050, 32051,
    32052, 32053, 32054, 32056, 32057, 32058, 32059, 50692, 50693, 50694,
    50695, 50698, 50699, 50700, 50701, 50702, 50703, 50704, 50705, 50706,
    24196, 50707, 50708, 50709, 50710, 50711, 50712, 73303, 73304, 73298,
    73299, 73300, 14245, 73301, 73302, 834596, 834597, 834598, 59132, 834599,
    834600, 834601, 834602, 834603, 834604, 834605, 834606, 834607, 834608,
    834609, 10876, 10877, 10845, 10846, 10847, 10848, 10849, 10850, 10851,
    10852, 10865, 10866, 97261, 97262, 97263, 97196, 97264, 97265, 97266,
    97267, 97268, 97269, 97270, 97271, 97247, 97249, 617117, 97287, 97288,
    97289, 72317, 72318, 72319, 72320, 72321, 72322, 72323, 72324, 72325,
    72326, 72327, 72328, 72329, 72330, 68505, 68506, 68507, 68508, 68509,
    68500, 68510, 68511, 68512, 68450, 68513, 68514, 68459, 617116, 617115,
    69265, 68711, 617114, 617113, 68713, 617112, 617111, 617110, 617109, 68593,
    68592, 68591, 68590, 68589, 68588, 68587, 68586, 68585, 68584, 68583,
    68582, 68581, 68464, 68465, 68466, 68467, 20186, 20187, 20188, 20189,
    20172, 28144, 28145, 28146, 28147, 28148, 28149, 28150, 28151, 28152,
    28153, 28154, 20107, 29247, 29248, 29249
])
Example #37
0
from path import Path
import pandas as pd

# Team lookup table — presumably maps team names to acronyms; TODO confirm
# against the CSV schema.
team_acry = pd.read_csv(Path("../Data/misc_data/team_acry.csv"))

# Team roster, indexed by player name for direct .loc lookups.
team_rooster = pd.read_csv(Path("../Data/misc_data/updated_team_rooster.csv"))
team_rooster.set_index("Player Name", inplace = True)


def determine_who_wins(starting_pitcher, opposing_pitcher, home_or_away):
    
    
    # gathering pitching data from two inputs in the function
    opp_pitch_class = []
    opp_pitch_class_2 = []
    opp_pitch_class_3 = []
    opp_pitch_class_4 = []
    opp_pitch_class_5 = []
    opp_pitch_class_6 = []
    opp_pitch_class_7 = []
    opp_pitch_class_8 = []
    opp_pitch_class_9 = []
    opp_pitch_class_10 = []
    opp_pitch_class_11 = []
    
    if opposing_pitcher in df_pitch.index:
        opp_pitch_class.append(df_pitch.loc[opposing_pitcher]["ERA"])
        opp_pitch_class_2.append(df_pitch.loc[opposing_pitcher]["CG"])
        opp_pitch_class_3.append(df_pitch.loc[opposing_pitcher]["IP"])
        opp_pitch_class_4.append(df_pitch.loc[opposing_pitcher]["ERA+"])
        opp_pitch_class_5.append(df_pitch.loc[opposing_pitcher]["FIP"])
Example #38
0
def my_program():
    """Ensure ./NewDir/file.txt exists, write one line into it, and
    print the file's lines back out."""
    from path import Path

    target_dir = Path("./NewDir")
    if not target_dir.isdir():
        target_dir.mkdir()

    target_file = target_dir / "file.txt"
    if not target_file.isfile():
        target_file.touch()

    target_file.write_text("Writing some_line")
    for text_line in target_file.lines():
        print(text_line)
Example #39
0
        else:
            break
    return save_index


# Build the command-line parser and attach the option groups from opts.py.
parser = argparse.ArgumentParser(description='train.py')

# opts.py
opts.model_opts(parser)
opts.train_opts(parser)

opt = parser.parse_args()

# Each run gets a fresh numbered directory: <save_dir>/run.<index>.
opt.save_path = os.path.join(opt.save_dir, 'run.%d' %
                             (get_save_index(opt.save_dir),))
Path(opt.save_path).mkdir_p()

# A single --layers value overrides both encoder and decoder depths.
if opt.layers != -1:
    opt.enc_layers = opt.layers
    opt.dec_layers = opt.layers

opt.brnn = (opt.encoder_type == "brnn")
opt.pre_word_vecs = os.path.join(opt.data, 'embedding')

print(vars(opt))
# Persist the resolved options next to the run for reproducibility.
# Using "with" guarantees the handle is closed — the original
# json.dump(..., open(...)) call leaked the file object.
with open(os.path.join(opt.save_path, 'opt.json'), 'w') as opt_file:
    json.dump(opt.__dict__, opt_file, sort_keys=True, indent=2)

cuda.set_device(opt.gpuid[0])
set_seed(opt.seed)
 def public_module_path(self):
     """Return the path to this product's public module folder, built by
     appending ``self.name`` and ``PUBLIC_MODULE_FOLDER_PATH`` under the
     current build directory."""
     return Path.path(
         Configuration.current.build_directory.path_by_appending(
             self.name).absolute() + "/" + self.PUBLIC_MODULE_FOLDER_PATH)
Example #41
0
def extension(path):
    """Return the ``ext`` attribute of *path* wrapped as a ``Path``."""
    wrapped = Path(path)
    return wrapped.ext
Example #42
0
def walk_music_folder(folder: Path):
    """Yield every file inside each direct sub-directory (album) of
    *folder*."""
    for album_dir in folder.dirs():
        for track in album_dir.files():
            yield track
Example #43
0
 def test_listdir_patterns(self, tmpdir):
     """listdir(pattern) must filter entries by the given glob pattern."""
     root = Path(tmpdir)
     (root / 'sub').mkdir()
     (root / 'File').touch()
     assert root.listdir('s*') == [root / 'sub']
     assert len(root.listdir('*')) == 2
Example #44
0
def filename(path):
    """Return the last component (base name) of *path*."""
    as_path = Path(path)
    return as_path.name
Example #45
0
 def test_listdir_simple(self):
     """Path('.').listdir() must report as many entries as os.listdir."""
     cwd = Path('.')
     assert len(cwd.listdir()) == len(os.listdir('.'))
Example #46
0
 def create_reference(cls, tmpdir):
     """Write ``cls.reference_content`` into tmpdir/'document' and return
     that path."""
     doc_path = Path(tmpdir) / 'document'
     with doc_path.open('w') as stream:
         stream.write(cls.reference_content)
     return doc_path
Example #47
0
 def test_fnmatch_custom_mod(self):
     """With ntpath attached as the path module, fnmatch must match
     regardless of case."""
     candidate = Path('FooBar')
     candidate.module = ntpath
     assert candidate.fnmatch('foobar')
     assert candidate.fnmatch('FOO[ABC]AR')
Example #48
0
 def test_listdir_empty_pattern(self):
     """An empty glob pattern must match no entries at all."""
     here = Path('.')
     assert here.listdir('') == []
Example #49
0
 def test_walkdirs_with_unicode_name(self):
     """walkdirs() must iterate without raising, even when directory
     names contain non-ASCII characters."""
     root = Path(self.tempdir)
     for _ in root.walkdirs():
         pass
Example #50
0
 def test_fnmatch_custom_normcase(self):
     """A caller-supplied normcase function must override the default
     case handling in fnmatch."""
     upper = lambda path: path.upper()
     target = Path('FooBar')
     assert target.fnmatch('foobar', normcase=upper)
     assert target.fnmatch('FOO[ABC]AR', normcase=upper)
Example #51
0
    def test_chdir_or_cd(self, tmpdir):
        """ tests the chdir or cd method """
        d = Path(str(tmpdir))
        cwd = d.getcwd()

        # ensure the cwd isn't our tempdir
        assert str(d) != str(cwd)
        # now, we're going to chdir to tempdir
        d.chdir()

        # we now ensure that our cwd is the tempdir
        assert str(d.getcwd()) == str(tmpdir)
        # we're resetting our path object (but not the process cwd, which
        # chdir() above changed)
        d = Path(cwd)

        # we ensure that our cwd is still set to tempdir
        assert str(d.getcwd()) == str(tmpdir)

        # we're calling the alias cd method to move back to the original cwd
        d.cd()
        # now, we ensure cwd isn't tempdir
        assert str(d.getcwd()) == str(cwd)
        assert str(d.getcwd()) != str(tmpdir)
Example #52
0
 def test_fnmatch_simple(self):
     """fnmatch must support '*' wildcards and character classes."""
     sample = Path('FooBar')
     assert sample.fnmatch('Foo*')
     assert sample.fnmatch('Foo[ABC]ar')
Example #53
0
    def testShutil(self):
        """Smoke-test the shutil-backed helpers (copyfile, copy, copytree,
        symlink, rmtree) on a small directory tree.

        Note: This only tests the methods exist and do roughly what
        they should, neglecting the details as they are shutil's
        responsibility.
        """

        d = Path(self.tempdir)
        testDir = d / 'testdir'
        testFile = testDir / 'testfile.txt'
        testA = testDir / 'A'
        testCopy = testA / 'testcopy.txt'
        testLink = testA / 'testlink.txt'
        testB = testDir / 'B'
        testC = testB / 'C'
        testCopyOfLink = testC / testA.relpathto(testLink)

        # Create test dirs and a file
        testDir.mkdir()
        testA.mkdir()
        testB.mkdir()

        f = open(testFile, 'w')
        f.write('x' * 10000)
        f.close()

        # Test simple file copying.
        testFile.copyfile(testCopy)
        self.assert_(testCopy.isfile())
        self.assert_(testFile.bytes() == testCopy.bytes())

        # Test copying into a directory.
        testCopy2 = testA / testFile.name
        testFile.copy(testA)
        self.assert_(testCopy2.isfile())
        self.assert_(testFile.bytes() == testCopy2.bytes())

        # Make a link for the next test to use.
        if hasattr(os, 'symlink'):
            testFile.symlink(testLink)
        else:
            testFile.copy(testLink)  # fallback for platforms without symlink

        # Test copying directory tree. Default copytree dereferences
        # symlinks, so the copied link must be a regular file.
        testA.copytree(testC)
        self.assert_(testC.isdir())
        self.assertSetsEqual(
            testC.listdir(),
            [testC / testCopy.name, testC / testFile.name, testCopyOfLink])
        self.assert_(not testCopyOfLink.islink())

        # Clean up for another try.
        testC.rmtree()
        self.assert_(not testC.exists())

        # Copy again, preserving symlinks (second argument True).
        testA.copytree(testC, True)
        self.assert_(testC.isdir())
        self.assertSetsEqual(
            testC.listdir(),
            [testC / testCopy.name, testC / testFile.name, testCopyOfLink])
        if hasattr(os, 'symlink'):
            self.assert_(testCopyOfLink.islink())
            self.assert_(testCopyOfLink.readlink() == testFile)

        # Clean up.
        testDir.rmtree()
        self.assert_(not testDir.exists())
        self.assertList(d.listdir(), [])
Example #54
0
 def testConstructionFromInt(self):
     """Constructing a Path from an int must yield its decimal string
     form."""
     constructed = Path(1)
     self.assert_(constructed == '1')
Example #55
0
    def testListing(self):
        """listdir() and glob() must reflect files exactly as they are
        created and removed in the temp directory."""
        d = Path(self.tempdir)
        self.assertEqual(d.listdir(), [])

        f = 'testfile.txt'
        af = d / f
        # Joining with '/' must agree with os.path.join.
        self.assertEqual(af, os.path.join(d, f))
        af.touch()
        try:
            self.assert_(af.exists())

            self.assertEqual(d.listdir(), [af])

            # .glob()
            self.assertEqual(d.glob('testfile.txt'), [af])
            self.assertEqual(d.glob('test*.txt'), [af])
            self.assertEqual(d.glob('*.txt'), [af])
            self.assertEqual(d.glob('*txt'), [af])
            self.assertEqual(d.glob('*'), [af])
            self.assertEqual(d.glob('*.html'), [])
            self.assertEqual(d.glob('testfile'), [])
        finally:
            af.remove()

        # Try a test with 20 files
        files = [d / ('%d.txt' % i) for i in range(20)]
        for f in files:
            # Context manager guarantees the handle is closed even on error
            # (the original open()/close() pair leaked on exceptions).
            with open(f, 'w') as fobj:
                fobj.write('some text\n')
        try:
            files2 = d.listdir()
            files.sort()
            files2.sort()
            self.assertEqual(files, files2)
        finally:
            for f in files:
                try:
                    f.remove()
                except OSError:
                    # Best-effort cleanup: ignore files already gone.
                    # Narrowed from a bare "except:" that also swallowed
                    # KeyboardInterrupt/SystemExit.
                    pass
Example #56
0
    def testUnicode(self):
        """Exercise the bytes/text/lines read and write round-trips across
        several Unicode encodings."""
        d = Path(self.tempdir)
        p = d / 'unicode.txt'

        def test(enc):
            """ Test that path works with the specified encoding,
            which must be capable of representing the entire range of
            Unicode codepoints.
            """

            # 'given' mixes every newline convention; 'clean' is the same
            # text with universal newlines applied.
            given = u('Hello world\n'
                      '\u0d0a\u0a0d\u0d15\u0a15\r\n'
                      '\u0d0a\u0a0d\u0d15\u0a15\x85'
                      '\u0d0a\u0a0d\u0d15\u0a15\u2028'
                      '\r'
                      'hanging')
            clean = u('Hello world\n'
                      '\u0d0a\u0a0d\u0d15\u0a15\n'
                      '\u0d0a\u0a0d\u0d15\u0a15\n'
                      '\u0d0a\u0a0d\u0d15\u0a15\n'
                      '\n'
                      'hanging')
            givenLines = [
                u('Hello world\n'),
                u('\u0d0a\u0a0d\u0d15\u0a15\r\n'),
                u('\u0d0a\u0a0d\u0d15\u0a15\x85'),
                u('\u0d0a\u0a0d\u0d15\u0a15\u2028'),
                u('\r'),
                u('hanging')
            ]
            expectedLines = [
                u('Hello world\n'),
                u('\u0d0a\u0a0d\u0d15\u0a15\n'),
                u('\u0d0a\u0a0d\u0d15\u0a15\n'),
                u('\u0d0a\u0a0d\u0d15\u0a15\n'),
                u('\n'),
                u('hanging')
            ]
            expectedLines2 = [
                u('Hello world'),
                u('\u0d0a\u0a0d\u0d15\u0a15'),
                u('\u0d0a\u0a0d\u0d15\u0a15'),
                u('\u0d0a\u0a0d\u0d15\u0a15'),
                u(''),
                u('hanging')
            ]

            # write bytes manually to file
            f = codecs.open(p, 'w', enc)
            f.write(given)
            f.close()

            # test all 3 path read-fully functions, including
            # path.lines() in unicode mode.
            self.assertEqual(p.bytes(), given.encode(enc))
            self.assertEqual(p.text(enc), clean)
            self.assertEqual(p.lines(enc), expectedLines)
            self.assertEqual(p.lines(enc, retain=False), expectedLines2)

            # If this is UTF-16, that's enough.
            # The rest of these will unfortunately fail because append=True
            # mode causes an extra BOM to be written in the middle of the file.
            # UTF-16 is the only encoding that has this problem.
            if enc == 'UTF-16':
                return

            # Write Unicode to file using path.write_text().
            cleanNoHanging = clean + u('\n')  # This test doesn't work with a
            # hanging line.
            p.write_text(cleanNoHanging, enc)
            p.write_text(cleanNoHanging, enc, append=True)
            # Check the result.
            expectedBytes = 2 * cleanNoHanging.replace('\n',
                                                       os.linesep).encode(enc)
            expectedLinesNoHanging = expectedLines[:]
            expectedLinesNoHanging[-1] += '\n'
            self.assertEqual(p.bytes(), expectedBytes)
            self.assertEqual(p.text(enc), 2 * cleanNoHanging)
            self.assertEqual(p.lines(enc), 2 * expectedLinesNoHanging)
            self.assertEqual(p.lines(enc, retain=False), 2 * expectedLines2)

            # Write Unicode to file using path.write_lines().
            # The output in the file should be exactly the same as last time.
            p.write_lines(expectedLines, enc)
            p.write_lines(expectedLines2, enc, append=True)
            # Check the result.
            self.assertEqual(p.bytes(), expectedBytes)

            # Now: same test, but using various newline sequences.
            # If linesep is being properly applied, these will be converted
            # to the platform standard newline sequence.
            p.write_lines(givenLines, enc)
            p.write_lines(givenLines, enc, append=True)
            # Check the result.
            self.assertEqual(p.bytes(), expectedBytes)

            # Same test, using newline sequences that are different
            # from the platform default.
            def testLinesep(eol):
                p.write_lines(givenLines, enc, linesep=eol)
                p.write_lines(givenLines, enc, linesep=eol, append=True)
                expected = 2 * cleanNoHanging.replace(u('\n'), eol).encode(enc)
                self.assertEqual(p.bytes(), expected)

            testLinesep(u('\n'))
            testLinesep(u('\r'))
            testLinesep(u('\r\n'))
            testLinesep(u('\x0d\x85'))

            # Again, but with linesep=None: lines are written verbatim,
            # with no newline conversion at all.
            p.write_lines(givenLines, enc, linesep=None)
            p.write_lines(givenLines, enc, linesep=None, append=True)
            # Check the result.
            expectedBytes = 2 * given.encode(enc)
            self.assertEqual(p.bytes(), expectedBytes)
            self.assertEqual(p.text(enc), 2 * clean)
            expectedResultLines = expectedLines[:]
            expectedResultLines[-1] += expectedLines[0]
            expectedResultLines += expectedLines[1:]
            self.assertEqual(p.lines(enc), expectedResultLines)

        test('UTF-8')
        test('UTF-16BE')
        test('UTF-16LE')
        test('UTF-16')
Example #57
0
 def testMkdirReturnsSelf(self):
     """mkdir() must return the path object itself, enabling chaining."""
     p = Path(self.tempdir) / "newdir"
     ret = p.mkdir()
     # assertEqual: assertEquals is a deprecated alias (removed in 3.12).
     self.assertEqual(p, ret)
Example #58
0
    def testRelpath(self):
        """Verify relpathto()/relpath() across ancestors, descendants,
        siblings, identity, and (on Windows) separate drives."""
        root = Path(p(nt='C:\\', posix='/'))
        foo = root / 'foo'
        quux = foo / 'quux'
        bar = foo / 'bar'
        boz = bar / 'Baz' / 'Boz'
        up = Path(os.pardir)

        # basics
        self.assertEqual(root.relpathto(boz),
                         Path('foo') / 'bar' / 'Baz' / 'Boz')
        self.assertEqual(bar.relpathto(boz), Path('Baz') / 'Boz')
        self.assertEqual(quux.relpathto(boz), up / 'bar' / 'Baz' / 'Boz')
        self.assertEqual(boz.relpathto(quux), up / up / up / 'quux')
        self.assertEqual(boz.relpathto(bar), up / up)

        # Path is not the first element in concatenation
        self.assertEqual(root.relpathto(boz),
                         'foo' / Path('bar') / 'Baz' / 'Boz')

        # x.relpathto(x) == curdir
        self.assertEqual(root.relpathto(root), os.curdir)
        self.assertEqual(boz.relpathto(boz), os.curdir)
        # Make sure case is properly noted (or ignored)
        self.assertEqual(boz.relpathto(boz.normcase()), os.curdir)

        # relpath() is relpathto() from the current working directory
        cwd = Path(os.getcwd())
        self.assertEqual(boz.relpath(), cwd.relpathto(boz))

        if os.name == 'nt':
            # Check relpath across drives: no relative form exists, so the
            # absolute path is returned unchanged.
            d = Path('D:\\')
            self.assertEqual(d.relpathto(boz), boz)
Example #59
0
 def testRenameReturnsSelf(self):
     """rename() must return the target path object, enabling chaining."""
     p = Path(self.tempdir) / "somefile"
     p.touch()
     target = Path(self.tempdir) / "otherfile"
     ret = p.rename(target)
     # assertEqual: assertEquals is a deprecated alias (removed in 3.12).
     self.assertEqual(target, ret)
Example #60
0
 def testTouchReturnsSelf(self):
     """touch() must return the path object itself, enabling chaining."""
     p = Path(self.tempdir) / "empty file"
     ret = p.touch()
     # assertEqual: assertEquals is a deprecated alias (removed in 3.12).
     self.assertEqual(p, ret)