Example #1
def save_numpy_object(obj, output_path, if_file_exists, name='file'):
    """Utility to save a numpy object

    Parameters
    ----------
    obj: numpy.ndarray
        Object to save

    output_path: str
        Where to save the file

    if_file_exists: str
        One of 'overwrite', 'abort', 'skip'. If 'overwrite', it replaces the
        file if it exists; if 'abort', it raises a ValueError exception if
        the file exists; if 'skip', it skips the operation if the file
        exists

    name: str, optional
        Name (just used for logging messages)
    """
    logger = logging.getLogger(__name__)
    output_path = Path(output_path)

    if output_path.exists() and if_file_exists == 'abort':
        raise ValueError('{} already exists'.format(output_path))
    elif output_path.exists() and if_file_exists == 'skip':
        logger.info('{} already exists, skipping...'.format(output_path))
    else:
        np.save(str(output_path), obj)
        logger.info('Saved {} in {}'.format(name, output_path))
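A minimal usage sketch for the helper above (the path and array are hypothetical; assumes numpy, logging, and pathlib are imported as in the original module):

import logging
import numpy as np

logging.basicConfig(level=logging.INFO)

arr = np.arange(10)
save_numpy_object(arr, '/tmp/demo.npy', if_file_exists='overwrite')  # always writes
save_numpy_object(arr, '/tmp/demo.npy', if_file_exists='skip')       # logs and returns
save_numpy_object(arr, '/tmp/demo.npy', if_file_exists='abort')      # raises ValueError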
Example #2
def test_summary_writer(tmp_path):
    logfile = Path(tmp_path / 'scalars.json')

    assert not logfile.exists()
    writer = SummaryWriter(log_dir=str(tmp_path))
    writer.add_scalar("test_value", 0.5, 1)
    writer.add_scalar("test_value", 2.5, 2)
    writer.add_scalar("test_value2", 123, 1)
    writer.flush()
    assert logfile.exists()

    writer = SummaryWriter(log_dir=str(tmp_path))

    assert "test_value" in writer._summary
    assert "test_value2" in writer._summary
    assert len(writer._summary["test_value"]) == 2

    writer.add_scalar("test_value", 123.4, 3)
    writer.close()

    writer = SummaryWriter(log_dir=str(tmp_path))

    assert "test_value" in writer._summary
    assert "test_value2" in writer._summary
    assert len(writer._summary["test_value"]) == 3
Example #3
def get_new_path(path):
    """ Return a path to a file, creat its parent folder if it doesn't exist, creat new one if existing.

    If the folder/file already exists, this function will use `path`_`idx` as new name, and make
    corresponding folder if `path` is a folder.
    idx will starts from 1 and keeps +1 until find a valid name without occupation.

    If the folder and its parent folders don't exist, keeps making these series of folders.

    Args:
        path: The path of a file/folder.
    Returns:
        _ : The guaranteed new path of the folder/file.
    """
    path = Path(path)
    root = Path(*path.parts[:-1])

    if not root.exists():
        root.mkdir(parents=True, exist_ok=True)

    if not path.exists():
        new_path = path
        if new_path.suffix == '':
            new_path.mkdir()
    else:
        idx = 1
        while True:
            stem = path.stem + "_" + str(idx)
            new_path = root / (stem + path.suffix)
            if not new_path.exists():
                if new_path.suffix == '':
                    new_path.mkdir()
                break
            idx += 1
    return str(new_path)
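A quick sketch of the renaming behavior (hypothetical paths, starting from an empty working directory):

p1 = get_new_path('out/run')      # creates folder 'out/run' (no suffix), returns 'out/run'
p2 = get_new_path('out/run')      # 'out/run' is taken, creates and returns 'out/run_1'
f1 = get_new_path('out/log.txt')  # returns 'out/log.txt'; files are only named, not created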
Example #4
def import_datasources(path: str, sync: str, recursive: bool) -> None:
    """Import datasources from YAML"""
    from superset.utils import dict_import_export

    sync_array = sync.split(",")
    path_object = Path(path)
    files = []
    if path_object.is_file():
        files.append(path_object)
    elif path_object.exists() and not recursive:
        files.extend(path_object.glob("*.yaml"))
        files.extend(path_object.glob("*.yml"))
    elif path_object.exists() and recursive:
        files.extend(path_object.rglob("*.yaml"))
        files.extend(path_object.rglob("*.yml"))
    for file_ in files:
        logger.info("Importing datasources from file %s", file_)
        try:
            with file_.open() as data_stream:
                dict_import_export.import_from_dict(
                    db.session, yaml.safe_load(data_stream), sync=sync_array)
        except Exception as ex:  # pylint: disable=broad-except
            logger.error("Error when importing datasources from file %s",
                         file_)
            logger.error(ex)
Example #5
    def plot_section(self):
        # self.matplotlib_widget.fig.clf()
        colormap = self.control_widget.colormap_ComboBox.currentText()
        if self.control_widget.wiggle_CheckBox.checkState() == Qt.Checked:
            kind = 'vawt'
        else:
            kind = 'img'

        ax = self.matplotlib_widget.axes
        ax.cla()
        for idx in range(self.control_widget.data_listWidget.count()):
            # for data_item in self.control_widget.data_listWidget.items:
            item = self.control_widget.data_listWidget.item(idx)
            if self.control_widget.il_radioButton.isChecked() is True:
                if item.checkState() == Qt.Checked:
                    data_path = Path(CONF.data_root) / CONF.current_survey / \
                        "Seismics" / ".{}".format(item.text())
                    if data_path.exists() is True:
                        if not hasattr(self, "data_{}".format(item.text())):
                            # check if data has already been loaded
                            # create new seis object if not
                            self.new_seis_object(item.text())
                        self.status.emit("Reading data ...")
                        seis_object = getattr(self,
                                              "data_{}".format(item.text()))
                        self.status.emit("Plotting ...")
                        seis_object.plot(ppp.InlineIndex(
                            self.control_widget.inline_SpinBox.value()),
                                         ax,
                                         kind=kind,
                                         cm=colormap)
                        self.matplotlib_widget.fig.canvas.draw()
                        self.status.emit("")
                    else:
                        self.statusBar().showMessage(
                            "cannot find data file {}".format(item.text()))

            elif self.control_widget.cl_radioButton.isChecked() is True:
                if item.checkState() == Qt.Checked:
                    data_path = Path(CONF.data_root) / CONF.current_survey / \
                        "Seismics" / ".{}".format(item.text())
                    if data_path.exists() is True:
                        if not hasattr(self, "data_{}".format(item.text())):
                            # check if data has already been loaded
                            # create new seis object if not
                            self.new_seis_object(item.text())
                        self.status.emit("Reading data ...")
                        seis_object = getattr(self,
                                              "data_{}".format(item.text()))
                        self.status.emit("Plotting ...")
                        seis_object.plot(ppp.CrlineIndex(
                            self.control_widget.crline_SpinBox.value()),
                                         ax,
                                         kind=kind,
                                         cm=colormap)
                        self.matplotlib_widget.fig.canvas.draw()
                        self.status.emit("")
                    else:
                        self.statusBar().showMessage(
                            "cannot find data file {}".format(item.text()))
Example #6
 def _skip_test_access_by_naver(self):
     test_data = '21149144.naver'
     lp, is_created = LegacyPlace.get_or_create_smart(test_data)
     path = Path(lp.path_accessed)
     if path.exists():
         path.unlink()
     self.assertEqual(path.exists(), False)
     lp.access()
     self.assertEqual(path.exists(), True)
Example #7
 def test_access_by_kakao(self):
     test_data = '14720610.kakao'
     lp, is_created = LegacyPlace.get_or_create_smart(test_data)
     path = Path(lp.path_accessed)
     if path.exists():
         path.unlink()
     self.assertEqual(path.exists(), False)
     lp.access()
     self.assertEqual(path.exists(), True)
Example #8
 def test_access_by_mango(self):
     test_data = 'f-YvkBx8IemC.mango'
     lp, is_created = LegacyPlace.get_or_create_smart(test_data)
     path = Path(lp.path_accessed)
     if path.exists():
         path.unlink()
     self.assertEqual(path.exists(), False)
     lp.access()
     self.assertEqual(path.exists(), True)
Example #9
 def test_access_by_google(self):
     if WORK_ENVIRONMENT: return
     test_data = 'ChIJs1Et3lYABDQR32tSk7gPEK4.google'
     lp, is_created = LegacyPlace.get_or_create_smart(test_data)
     path = Path(lp.path_accessed)
     if path.exists():
         path.unlink()
     self.assertEqual(path.exists(), False)
     lp.access()
     self.assertEqual(path.exists(), True)
Example #10
 def test_access_by_4square(self):
     if WORK_ENVIRONMENT: return
     test_data = '4ccffc63f6378cfaace1b1d6.4square'
     lp, is_created = LegacyPlace.get_or_create_smart(test_data)
     path = Path(lp.path_accessed)
     if path.exists():
         path.unlink()
     self.assertEqual(path.exists(), False)
     lp.access_force()
     self.assertEqual(path.exists(), True)
Example #11
    def __skip__test_access_methods(self):
        test_data = '능이백숙 국물 죽이네~ ㅎㅎ'
        pnote, is_created = PlaceNote.get_or_create_smart(test_data)

        path = Path(pnote.path_accessed)
        if path.exists():
            path.unlink()

        self.assertEqual(path.exists(), False)
        pnote.access_force()
        self.assertEqual(path.exists(), True)
Example #12
    def __skip__test_access_methods(self):
        test_data = '경기도 하남시 풍산로 270, 206동 402호 (선동, 미사강변도시2단지)'
        addr, is_created = Address.get_or_create_smart(test_data)

        path = Path(addr.path_accessed)
        if path.exists():
            path.unlink()

        self.assertEqual(path.exists(), False)
        addr.access_force()
        self.assertEqual(path.exists(), True)
Example #13
    def __skip__test_access_methods(self):
        test_data = '자기랑 진우랑 찰칵~ ^^'
        inote, is_created = ImageNote.get_or_create_smart(test_data)

        path = Path(inote.path_accessed)
        if path.exists():
            path.unlink()

        self.assertEqual(path.exists(), False)
        inote.access_force()
        self.assertEqual(path.exists(), True)
Example #14
    def __skip__test_access_methods(self):
        test_data = '031-724-2733'
        phone, is_created = PhoneNumber.get_or_create_smart(test_data)

        path = Path(phone.path_accessed)
        if path.exists():
            path.unlink()

        self.assertEqual(path.exists(), False)
        phone.access_force()
        self.assertEqual(path.exists(), True)
Example #15
    def __skip__test_access_methods(self):
        test_data = '관심'
        tname, is_created = TagName.get_or_create_smart(test_data)

        path = Path(tname.path_accessed)
        if path.exists():
            path.unlink()

        self.assertEqual(path.exists(), False)
        tname.access_force()
        self.assertEqual(path.exists(), True)
Example #16
def build_data_repo_structure(args):
    root = Path(args.root)
    if not root.exists():
        root.mkdir()
    data_path = root / args.name
    if not data_path.exists():
        data_path.mkdir()
    temp = Path(os.getcwd(), "temp")
    if not temp.exists():
        temp.mkdir()
    return data_path
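The exists()/mkdir() pairs above are racy when two processes run concurrently: a directory can appear between the check and the call. Path.mkdir(parents=True, exist_ok=True) folds each check-then-create pair into one call; a sketch of an equivalent version (same hypothetical args object):

import os
from pathlib import Path

def build_data_repo_structure(args):
    data_path = Path(args.root) / args.name
    data_path.mkdir(parents=True, exist_ok=True)  # creates root and name in one step
    Path(os.getcwd(), "temp").mkdir(exist_ok=True)
    return data_path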
Example #17
class MPIFIExpr(object):
    def __init__(self, expr, exe, args, hosts, faults, nprocesses):
        self._path = Path(expr).absolute()
        if not self._path.exists():
            self._path.mkdir()

        self._exe = Path(exe).absolute()
        assert(self._exe.exists()), "Executable (%s) not found" % self._exe
        assert(self._exe.is_file()), "Executable (%s) is not a file" % self._exe
        self._exe = str(self._exe)

        self._args = args
        self._hosts = hosts
        self._faults = load_faults(faults)
        self._nprocesses = nprocesses

    def run(self):
        libcare = Path(os.environ['CARE_ROOT']).joinpath(
            'build/runtime/libCARERuntime.so').absolute()
        assert(libcare.exists()), "the recovery runtime library is not setup yet!"

        libmpifi = Path(os.environ['CARE_ROOT']).joinpath(
            'tools/MPIFI/libmpifi.so').absolute()
        assert(libmpifi.exists()), "the MPIFI library is not setup yet!"

        CARE = "%s:%s" % (str(libcare), str(libmpifi))
        # CARE = str(libmpifi)

        os.environ['CARE_WORKER_ID'] = str(0)
        for f in self._faults:
            print("Performing fault: Addr -- %s, REG -- %s, Fvalue -- %d (%s)" %
                  (f['ADDR'], f['REG'], f['VALUE'], str(f['VALUE'])))
            fid = 'mpifi-inject-%03d' % self._faults.index(f)

            os.environ['CARE_EXPR_PATH'] = str(self._path)
            os.environ['CARE_INJECTION_ID'] = fid
            os.environ['CARE_TARGET_REG'] = f["REG"].upper()
            os.environ['CARE_TARGET_ADDR'] = f["ADDR"]
            os.environ['CARE_TARGET_DATA'] = str(f["VALUE"])

            wd = self._path.joinpath(fid)
            if not wd.exists():
                wd.mkdir()

            wd = str(wd)
            os.chdir(wd)

            app = MPIApp(self._exe, self._args,
                         self._nprocesses, self._hosts, CARE)
            app.start()
            app.wait(1200)

            os.chdir(str(self._path))
Example #18
def test_provision_missing(initproj, cmd):
    initproj(
        "pkg123-0.7",
        filedefs={
            "tox.ini": """\
                [tox]
                skipsdist=True
                minversion = 3.7.0
                requires =
                    setuptools == 40.6.3
                [testenv]
                commands=python -c "import sys; print(sys.executable); raise SystemExit(1)"
            """,
        },
    )
    result = cmd("-e", "py")
    result.assert_fail()
    assert "tox.exception.InvocationError" not in result.output()
    assert not result.err
    assert ".tox create: " in result.out
    assert ".tox installdeps: " in result.out
    assert "py create: " in result.out

    at = next(at for at, l in enumerate(result.outlines) if l.startswith("py run-test: ")) + 1
    meta_python = Path(result.outlines[at])
    assert meta_python.exists()
Example #19
    def _read_recursive(self, conf_root, verbose=True):
        conf = ConfigTree()
        if not conf_root:
            return conf
        conf_root = Path(conf_root)

        if not conf_root.exists():
            if verbose:
                print("No config in %s" % str(conf_root))
            return conf

        if verbose:
            print("Loading config from %s" % str(conf_root))
        for root, dirs, files in os.walk(str(conf_root)):

            rel_dir = str(Path(root).relative_to(conf_root))
            if rel_dir == ".":
                rel_dir = ""
            prefix = rel_dir.replace("/", ".")

            for filename in files:
                if not is_config_file(filename):
                    continue

                if prefix != "":
                    key = prefix + "." + Path(filename).stem
                else:
                    key = Path(filename).stem

                file_path = str(Path(root) / filename)

                conf.put(key, self._read_single_file(file_path,
                                                     verbose=verbose))

        return conf
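To illustrate the key scheme: files directly under conf_root are keyed by their stem, while files in subdirectories get a dotted prefix built from the relative directory (note the '/'-to-'.' replacement assumes POSIX-style separators). Assuming is_config_file() accepts .yaml files:

conf_root/
    logging.yaml      -> key 'logging'
    db/
        primary.yaml  -> key 'db.primary'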
Example #20
def new_page():
    from string import Template     # Use Python templates, not Mako templates

    slug = raw_input('Slug for page: ')
    title = raw_input('Title of page: ')
    template = raw_input('Template to inherit from (default is example.html): ')

    new_dir = Path('site') / slug
    if new_dir.exists():
        print '\nDirectory %s already exists, aborting' % new_dir
        return
    new_dir.mkdir()

    html_file = new_dir / 'index.html'
    with html_file.open('w') as fp:
        fp.write(Template(NEW_PAGE_HTML_TEMPLATE).substitute(
            title=repr(title.strip()), template=template.strip() or 'example.html'))

    js_file = new_dir / 'app.es6'
    with js_file.open('w') as fp:
        class_name = ''.join(s.capitalize() for s in title.split(' '))
        fp.write(Template(NEW_PAGE_JS_TEMPLATE).substitute(
            title=title, class_name=class_name))

    marker = '// This comment marks where new entry points will be added'
    new_entry = "'%s': './site/%s/app.es6'," % (slug, slug)
    code = open('webpack.config.js').read()
    with open('webpack.config.js', 'w') as fp:
        fp.write(code.replace(marker, new_entry + '\n    ' + marker))
Example #21
def create_repo_from_spec(spec_path, top_path, repo_path):
    """
    Invoke the prep phase of rpmbuild to generate a source directory then
    create a git repo from it
    """
    top_dir = top_path.resolve()
    cmd = ['rpmbuild', '-bp', '--nodeps',
           '--define', '_topdir '+str(top_dir), str(spec_path)]
    logging.debug("Running %s", ' '.join(cmd))
    subprocess.check_call(cmd)

    # move the created build directory under the repo directory
    build_path = list(Path(top_path, 'BUILD').glob('*'))[0]
    rename(str(build_path), str(repo_path))

    git_dir = Path(repo_path, '.git')
    if git_dir.exists():
        # setup already created a git repo
        repo = git.Repo(str(repo_path))
    else:
        repo = git.Repo.init(str(repo_path))
        index = repo.index
        index.add(repo.untracked_files)
        index.commit("Repo generated by planex-clone")

    return repo
Example #22
    def table(self,
              input_table,
              output_workbook,
              use_field_alias_as_column_header=False):
        if not arcpy.Exists(input_table):
            raise ValueError("input_table does not exist.")

        output_workbook = Path(output_workbook).resolve()

        if output_workbook.exists():
            raise ValueError("output_table already exists.")

        # get input feature class description for copy process
        d = arcpy.Describe(input_table)

        if not d.dataType == "Table":
            raise ValueError("input_table is not of type 'Table'.")

        output_workbook = xlsxwriter.Workbook(str(output_workbook))

        sheet_name = self._table_default_name(d, output_workbook)

        self._table(d, output_workbook, sheet_name,
                    use_field_alias_as_column_header)

        output_workbook.close()
Example #23
def configure_ssh(ssh_key_secret):
    if ssh_key_secret is None:
        yield
        return

    # If we get here, we are running in automation.
    # We use a user hgrc, so that we also get the system-wide hgrc
    # settings.
    hgrc = Path(user_config_dir("hg")).joinpath("hgrc")
    if hgrc.exists():
        raise FailedCommandError(
            "Not overwriting `{}`; cannot configure ssh.".format(hgrc)
        )

    try:
        ssh_key_dir = Path(tempfile.mkdtemp())

        ssh_key = get_secret(ssh_key_secret)
        ssh_key_file = ssh_key_dir.joinpath("id_rsa")
        ssh_key_file.write_text(ssh_key["ssh_privkey"])
        ssh_key_file.chmod(0o600)

        hgrc_content = (
            "[ui]\n"
            "username = trybld\n"
            "ssh = ssh -i {path} -l {user}\n".format(
                path=ssh_key_file, user=ssh_key["user"]
            )
        )
        hgrc.write_text(hgrc_content)

        yield
    finally:
        shutil.rmtree(str(ssh_key_dir))
        os.remove(str(hgrc))
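The bare yield statements suggest this function is wrapped with contextlib.contextmanager in the original module (the decorator is not shown in this excerpt). A hypothetical usage sketch under that assumption:

from contextlib import contextmanager

configure_ssh = contextmanager(configure_ssh)  # assumed; likely applied as a decorator upstream

with configure_ssh("project/hg-ssh-key"):  # hypothetical secret name
    push_to_try()  # hypothetical work that needs the temporary ssh key and hgrc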
Example #24
def main(src, dest):
    """links configfiles from one folder to another

    if links exist it verifies content
    if files exist at the target side it errors

    Args:
        src: source folder
        dest: target folder
    """
    src = Path(src)
    if not src.exists():
        print("WARNING:", src, "does not exist, skipping linking")
        return

    dest = Path(dest)

    for element in filter(_is_yaml_file, src.iterdir()):
        _warn_on_unknown_encryption(element)
        target = dest.joinpath(element.name)
        # the following is fragile
        if target.is_symlink():
            _warn_on_missmatching_symlink(src=element, target=target)
        elif target.is_file():
            _warn_on_existing_file(target)
        else:
            target.symlink_to(element.resolve())
Example #25
def test_copy_or_create(conf_dir):
    src_file = Path(conf_dir, "temp.x.in")
    dst_file = Path(conf_dir, "temp.x.out")

    # file doesn't exist, check file created
    msi_update.copy_or_create(src_file, dst_file, u"!!!")
    assert dst_file.exists()
    content = dst_file.read_text()
    assert content == "!!!"

    # files exists check file copied
    src_file.write_text(u"+++")
    msi_update.copy_or_create(src_file, dst_file, u"!!!")
    assert dst_file.exists()
    content = dst_file.read_text()
    assert content == "+++"
Example #26
class JobStatus(object):
    state_initialized = "initialized"
    state_running = "running"
    state_finished = "finished"
    state_stopped = "stopped"
    state_exception = "exception"

    def __init__(self, work_dir):
        super(JobStatus, self).__init__()
        self._work_dir = work_dir
        self._jobstatus_path = Path(
            work_dir) / BackgroundJobDefines.jobstatus_filename

        self._progress_update_path = Path(
            work_dir) / BackgroundJobDefines.progress_update_filename
        self._result_message_path = Path(
            work_dir) / BackgroundJobDefines.result_message_filename
        self._exceptions_path = Path(
            work_dir) / BackgroundJobDefines.exceptions_filename

    def get_status(self):
        data = store.load_data_from_file(str(self._jobstatus_path), default={})

        data.setdefault("state", JobStatus.state_initialized)
        data.setdefault("duration", 0.0)
        data.setdefault("pid", None)

        data["loginfo"] = {}
        for field_id, field_path in [("JobProgressUpdate",
                                      self._progress_update_path),
                                     ("JobResult", self._result_message_path),
                                     ("JobException", self._exceptions_path)]:
            if field_path.exists():  # pylint: disable=no-member
                data["loginfo"][field_id] = file(
                    str(field_path)).read().splitlines()
            else:
                data["loginfo"][field_id] = []

        return data

    def statusfile_exists(self):
        return self._jobstatus_path.exists()  # pylint: disable=no-member

    def update_status(self, params):
        if not self._jobstatus_path.parent.exists():  # pylint: disable=no-member
            return

        if params:
            try:
                status = store.load_data_from_file(str(self._jobstatus_path),
                                                   {},
                                                   lock=True)
                status.update(params)
                store.save_mk_file(str(self._jobstatus_path),
                                   self._format_value(status))
            finally:
                store.release_lock(str(self._jobstatus_path))

    def _format_value(self, value):
        return pprint.pformat(value)
Example #27
def viz(target, config):

    viz_path = Path(target) / 'viz'

    if not viz_path.exists():
        print('librec-auto: viz directory missing. Creating. ', target)
        os.makedirs(str(viz_path))

    python_path = Path('venv/bin/python')
    win_python_path = Path(
        'venv/Scripts/python.exe')  # Very annoying that I have to do this

    if python_path.is_file():
        cmd = [python_path.as_posix(), config.get_post_script(), target]
        subprocess.call(cmd)
    elif win_python_path.is_file():
        cmd = [
            str(win_python_path.as_posix()),
            config.get_post_script(), target
        ]
        subprocess.call(cmd)
    else:
        print(
            "Python virtual environment not available at venv. Please install to use this function."
        )
Example #28
def get_datafile():
    """Create the path to the data file used to store entry points."""
    config = get_config()

    pkg_path_filename = make_data_file_name()
    datafile = Path(config.get('general', 'datadir')).joinpath(pkg_path_filename)
    if datafile.exists():  # pylint: disable=no-member
        return str(datafile)  # if the unhashed exists, continue to use that one

    pkg_path_filename = hashed_data_file_name()
    datafile = Path(config.get('general', 'datadir')).joinpath(pkg_path_filename)
    if not datafile.exists():  # pylint: disable=no-member
        datafile.parent.mkdir(parents=True, exist_ok=True)
        datafile.write_text(u'{}')

    return str(datafile)
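A usage sketch (assumes a config with a [general] datadir setting; the entry_points name is illustrative):

import json

path = get_datafile()  # on first use, creates the hashed file containing '{}'
with open(path) as fh:
    entry_points = json.load(fh)  # an empty dict until entry points are recorded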
Example #29
    def populate_treeWidget(self):
        survey_file = Path(CONF.data_root, CONF.current_survey, '.survey')
        if survey_file.exists():
            self.DataTree.clear()
            self.DataTree.setHeaderLabel(CONF.current_survey)
            # populate seismic data
            seis_data = QTreeWidgetItem(self.DataTree)
            seis_data.setText(0, 'Seismic')
            for item in get_data_files(
                    Path(CONF.data_root, CONF.current_survey, "Seismics")):
                f3 = QTreeWidgetItem(seis_data)
                f3.setFlags(f3.flags() | Qt.ItemIsUserCheckable)
                f3.setText(0, item)
                f3.setCheckState(0, Qt.Unchecked)
            # populate well data
            well_data = QTreeWidgetItem(self.DataTree)
            well_data.setText(0, 'Wells')
            for item in get_data_files(
                    Path(CONF.data_root, CONF.current_survey, "Wellinfo")):
                f3 = QTreeWidgetItem(well_data)
                f3.setFlags(f3.flags() | Qt.ItemIsUserCheckable)
                f3.setText(0, item)
                f3.setCheckState(0, Qt.Unchecked)
            # populate surface data
            surface_data = QTreeWidgetItem(self.DataTree)
            surface_data.setText(0, 'Surfaces')
            for item in get_data_files(
                    Path(CONF.data_root, CONF.current_survey, "Surfaces")):
                f3 = QTreeWidgetItem(surface_data)
                f3.setFlags(f3.flags() | Qt.ItemIsUserCheckable)
                f3.setText(0, item)
                f3.setCheckState(0, Qt.Unchecked)

            self.DataTree.show()
Example #30
    def convert_to_csv_from_folder(self,
                                   dat_folder: str,
                                   csv_folder: Optional[str] = None) -> None:
        """Convert all dat in a folder to csv

        Iterate over the given folder
        Then normalize each .dat file found to .csv

        :param dat_folder: folder containing .dat files
        :param csv_folder: folder in which store CSV
        :return: None
        """
        folder = Path(dat_folder)

        if not folder.exists() \
                or not folder.is_dir():
            raise BadFileFormatException

        if not csv_folder:
            csv_folder = self.DEFAULT_OUTPUT_FOLDER

        for file in folder.iterdir():
            if file.suffix != Dat.ext:
                continue

            self.convert_to_csv(
                dat_path=str(file),
                csv_path=f'{csv_folder}{file.name.replace(Dat.ext, Csv.ext)}')
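One caveat: the f-string above concatenates csv_folder and the file name directly, so it relies on csv_folder ending with a path separator (presumably DEFAULT_OUTPUT_FOLDER does). A pathlib-based sketch that avoids that assumption:

csv_path = str(Path(csv_folder) / file.name.replace(Dat.ext, Csv.ext))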
Example #31
def _expand_path(path):
    """Expand a server path that may contain symbolic links
    """
    subbed = Path(re.sub(r'^/\~(.*?)/', r'/home/\1/public_html/', path))
    resolved = subbed.resolve() if subbed.exists() else subbed
    return re.sub(r'^/home/(.*?)/public_html/', r'/~\1/',
                  str(resolved) if resolved.exists() else path)
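For illustration (hypothetical user and layout):

_expand_path('/~alice/docs/index.html')
# 1. rewritten to '/home/alice/public_html/docs/index.html'
# 2. symlinks resolved if that expanded path exists
# 3. '/home/alice/public_html/' folded back into '/~alice/'
# If the expanded path does not exist, the original path comes back unchanged.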
Example #32
def search_urls(urls, cur_path):
    file_path = args.dirctory + "/" + args.input_file
    line_num = 0
    with open(file_path, "r") as in_file:
        for line in in_file:
            if line_num >= 2:
                line = line.strip().split(
                    "\t"
                )  # Remove the leading and trailing Spaces and split the data with "\t"
                if line[6] == args.taxid:
                    urls.append(line[19])
                else:
                    continue
            else:
                line_num += 1
    # make a new directory
    target_file = Path(cur_path + "/" + args.taxid + "_assembly_genome")
    if target_file.exists():
        storage_path = cur_path + "/" + args.taxid + "_assembly_genome" + "/" + args.taxid + "_download_url.txt"
        with open(storage_path, "w") as out_file:
            for url in urls:
                out_file.write(url + "/" + url.split("/")[-1] +
                               "_genomic.fna.gz" + "\n")
    else:
        cmd_0 = "mkdir " + args.taxid + "_assembly_genome"
        subprocess.check_output(cmd_0, shell=True)
        storage_path = cur_path + "/" + args.taxid + "_assembly_genome" + "/" + args.taxid + "_download_url.txt"
        with open(storage_path, "w") as out_file:
            for url in urls:
                out_file.write(url + "/" + url.split("/")[-1] +
                               "_genomic.fna.gz" + "\n")
    return urls
Example #33
 def log(self):
     """Context manager for the spike log file."""
     path = Path(self.config["output_dir"]) / self.config["log_file"]
     if not path.exists():
         raise BluepySnapError(
             "Cannot find the log file for the spike report.")
     yield open(str(path), "r")
Example #34
    def __init__(self):
        self.xcode = None
        self.repo_overrides = dict()

        self.root_path = Path.cwd()  # type: Path

        self.library_directory = Path('~/Library/Application Support/io.schwa.Punic').expanduser()
        if not self.library_directory.exists():
            self.library_directory.mkdir(parents=True)
        self.repo_cache_directory = self.library_directory / 'repo_cache'
        if not self.repo_cache_directory.exists():
            self.repo_cache_directory.mkdir(parents=True)
        self.punic_path = self.root_path / 'Carthage'
        self.build_path = self.punic_path / 'Build'
        self.checkouts_path = self.punic_path / 'Checkouts'

        self.derived_data_path = self.library_directory / "DerivedData"

        self.platforms = Platform.all
        self.configuration = None

        self.fetch = False
        self.xcode = Xcode.default()

        self.toolchain = None
        self.dry_run = False
        self.use_submodules = False
        self.use_ssh = False

        self.skips = []

        self.verbose = False
        self.echo = False

        self.continuous_integration = 'CI' in os.environ
        if self.continuous_integration:
            logging.info("Running on continuous integration")

        # Read in defaults from punic.yaml (or punic.yml if that exists)
        punic_configuration_path = Path('punic.yaml')
        if not punic_configuration_path.exists():
            punic_configuration_path = Path('punic.yml')
        if punic_configuration_path.exists():
            self.read(punic_configuration_path)
        runner.cache_path = self.library_directory / "cache.shelf"
Example #35
def main(src, dest, force):
    """links configfiles from one folder to another

    if links exist it verifies content
    if files exist at the target side it errors

    Args:
        src: source folder
        dest: target folder
        force: override existing symlinks
    """
    src = Path(src)
    if not src.exists():
        print("WARNING:", src, "does not exist, skipping linking")
        return

    dest = Path(dest)

    for element in filter(_is_yaml_file, src.iterdir()):
        _warn_on_unknown_encryption(element)
        target = dest.joinpath(element.name)

        if force:
            try:
                target.symlink_to(element.resolve())
            except OSError as e:
                if e.errno == errno.EEXIST:
                    backup_target = Path(dest.joinpath(element.name + "_bak"))
                    print("Replacing", target.name, "and saving backup as", backup_target.name)
                    # Would use 'backup_target.replace()' here but that's only supported in py3
                    if backup_target.exists():
                        os.remove(str(backup_target))
                    target.rename(backup_target)

                    target.symlink_to(element.resolve())
                else:
                    raise
        else:
            if target.is_symlink():
                # If symlink already exists and points to same src, do nothing.
                _check_missmatching_symlink(src=element, target=target)
            elif _check_existing_file(target):
                target.symlink_to(element.resolve())
                print("Symlink created for", target.name)
Example #36
def import_dashboards(path, recursive=False):
    """Import dashboards from JSON"""
    p = Path(path)
    files = []
    if p.is_file():
        files.append(p)
    elif p.exists() and not recursive:
        files.extend(p.glob('*.json'))
    elif p.exists() and recursive:
        files.extend(p.rglob('*.json'))
    for f in files:
        logging.info('Importing dashboard from file %s', f)
        try:
            with f.open() as data_stream:
                dashboard_import_export.import_dashboards(
                    db.session, data_stream)
        except Exception as e:
            logging.error('Error when importing dashboard from file %s', f)
            logging.error(e)
Example #37
def find_spec(package):
    """
    From a package name locate the spec file
    """
    spec_search = Configuration.get('spec', 'search-path',
                                    default='SPECS').split(':')
    for subdir in spec_search:
        path = Path(subdir, package+'.spec')
        if path.exists():
            return path
    return None
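A usage sketch (the package name is hypothetical; assumes the configured spec search path contains e.g. 'SPECS'):

spec_path = find_spec('planex')
if spec_path is not None:
    print('building from', spec_path)  # e.g. SPECS/planex.spec
else:
    print('no spec file found')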
Example #38
def find_link_pin(package):
    """
    From a package name locate the link or pin file
    """
    pin_search = Configuration.get('pin', 'search-path',
                                   default='SPECS').split(':')
    for suffix in ('.pin', '.lnk'):
        for subdir in pin_search:
            path = Path(subdir, package+suffix)
            if path.exists():
                return path
    return None
Example #39
def import_datasources(path, sync, recursive=False):
    """Import datasources from YAML"""
    sync_array = sync.split(',')
    p = Path(path)
    files = []
    if p.is_file():
        files.append(p)
    elif p.exists() and not recursive:
        files.extend(p.glob('*.yaml'))
        files.extend(p.glob('*.yml'))
    elif p.exists() and recursive:
        files.extend(p.rglob('*.yaml'))
        files.extend(p.rglob('*.yml'))
    for f in files:
        logging.info('Importing datasources from file %s', f)
        try:
            with f.open() as data_stream:
                dict_import_export_util.import_from_dict(
                    db.session,
                    yaml.safe_load(data_stream),
                    sync=sync_array)
        except Exception as e:
            logging.error('Error when importing datasources from file %s', f)
            logging.error(e)
Example #40
    def __init__(self, options):
        self.do = options['data_options']
        self.mo = options['model_options']
        self.oo = options['optimization_options']
        self.lo = options['log_options']

        data_path = self.do['data_path']
        task_num = self.do['task_number']
        lang = self.do.get('language', 'en')  # defaults to use small Eng set
        self.qa_train, self.qa_test \
            = read_dataset(data_path,
                           task_num, lang, options['data_options']['reader'],
                           {'threshold': 0,
                            'context_length': self.mo['context_length'],
                            'context_length_percentage': self.mo.get('context_length_percentage', 1),
                            'sentence_length': self.mo['sentence_length']})

        self.data_size = len(self.qa_train.stories)
        self.mo['context_length'] = self.qa_train.context_length
        #self.options['model_options']['context_length'] = self.qa_train.context_length

        tokens = self.qa_train.specialWords
        self.NULL = tokens['<NULL>']
        self.EOS = tokens['<EOS>']
        self.UNKNOWN = tokens['<UNKNOWN>']

        if self.oo['dump_params']:
            weight_dir = Path(self.oo['weight_path'])
            if not weight_dir.exists():
                weight_dir.mkdir()
        self.batch_size_train = self.oo['batch_size_train']
        self.batch_size_test = self.oo['batch_size_test']

        self.verbose = self.oo['verbose']
        self.log = self.logger_factory()
        self.lo['dump_epoch'] = self.oo['max_epoch'] \
                                if self.lo['dump_epoch'] < 0 \
                                else self.lo['dump_epoch']

        vocab_size = len(self.qa_train.index_to_word)
        options['model_options']['vocab_size'] = vocab_size
        model_name = self.mo['model_name']
        self.model = Models.model(model_name)(options)
        self.log("context length: %d" % self.mo['context_length'])
Example #41
def preprocess_options(options, disp=False):
    if disp:
        print "options:\n", json.dumps(options, indent=4, sort_keys=False)

    log_options = options['log_options']
    if log_options['dump_config']:
        path = Path(log_options['dump_path'])
        if not path.exists():
            path.mkdir()
        dumpname = log_options['dump_name']
        basename = os.path.splitext(dumpname)[0] + '.json'
        json.dump(options,
                  open(
                      str(path / basename), 'w'),
                  indent=4,
                  sort_keys=False)

    data_readers = {'QAReader': QAReader,
                    'minibatch': MinibatchReader}

    options['data_options']['reader'] \
    = data_readers[options['data_options']['reader']]
Example #42
def new_page():
    from string import Template     # Use Python templates, not Mako templates

    slug = raw_input('Slug for page: ')
    title = raw_input('Title of page: ')
    template = raw_input('Template to inherit from (default is example.html): ')

    new_dir = Path('web') / slug
    if new_dir.exists():
        print '\nDirectory %s already exists, aborting' % new_dir
        return
    new_dir.mkdir()

    html_file = new_dir / 'index.html'
    with html_file.open('w') as fp:
        fp.write(Template(NEW_PAGE_HTML_TEMPLATE).substitute(
            title=repr(title.strip()), template=template.strip() or 'example.html'))

    js_file = new_dir / 'app.dart'
    with js_file.open('w') as fp:
        class_name = ''.join(s.capitalize() for s in title.split(' '))
        fp.write(Template(NEW_PAGE_CODE_TEMPLATE).substitute(title=title))
Example #43
def test_has_files(tmp_crumb):
    assert not op.exists(tmp_crumb.path)

    assert not tmp_crumb.has_files()

    values_dict = {'session_id': ['session_{:02}'.format(i) for i in range( 2)],
                   'subject_id': ['subj_{:03}'.format(i)    for i in range( 3)],
                   'modality':   ['anat'],
                   'image':      ['mprage1.nii', 'mprage2.nii', 'mprage3.nii'],
                   }

    paths = mktree(tmp_crumb, list(ParameterGrid(values_dict)))

    assert op.exists(tmp_crumb.split()[0])

    assert not tmp_crumb.has_files()

    pa = Path(str(paths[0]))
    pa.rmdir()
    pa.touch()

    assert pa.exists()
    assert pa.is_file()
    assert tmp_crumb.has_files()
Example #44
def punic_cli(context, echo, verbose, timing, color):
    ### TODO: Clean this up!

    # Configure click
    context.token_normalize_func = lambda x: x if not x else x.lower()

    # Configure logging
    level = logging.DEBUG if verbose else logging.INFO

    logger = logging.getLogger()
    logger.setLevel(logging.DEBUG)

    formatter = HTMLFormatter()

    # create console handler and set level to debug
    stream_handler = logging.StreamHandler()
    stream_handler.setLevel(level)
    stream_handler.setFormatter(formatter)
    # add ch to logger
    logger.addHandler(stream_handler)

    # TODO: This needs to be a better location
    logs_path = Path('~/Library/Application Support/io.schwa.Punic/Logs').expanduser()
    if not logs_path.exists():
        logs_path.mkdir(parents=True)

    log_path = logs_path / "punic.log"
    needs_rollover = log_path.exists()

    file_handler = logging.handlers.RotatingFileHandler(str(log_path), backupCount=4)
    if needs_rollover:
        file_handler.doRollover()
    file_handler.setLevel(logging.DEBUG)
    file_handler.setFormatter(HTMLStripperFormatter(logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")))
    logger.addHandler(file_handler)

    for name in ['boto', 'requests.packages.urllib3']:
        named_logger = logging.getLogger(name)
        named_logger.setLevel(logging.WARNING)
        named_logger.propagate = True

    runner.echo = echo

    # Set up punic
    punic = Punic()
    punic.config.log_timings = timing
    context.obj = punic
    punic.config.verbose = verbose
    punic.config.echo = verbose

    # Color:
    if color is None:
        if punic.config.continuous_integration:
            color = False

    if color is None:
        color = True


    punic.config.color = color
    formatter.color = color
    logger.color = color
Example #45
class Config(object):
    def __init__(self):
        self.defaults = {
            'configuration': None,
            'platforms': [],
        }
        self.xcode = None
        self.repo_overrides = dict()

        self.root_path = Path.cwd()  # type: Path

        self.library_directory = Path(os.path.expanduser('~/Library/io.schwa.Punic'))
        if not self.library_directory.exists():
            self.library_directory.mkdir(parents=True)
        self.repo_cache_directory = self.library_directory / 'repo_cache'
        if not self.repo_cache_directory.exists():
            self.repo_cache_directory.mkdir(parents=True)
        self.punic_path = self.root_path / 'Carthage'
        self.build_path = self.punic_path / 'Build'
        self.checkouts_path = self.punic_path / 'Checkouts'

        self.derived_data_path = self.library_directory / "DerivedData"

        runner.cache_path = self.library_directory / "cache.shelf"

        self.can_fetch = False
        self.xcode = Xcode.default()

        # Read in defaults from punic.yaml
        self.read(Path('punic.yaml'))


    @property
    def xcode_version(self):
        return self.xcode.version if self.xcode else None

    @xcode_version.setter
    def xcode_version(self, value):
        xcode = Xcode.with_version(value)
        if value and not xcode:
            raise Exception('Could not find xcode version: {}'.format(value))
        if not xcode:
            xcode = Xcode.default()
        self.xcode = xcode


    def read(self, path):
        # type: (Path)

        if not path.exists():
            return

        d = pureyaml.load(path.open())
        if 'defaults' in d:
            defaults = d['defaults']
            if 'configuration' in defaults:
                self.configuration = defaults['configuration']
            if 'platforms' in defaults:
                self.platforms = parse_platforms(defaults['platforms'])
            elif 'platform' in defaults:
                self.platforms = parse_platforms(defaults['platform'])

        if 'repo-overrides' in d:
            self.repo_overrides = d['repo-overrides']

        if 'xcode-version' in d:
            xcode_version = d['xcode-version']
            self.xcode_version = xcode_version

    def dump(self):
        logger.info('Config:')
        logger.info('\tDefaults')
        for k, v in self.defaults.items():
            logger.info('\t\t{}: {}'.format(k, v))
        logger.info('\tOverrides: {}'.format(self.repo_overrides))

    def update(self, configuration=None, platform=None):
        # type: (str, string) -> bool
        if configuration:
            self.configuration = configuration
        if platform:
            self.platforms = parse_platforms(platform)

    @property
    def configuration(self):
        return self.defaults['configuration']

    @configuration.setter
    def configuration(self, configuration):
        self.defaults['configuration'] = configuration

    @property
    def platforms(self):
        return self.defaults['platforms']

    @platforms.setter
    def platforms(self, platforms):
        self.defaults['platforms'] = platforms
Example #46
class Config(object):
    def __init__(self):
        self.xcode = None
        self.repo_overrides = dict()

        self.root_path = Path.cwd()  # type: Path

        self.library_directory = Path('~/Library/Application Support/io.schwa.Punic').expanduser()
        if not self.library_directory.exists():
            self.library_directory.mkdir(parents=True)
        self.repo_cache_directory = self.library_directory / 'repo_cache'
        if not self.repo_cache_directory.exists():
            self.repo_cache_directory.mkdir(parents=True)
        self.punic_path = self.root_path / 'Carthage'
        self.build_path = self.punic_path / 'Build'
        self.checkouts_path = self.punic_path / 'Checkouts'

        self.derived_data_path = self.library_directory / "DerivedData"

        self.platforms = Platform.all
        self.configuration = None

        self.fetch = False
        self.xcode = Xcode.default()

        self.toolchain = None
        self.dry_run = False
        self.use_submodules = False
        self.use_ssh = False

        self.skips = []

        self.verbose = False
        self.echo = False

        self.continuous_integration = 'CI' in os.environ
        if self.continuous_integration:
            logging.info("Running on continuous integration")

        # Read in defaults from punic.yaml (or punic.yml if that exists)
        punic_configuration_path = Path('punic.yaml')
        if not punic_configuration_path.exists():
            punic_configuration_path = Path('punic.yml')
        if punic_configuration_path.exists():
            self.read(punic_configuration_path)
        runner.cache_path = self.library_directory / "cache.shelf"

    def update(self, **kwargs):
        for key, value in sorted(kwargs.items()):
            if value:
                if hasattr(self, key):
                    setattr(self, key, value)

        # Special case for platforms
        platform = kwargs['platform'] if 'platform' in kwargs else None
        if platform:
            self.platforms = parse_platforms(platform)

        if self.verbose and os.environ.get('DUMP_CONFIG', False):
            self.dump()

    def dump(self):

        logging.info('# Environment ##' + '#' * 64)

        logging.info('CWD: {}'.format(os.getcwd()))

        key_width = max([len(k) for k in os.environ.keys()] + [len(k) for k in self.__dict__.keys()])

        for key, value in sorted(os.environ.items()):
            logging.info('{:{key_width}}: {}'.format(key, value, key_width = key_width + 1))

        logging.info('# Configuration ' + '#' * 64)

        for key, value in sorted(self.__dict__.items()):
            logging.info('{:{key_width}}: {}'.format(key, value, key_width = key_width + 1))
        logging.info('#' * 80)

    @property
    def xcode_version(self):
        return self.xcode.version if self.xcode else None

    @xcode_version.setter
    def xcode_version(self, value):
        xcode = Xcode.with_version(value)
        if value and not xcode:
            raise Exception('Could not find xcode version: {}'.format(value))
        if not xcode:
            xcode = Xcode.default()
        self.xcode = xcode

    def read(self, path):
        # type: (Path)

        d = yaml.safe_load(path.open())
        if not d:
            return
        if 'defaults' in d:
            defaults = d['defaults']
            if 'configuration' in defaults:
                self.configuration = defaults['configuration']
            if 'platforms' in defaults:
                self.platforms = parse_platforms(defaults['platforms'])
            elif 'platform' in defaults:
                self.platforms = parse_platforms(defaults['platform'])
            if 'xcode-version' in defaults:
                self.xcode_version = six.text_type(defaults['xcode-version'])

            if 'use-ssh' in defaults:
                self.use_ssh = defaults['use-ssh']

        if 'repo-overrides' in d:
            self.repo_overrides = d['repo-overrides']

        if 'skips' in d:
            self.skips = d['skips'] or []
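For reference, the shape of punic.yaml that this read() walks, written out as the parsed dict (values are illustrative, not from the source):

d = {
    'defaults': {
        'configuration': 'Debug',
        'platforms': ['iOS'],  # a 'platform' key is accepted as a fallback
        'xcode-version': '8.3',
        'use-ssh': True,
    },
    'repo-overrides': {'SomeDep': 'git@github.com:example/SomeDep.git'},  # hypothetical
    'skips': ['SomeOtherDep'],  # hypothetical
}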
Example #47
    else:
        gmx_mpi.symlink_to(gmx_exe)
        return str(gmx_mpi.expanduser())


@pytest.fixture
def modified_config(request):
    link_gmx_mpi = request.config.getoption('link_gmx_mpi')
    tools = str(Path('~/gmx_mpi').expanduser()) if link_gmx_mpi else ''
    append_suffix = 'yes' if request.config.getoption('append_suffix') else 'no'
    return tools, append_suffix, Path


path_config = Path('~/.gromacswrapper.cfg').expanduser()
gw_config = ConfigParser()
if path_config.exists():
    gw_config.read(str(path_config.resolve()))
    config_existed = True
else:
    gw_config.read('gromacs/templates/gromacswrapper.cfg')
    config_existed = False
config_backup = path_config.with_suffix('.bak')


def pytest_configure(config):
    link_gmx_mpi = config.getoption('link_gmx_mpi')
    append_suffix = 'yes' if config.getoption('append_suffix') else 'no'
    if config_existed:
        shutil.copy(str(path_config), str(config_backup))
    tools = gmx_mpi_linked(link_gmx_mpi)
    gw_config.set('Gromacs', 'tools', tools)