Example #1
def test_convert_pdf_page_to_web(tmp_fixture_dir):
    """Convert single page pdf to web version"""

    pdf = Path(tmp_fixture_dir) / 'PDF' / PAGE_ONE

    # Make new file
    opt = convert_pdf_to_web(pdf)

    # Default file location
    assert opt == pdf.parent / 'WEB' / PAGE_ONE
    assert opt.exists()

    # Conversion changes file content
    assert opt.stat().st_size != pdf.stat().st_size

    # Calling again should not create new file
    mtime = opt.stat().st_mtime
    opt2 = convert_pdf_to_web(pdf)
    assert opt2.stat().st_mtime == mtime

    # If pdf is changed, convert should rerun
    pdf.touch()
    opt3 = convert_pdf_to_web(pdf)
    assert opt3.stat().st_mtime > mtime

    # Nonexisting file should raise error
    false_pdf = pdf.with_name('nonexisting')
    assert not false_pdf.is_file()
    with pytest.raises(FileNotFoundError):
        convert_pdf_to_web(false_pdf)
Example #2
def test_open(w):
  mask = IN_OPEN | IN_CLOSE
  w.add('testfile', mask)
  watch = w._paths[P('testfile')]

  assert len(watch.links) == 2
  assert watch.path == P('testfile')
  assert watch.watcher == w
  assert watch.mask == mask

  link1 = watch.links[0]
  assert link1.idx == 0
  assert link1.path == str(P.cwd())
  assert link1.rest == 'testfile'
  assert link1.mask == IN_UNMOUNT | IN_ONLYDIR | IN_EXCL_UNLINK | IN_IGNORED | IN_MOVE | IN_DELETE | IN_CREATE
  assert link1.watch == watch
  wd = link1.wd
  assert wd.callbacks['testfile'] == [link1]
  assert wd.mask == link1.mask
  assert wd.watcher == w
  watchdesc = wd.wd
  assert w._watchdescriptors[watchdesc] == wd

  link2 = watch.links[1]
  assert link2.idx == 1
  assert link2.path == str(P.cwd()['testfile'])
  assert link2.rest == '.'
  assert link2.mask == IN_OPEN | IN_CLOSE
  assert link2.watch == watch
  wd = link2.wd
  assert wd.callbacks[None] == [link2]
  assert wd.mask == link2.mask
  assert wd.watcher == w
  watchdesc = wd.wd
  assert w._watchdescriptors[watchdesc] == wd
  
  open('testfile').close()
  evts = w.read(block=False)
  ev1, ev2 = evts
  assert ev1.open
  assert ev2.close
  assert ev2.close_nowrite

  os.remove('testfile')
  ev3 = w.read(block=False)[0]
  assert ev3.path_delete and ev3.path_changed
  assert ev3.path == 'testfile'
  assert P(ev3.name).parts[-1] == 'testfile'

  w.close()
Example #3
    def key_file(self, value):
        if self.connected:
            raise SFTPSetOnConnectedError('key_file', value)
        if value is None:
            kfile = DEFAULT_SSH_KEY
        elif isinstance(value, Path):
            kfile = value
        else:
            kfile = PosixPath(str(value))

        if str(kfile).startswith('~'):
            kfile = kfile.expanduser()

        self._key_file = kfile
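The tilde handling above is plain pathlib; a runnable sketch of just that step in isolation (the example path is invented):

from pathlib import PosixPath

kfile = PosixPath('~/.ssh/id_rsa')
if str(kfile).startswith('~'):
    kfile = kfile.expanduser()
print(kfile)  # e.g. /home/user/.ssh/id_rsa on a POSIX system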
Example #4
def test_multi(w):
  open('file2', 'w').close()
  os.symlink('file2', 'link2')
  os.symlink(str(P.cwd()['link2']), 'link3')
  os.symlink('testfile', 'link4')
  
  w.add('link3', IN_OPEN)
  w.add('link4', IN_OPEN)

  open('file2').close()
  evts = w.read()
  assert len(evts) == 1
  e = evts[0]
  assert e.open
  assert e.path == 'link3'

  open('testfile').close()
  evts = w.read()
  assert len(evts) == 1
  e = evts[0]
  assert e.open
  assert e.path == 'link4'

  assert len(w._watchdescriptors) == 5

  os.remove('link3')
  os.symlink('link4', 'link3')

  evts = w.read()
  open('testfile').close()
  evts.extend(w.read())
  assert len(evts) == 4
Example #5
def _naive_relative_to(path: PosixPath, root: PosixPath) -> PurePosixPath:
    """
    Compute relative PurePosixPath, result may include '..' parts.

    Both arguments must be absolute PurePosixPaths and lack '..' parts.

    The possibility of symlinks is ignored, i.e. the arguments are
    interpreted as already-resolved paths.
    """
    if not path.is_absolute():
        raise ValueError(path)
    if not root.is_absolute():
        raise ValueError(root)
    if '..' in path.parts:
        raise ValueError(path)
    if '..' in root.parts:
        raise ValueError(root)
    upstairs = 0
    while root not in path.parents:
        parent = root.parent
        assert parent != root
        root = parent
        upstairs += 1
    return PurePosixPath(
        *(('..',) * upstairs),
        path.relative_to(root) )
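A quick worked example of the helper above, runnable once the function is in scope (paths invented):

from pathlib import PosixPath

print(_naive_relative_to(PosixPath('/a/b/c'), PosixPath('/a/d')))
# -> ../b/c : root climbs from /a/d up to /a (upstairs == 1), then b/c is appended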
Example #6
def search_dir(d: PosixPath, basepath, files, extension):
    """Search directories recursively for repo files."""
    scripts = []
    for path in d.iterdir():
        string_path = str(path).replace(basepath, '')[1:]
        if path.is_dir():
            recu_scripts = search_dir(path, basepath, files, extension)
            if recu_scripts:
                scripts.extend(recu_scripts)
        if string_path in files and path.suffix == extension:
            scripts.extend([string_path])
    return scripts
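A hypothetical call to search_dir (the directory layout, file list and extension are invented for illustration):

# Given /repo/scripts/deploy.py on disk:
# search_dir(PosixPath('/repo'), '/repo', ['scripts/deploy.py'], '.py')
# recurses into /repo/scripts and returns ['scripts/deploy.py']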
Example #7
    def local_dir(self, value):

        if value is None:
            raise ValueError("The local directory may not be set to None.")

        if isinstance(value, Path):
            ldir = value
        else:
            ldir = PosixPath(str(value))

        if str(ldir).startswith('~'):
            ldir = ldir.__class__(os.path.expanduser(str(ldir)))

        if self.connected:
            if not ldir.is_dir():
                msg = "Directory %r does not exists or is not a directory." % (ldir)
                raise SFTPLocalPathError(msg)
            os.chdir(str(ldir))

        self.base_dir = str(ldir)
        self._local_dir = ldir
Example #8
def main():
    # type: () -> None
    import argparse
    parser = argparse.ArgumentParser(
        description="Windows Explorer"
    )

    parser.add_argument(
        "posixpath",
        nargs='?',
        default='.',
    )

    args = parser.parse_args()

    call_explorer(
        posixpath=PosixPath(args.posixpath)
        )
Example #9
def test_report_summary_no_warnings(capsys):
    """
    Given:
        - Lint manager dictionary with one pack which has warnings.

    When:
        - Creating summary of the lint.

    Then:
        - Ensure that there are no warnings printed in the summary and all passed.
    """
    from demisto_sdk.commands.lint import lint_manager
    lint_status = {
        'fail_packs_flake8': [],
        'fail_packs_XSOAR_linter': [],
        'fail_packs_bandit': [],
        'fail_packs_mypy': [],
        'fail_packs_vulture': [],
        'fail_packs_pylint': [],
        'fail_packs_pytest': [],
        'fail_packs_pwsh_analyze': [],
        'fail_packs_pwsh_test': [],
        'fail_packs_image': [],
        'warning_packs_flake8': [],
        'warning_packs_XSOAR_linter': [],
        'warning_packs_bandit': [],
        'warning_packs_mypy': [],
        'warning_packs_vulture': [],
        'warning_packs_pylint': [],
        'warning_packs_pytest': [],
        'warning_packs_pwsh_analyze': [],
        'warning_packs_pwsh_test': [],
        'warning_packs_image': []
    }
    pkg = [
        PosixPath(
            '/Users/test_user/dev/demisto/content/Packs/Maltiverse/Integrations/Maltiverse'
        )
    ]
    lint_manager.LintManager.report_summary(pkg=pkg, lint_status=lint_status)
    captured = capsys.readouterr()
    assert "Packages PASS: \x1b[32m1\x1b[0m" in captured.out
    assert "Packages WARNING (can either PASS or FAIL): \x1b[33m0\x1b[0m" in captured.out
    assert "Packages FAIL: 0" in captured.out
Example #10
def installNeoVim(args):
    print("Checking for Vim directory")
    vimDir = PosixPath("~/.config/nvim/").expanduser()
    if not vimDir.exists():
        print("Neovim config dir not found, need to install...")
    else:
        copyfile(PosixPath("./shared/init.vim"),
                 PosixPath("~/.config/nvim/init.vim").expanduser())
        copyfile(PosixPath("./shared/plug.vim"),
                 PosixPath("~/.config/nvim/autoload/plug.vim").expanduser())
        os.system("python3 -m pip install --user --upgrade pynvim")
        print("Additional Notes:")
        print("================")
        print("Install universal-ctags...")
        print("https://github.com/universal-ctags/ctags")
        print("Install the coc-json and coc-tsserver in nvim:")
        print(":CocInstall coc-json coc-tsserver")
Example #11
    def run(self):
        if not self.user_authorize('system', 'manage'):
            return
        while True:
            task = self.task_q.get()
            logging.info(task)

            if task[0] == 'UPLOAD':
                p = PosixPath(task[1])
                if p.is_dir():
                    target_dir = os.path.join(task[2], p.name)
                    if self.do_mkdir(p.name, task[2]):
                        children = [d for d in p.iterdir()]
                        children.sort()
                        for f in children:
                            self.task_q.put(['UPLOAD', str(f), target_dir])
                else:
                    self.do_upload(str(p), task[2])
            elif task[0] == 'DOWNLOAD':
                R = self._stat(task[1])
                if R["fileType"]==1: #dir
                    p = PosixPath(task[2]).joinpath(R["name"]=="" and "ROOT" or R["name"])
                    p.mkdir()
                    target_dir = os.path.join(task[2], p.name)
                    for r in self._stat2(task[1]):
                        self.task_q.put(['DOWNLOAD', r["path"], target_dir])
                else:
                    self.do_download(task[1], task[2])
            elif task[0] == 'MKDIR':
                self.do_mkdir(os.path.basename(task[1]), os.path.dirname(task[1]))
            elif task[0] == 'STAT':
                self.do_stat(task[1])
            elif task[0] == 'LS':
                self.do_ls(task[1])
            elif task[0] == 'LS-R':
                self.do_ls_r(task[1])
            elif task[0] == 'RENAME':
                self.do_rename(task[1], task[2])
            elif task[0] == 'REMOVE':
                self.do_delete(task[1])
            elif task[0] == 'EXIT':
                self.close()
                logging.info("Exit.")
                self.task_q.task_done()
                return
            else:
                msg = "Invalid task: %s" % str(task)
                logging.error(msg)
                TestClient.ERRORS.append(msg)

            self.task_q.task_done()
Example #12
    def get(self,
            resource: Text,
            path: Text,
            location_type: Optional[DataLocationType] = None
            ) -> Set[DataLocation]:
        resource = resource or LOCAL_RESOURCE
        node = self._filesystems.get(resource)
        if not node:
            return set()
        path = Path(path) if resource == LOCAL_RESOURCE else PosixPath(path)
        for token in path.parts:
            if token in node.children:
                node = node.children[token]
            else:
                return set()
        return ({
            loc
            for loc in node.locations if loc.location_type == location_type
        } if location_type else node.locations)
Example #13
    def __init__( self, path: PosixPath, text: str,
        *, build_dir_node: DirectoryNode,
        name: Optional[str] = None, needs: Iterable[Node] = (),
    ) -> None:
        if path.parent != build_dir_node.path:
            raise RuntimeError(path)
        var_name = '{name}.{hash}'.format(
            name=path.name, hash=text_hash(text) )
        var_text_node = VarTextNode(
            path=path.with_name(var_name), text=text,
            name='{}:var'.format(name),
            needs=(build_dir_node,) )
        if isinstance(build_dir_node, BuildDirectoryNode):
            build_dir_node.register_node(var_text_node)
        super().__init__(
            source=var_text_node, path=path,
            name=name,
            needs=(*needs, build_dir_node) )
        self.text = text
Example #14
def resolve_path(path):
    '''Resolve the symlinks in path, yielding all filesystem locations that are traversed.

    The yielded value is a tuple, of which the first element is a symlink-free
    path, and the second element is a path relative to the first element that
    has not yet been traversed. This second element may contain more symlinks.
    
    The resolution implementation will follow an unbounded number of symlinks
    but will still detect symlink loops if they prevent a path from resolving.

    path can be given as a string or as a pathlib object. The yielded values
    are pathlib.PosixPath objects.

    '''
    linkcache = {}
    linkcounter = [0]
    for p in resolve_symlink(_curdir, PosixPath(path), set(),
                                  linkcache, linkcounter):
        yield p
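A sketch of how the generator above might be consumed (the symlink layout is invented, and the exact intermediate tuples depend on resolve_symlink, which is not shown):

# With /data being a symlink to /mnt/volume1:
# for p in resolve_path('/data/logs/app.log'):
#     print(p)
# could yield (symlink-free base, untraversed rest) pairs such as
# (PosixPath('/'), PosixPath('data/logs/app.log'))
# (PosixPath('/mnt/volume1'), PosixPath('logs/app.log'))
# (PosixPath('/mnt/volume1/logs/app.log'), PosixPath('.'))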
Example #15
    def test_experiment_output(self) -> None:
        config = Config(LABBY_CONFIG_YAML)
        sequence = ExperimentSequence("./sequences/seq.yaml", SEQUENCE_YAML)
        runner = ExperimentRunner(config, sequence)

        with patch_time("2020-08-08"), patch_file_contents(
                "output/seq/000.csv") as output, patch(
                    "os.makedirs") as makedirs:
            runner.start()
            runner.join()

        makedirs.assert_called_with(PosixPath("output/seq/"), exist_ok=True)
        self.assertEqual(len(output.write.call_args_list), 4)
        output.write.assert_has_calls([
            call("seconds,voltage\n"),
            call("0.0,15.0\n"),
            call("0.5,15.0\n"),
            call("1.0,15.0\n"),
        ])
Example #16
def select_model_by_facet_value(
    facet_value: str,
    root: PosixPath = Path("/kbdata/Processed/Models/")) -> dict:
    """Select models over time by a specific facet value.
    Arguments:
        facet_value (str): selected facet value, e.g. 'Katholiek' 
        root (PosixPath): the folder where all models are stored
    Returns:
        a dictionary that maps year to a path
    """
    models = root.glob(f"*-{facet_value}.w2v.model")

    out = {}
    for m in models:

        # str.lstrip("FT-") would strip a *character set* (any of F, T, -), not
        # the literal prefix; removeprefix (Python 3.9+) drops exactly "FT-"
        start = m.stem.removeprefix("FT-").split('-')[0]
        out[int(start)] = m

    return out
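A sketch of the resulting mapping (the model file names are assumptions based on the glob pattern above):

# With FT-1910-Katholiek.w2v.model and FT-1920-Katholiek.w2v.model in the root:
# select_model_by_facet_value('Katholiek')
# -> {1910: PosixPath('/kbdata/Processed/Models/FT-1910-Katholiek.w2v.model'),
#     1920: PosixPath('/kbdata/Processed/Models/FT-1920-Katholiek.w2v.model')}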
Example #17
def test_generate_snap_graph_no_speckle_filter(safe_file):
    params = {"mask": ["sea"], "tcorrection": False, "speckle_filter": False}

    SNAPPolarimetry(params).generate_snap_graph(
        safe_file.feature,
        "VV",
        "/tmp/input/S1B_IW_GRDH_1SDV_"
        "20190220T050359_20190220T050424_015025_01C12F_4EA4.SAFE_vv",
    )
    graph_xml_file = PosixPath(
        "/tmp/S1B_IW_GRDH_1SDV_"
        "20190220T050359_20190220T050424_015025_01C12F_4EA4.SAFE_VV.xml")
    tree = ET.parse(str(graph_xml_file))
    all_nodes = tree.findall("node")

    node_id_list = [graph_node.attrib["id"] for graph_node in all_nodes]

    assert "Speckle-Filter" not in node_id_list
    assert "LinearToFromdB" in node_id_list
Example #18
class API:
    section = 'api'
    TITLE = config.get(section, 'title', fallback='TensorHive API')
    URL_SCHEMA = config.get(section, 'url_schema', fallback='http')
    URL_HOSTNAME = config.get(section, 'url_hostname', fallback='0.0.0.0')
    URL_PORT = config.get(section, 'url_port', fallback='1111')
    URL_PREFIX = config.get(section, 'url_prefix', fallback='api')
    SPEC_FILE = config.get(section,
                           'spec_file',
                           fallback='api_specification.yml')
    IMPL_LOCATION = config.get(section,
                               'impl_location',
                               fallback='tensorhive.api.controllers')

    import yaml
    responses_file_path = str(
        PosixPath(__file__).parent / 'controllers/responses.yml')
    with open(responses_file_path, 'r') as file:
        RESPONSES = yaml.safe_load(file)
Example #19
def test_nested_album_art_album_layout_case_2a(album_layout_case_2a):
    """Find nested album art of case 2a."""
    for instance in album_layout_case_2a:
        with mock.patch("os.walk", return_value=instance):
            album_path = instance[0][0]
            directories_in_album_directory = instance[0][1]
            with mock.patch("os.listdir",
                            return_value=directories_in_album_directory):
                with mock.patch("os.path.isfile", return_value=False):
                    result = teeb.find.nested_album_art(album_path)
                    expected = {
                        "case1": [],
                        "case2": [{
                            "art_dir": f"{album_path}/album_art",
                            "art_files": ["cover.jpg", "back.jpg"],
                            "parent_dir": PosixPath(album_path),
                        }],
                    }
                    assert result == expected
Example #20
    def __init__(self, path: pathlib.PosixPath):
        self.name = path.name
        self.image_paths = []
        self.preview_path = path.parent / "preview.jpg"
        for image_path in sorted([
                x for x in path.glob("./*") if x.is_file() and any(
                    str(x).endswith(ext) for ext in [".png", ".jpg", ".jpeg"])
        ]):
            if image_path.name.startswith("preview"):
                self.preview_path = image_path
            else:
                self.image_paths.append(image_path)
        self.num_items = len(self.image_paths)
        self.image_width, self.image_height = self._validate_image_dimensions()
        self.images_per_row = math.ceil(math.sqrt(self.num_items))
        self.images_per_column = (
            math.floor(self.num_items / self.images_per_row)
            if self.num_items % self.images_per_row == 0
            else math.ceil(self.num_items / self.images_per_row))
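For instance, 10 images give images_per_row = ceil(sqrt(10)) = 4 and, since 10 % 4 != 0, images_per_column = ceil(10 / 4) = 3; when the division is exact the floor and ceil branches agree, so the conditional always reduces to the ceiling.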
Example #21
def train_fold(scandir, datadir, use_tptrw, n_splits):
    """Perform a folded training based on a hyperparameter scan result."""
    from tdub.ml_train import folded_training, prepare_from_root
    from tdub.data import quick_files

    scandir = PosixPath(scandir).resolve()
    summary_file = scandir / "best" / "summary.json"
    outdir = scandir / "foldres"
    if outdir.exists():
        log.warning(f"fold result already exists for {scandir}, exiting")
        return 0
    summary = None
    with summary_file.open("r") as f:
        summary = json.load(f)
    nlo_method = summary["nlo_method"]
    best_iteration = summary["best_iteration"]
    if best_iteration > 0:
        summary["all_params"]["n_estimators"] = best_iteration
    region = summary["region"]
    branches = summary["features"]
    selection = summary["selection_used"]
    qf = quick_files(datadir)
    df, y, w = prepare_from_root(
        qf[f"tW_{nlo_method}"],
        qf["ttbar"],
        region,
        override_selection=selection,
        branches=branches,
        weight_mean=1.0,
        use_tptrw=use_tptrw,
    )
    folded_training(
        df,
        y,
        w,
        summary["all_params"],
        {"verbose": 10},
        str(outdir),
        summary["region"],
        kfold_kw={"n_splits": n_splits, "shuffle": True},
    )
    return 0
Example #22
def download_audioset_sound_files(path_to_audioset_folder, data_type,
                                  num_classes):
    """

    :param path_to_audioset_csv_folder:
    :param data_type:
    :return:
    """

    csv_data = pd.read_csv(
        os.path.join(path_to_audioset_folder, 'csv',
                     '{}_segments_{}.csv'.format(data_type, num_classes)))

    # Create the folder in which we store the sound files
    sound_folder = Path(path_to_audioset_folder) / PosixPath(data_type)
    os.makedirs(os.path.join(sound_folder, 'wav'), exist_ok=True)

    for idx in range(len(csv_data)):
        video_id = csv_data['YTID'][idx]
        start_time = csv_data['start_seconds'][
            idx] + 1.0  # remove sound files borders
        end_time = csv_data['end_seconds'][
            idx] - 1.0  # remove sound files borders

        sys.stdout.write('\r  Video {}/{} information: {}  {}  {}'.format(
            idx, len(csv_data), video_id, start_time, end_time))
        os.system(
            'youtube-dl "{id}" --quiet --extract-audio --audio-format wav '
            '--output "{folder}/wav/{outname}"'.format(
                id='https://youtube.com/watch?v=' + video_id,
                outname=video_id + '.%(ext)s',
                folder=sound_folder))
        os.system(
            'ffmpeg -loglevel quiet -i "{folder}/wav/{outname}.wav" '
            '-ar "44100" -ss "{start}" -to "{end}" "{folder}/wav/{outname}_out.wav"'
            .format(outname=video_id,
                    start=start_time,
                    end=end_time,
                    folder=sound_folder))
        os.system(
            'mv "{folder}/wav/{outname}_out.wav" "{folder}/wav/{outname}.wav"'.
            format(outname=video_id, folder=sound_folder))
Example #23
    def test_get_stats(self):
        project = Project(project_name, workspace=workspace, epsg=epsg)
        pointcloud = project.add_new_pointcloud(point_cloud_name, file_format='las')

        names_polygons = misc.get_names_and_polygons_in_workspace(workspace,
                                                                  settings={'step': 25, 'x_pos': 3, 'y_pos': 4})
        for data in names_polygons:
            pointcloud.add_tile(data['name'], data['polygon'])

        stats = project.get_stats()
        true_values = {'name': 'Test',
                       'num_pointclouds': 1,
                       'workspace': PosixPath('test_data'),
                       'pointclouds': {'p1':
                                           {'area': 186.82999999999998,
                                            'class_frequency': {2: 17171, 3: 4760},
                                            'num_points': 21931,
                                            'density': 117.38,
                                            'tiles': 2}}}
        self.assertEqual(true_values, stats)
Example #24
    def test_put_updates_package_version_data(self, app, client,
                                              test_media_directory):
        n = Namespace(name='Hello')
        p = Package.create(name='Dog Bog', namespace=n)

        version = '1.0.0'

        bytes1 = b'not a zip'
        response = client.put(
            f"/api/v1/namespaces/{n.slug}/{p.slug}/{version}",
            data={"file": (BytesIO(bytes1), "workstation.zip")})
        assert response.status_code == 200

        response = client.put(
            f"/api/v1/namespaces/{n.slug}/{p.slug}/{version}",
            json={"local": "C:/bobo/"})
        assert response.status_code == 200

        version = PackageVersion.query.one()
        assert version.local == PosixPath("C:/bobo/")
Example #25
def find_jupyter_command(proc: str) -> Optional[JupyterCommand]:
    log = logging.getLogger(__name__).getChild("auto")
    parts = shlex.split(proc)

    if parts[0] in ("pgrep", "xargs"):
        return None

    python = parts[0]
    log.debug("Python candidate = {!r}".format(parts))
    for p in parts[1:]:
        if p.endswith("jupyter-notebook"):
            jupyter = p
            break
        if p.endswith("jupyter-lab"):
            jupyter = str(PosixPath(p).parent / "jupyter-notebook")
            break
    else:
        log.debug("Can't find jupyter notebook in candidate = {}".format(proc))
        return None
    return JupyterCommand(python, jupyter)
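A hypothetical invocation of the function above (the command line is invented; JupyterCommand is the result type from the snippet):

# find_jupyter_command("/usr/bin/python3 /usr/local/bin/jupyter-lab --no-browser")
# -> JupyterCommand(python='/usr/bin/python3',
#                   jupyter='/usr/local/bin/jupyter-notebook')
# i.e. a jupyter-lab entry is mapped to its sibling jupyter-notebook script.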
Example #26
def load_dataset(dataset, training_chip_size, bs):
    """ Load a dataset, create batches and augmentation """

    path = PosixPath(dataset)
    label_path = path / 'label-chips'
    image_path = path / 'image-chips'
    image_files = get_image_files(image_path)
    label_files = get_image_files(label_path)
    get_y_fn = lambda x: label_path / f'{x.stem}{x.suffix}'
    codes = np.array(LABELS)
    src = SegmentationItemList.from_folder(image_path).split_by_fname_file(
        '../valid.txt').label_from_func(get_y_fn, classes=codes)
    # some data augmentation here
    data = src.transform(get_transforms(flip_vert=True,
                                        max_warp=0.,
                                        max_zoom=0.,
                                        max_rotate=180.),
                         size=training_chip_size,
                         tfm_y=True).databunch(bs=bs)
    return data
Example #27
    async def test_get_log_by_name(self, client, logs_path):
        log_filepath = Mock()
        log_filepath.open = mock_open()
        log_filepath.is_file.return_value = True
        log_filepath.stat.return_value = MagicMock()
        log_filepath.stat.return_value.st_size = 1024

        filepath = Mock()
        filepath.name = '190801-13-21-56.log'
        filepath.open = mock_open()
        filepath.with_name.return_value = log_filepath
        with patch.object(package_log, '_get_logs_dir', return_value=logs_path):
            with patch('os.walk'):
                with patch("aiohttp.web.FileResponse", return_value=web.FileResponse(path=filepath)) as f_res:
                    resp = await client.get('/foglamp/package/log/{}'.format(filepath.name))
                    assert 200 == resp.status
                    assert 'OK' == resp.reason
                args, kwargs = f_res.call_args
                assert {'path': PosixPath(pathlib.Path("{}/{}".format(logs_path, filepath.name)))} == kwargs
                assert 1 == f_res.call_count
Example #28
def apksign(p_apk: PosixPath, key_path: str, key_alias: str, key_pass: str,
            ks_pass: str) -> Optional[PosixPath]:
    try:
        signed_apk_name = p_apk.name.replace("-zipaligned.apk", "-signed.apk")
        psigned_apk = p_apk.parent.joinpath(signed_apk_name)

        key_cmd = [
            '--ks', key_path, '--ks-key-alias', key_alias, '--ks-pass',
            'pass:{}'.format(ks_pass), '--key-pass', 'pass:{}'.format(key_pass)
        ]
        cmd = [config.apksigner, 'sign'] + key_cmd + [
            '--out', str(psigned_apk.resolve()),
            str(p_apk.resolve())
        ]
        r = check_output(cmd)
    except Exception as e:
        print("apk signing error: " + str(e))
        return None

    return psigned_apk
Example #29
def gen_private_key(key_size: int,
                    key_out: pathlib.PosixPath) -> rsa.RSAPrivateKey:  # noqa
    # check if the private key file already exists
    if key_out.exists():
        emsg = 'File %s already exists' % key_out
        raise Exception(emsg)

    # generate private key
    key = rsa.generate_private_key(public_exponent=65537, key_size=key_size)

    # write to file
    with open(key_out, "wb") as fp:
        fp.write(
            key.private_bytes(
                encoding=serialization.Encoding.PEM,
                format=serialization.PrivateFormat.TraditionalOpenSSL,
                encryption_algorithm=serialization.NoEncryption()))

    return key
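A minimal usage sketch (assumes the cryptography package's rsa and serialization modules are imported as in the snippet; the output path is invented):

import pathlib

key = gen_private_key(2048, pathlib.PosixPath('/tmp/test_key.pem'))
# raises if /tmp/test_key.pem already exists; otherwise writes a PEM-encoded
# RSA private key there and returns the in-memory key object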
Example #30
def create_parser() -> argparse.ArgumentParser:
    parser = argparse.ArgumentParser(description="Generate the Pants reference markdown files.")
    parser.add_argument(
        "--sync",
        action="store_true",
        default=False,
        help="Whether to sync the generated reference docs to the docsite. "
        "If unset, will generate markdown files to the path in --output "
        "instead.  If set, --api-key must be set.",
    )
    parser.add_argument(
        "--output",
        default=PosixPath(os.path.sep) / "tmp" / "pants_docs" / "help" / "option",
        type=Path,
        help="Path to a directory under which we generate the markdown files. "
        "Useful for viewing the files locally when testing and debugging "
        "the renderer.",
    )
    parser.add_argument("--api-key", help="The readme.io API key to use. Required for --sync.")
    return parser
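The --output default above builds its path from os.path.sep rather than a hard-coded string; a quick check of what it resolves to on a POSIX system:

import os
from pathlib import PosixPath

print(PosixPath(os.path.sep) / "tmp" / "pants_docs" / "help" / "option")
# -> /tmp/pants_docs/help/option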
Example #31
    def read_id_path(cls, classification_run: ClassificationRunId,
                     read_id: ReadId) -> str:
        """Path to the group that contains the classification results for a given readId.

        Parameters
        ----------
        classification_run : ClassificationRunId
            A unique identifier for the classification run that generated these results (e.g. "my_classification_run_04").

        read_id : ReadId
            The readId of the read we want to know the classification results for.

        Returns
        -------
        str
            Path to the group that contains the classification results for a given readId.
        """
        CLASSIFICATION_RUN_PATH = cls.for_classification_run(classification_run)
        path = str(PosixPath(CLASSIFICATION_RUN_PATH, f"{read_id}"))
        return path
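A sketch of the resulting group path (run and read ids invented; the exact prefix depends on for_classification_run, which is not shown):

# read_id_path("my_classification_run_04", "read_0042")
# might return something like
# '/Classifications/my_classification_run_04/read_0042'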
Example #32
def job_run_ks2():

    # Look for flag files in /mnt/s0/Data and sort them in order of date they were created
    flag_files = list(Path(root_path).glob('**/sort_me.flag'))
    flag_files.sort(key=os.path.getmtime)

    # Start with the oldest flag
    session_path = flag_files[0].parent
    session = str(PosixPath(*session_path.parts[4:]))
    flag_files[0].unlink()

    # Instantiate one
    one = ONE(cache_rest=None)

    # sync the probes
    status, sync_files = sync_probes.sync(session_path)

    if not status:
        _logger.error(f'{session}: Could not sync probes')
        return
    else:
        _logger.info(f'{session}: Probes successfully synced')

    # run ks2
    task = ephys.SpikeSorting(session_path, one=one)
    status = task.run()

    if status != 0:
        _logger.error(f'{session}: Could not run ks2')
        return
    else:
        _logger.info(f'{session}: ks2 successfully completed')

        # Run the cell qc
        # qc_file = []

        # Register and upload files to FTP Patcher
        outfiles = task.outputs
        ftp_patcher = FTPPatcher(one=one)
        ftp_patcher.create_dataset(path=outfiles,
                                   created_by=one._par.ALYX_LOGIN)
Example #33
    def test_run_sequence(self) -> None:
        SEQUENCE_CONTENTS = """
---
sequence:
  - experiment_type: labby.tests.test_server.TestExperiment
  - experiment_type: labby.tests.test_server.TestExperiment
"""
        with patch_file_contents(
            "sequence/test.yml", SEQUENCE_CONTENTS
        ), patch_file_contents("output/test/000.csv") as output_0, patch_file_contents(
            "output/test/001.csv"
        ) as output_1, patch(
            "os.makedirs"
        ) as makedirs, patch_time(
            "2020-08-08"
        ):
            self.client.run_sequence("sequence/test.yml")
            while True:
                sequence_status = self.client.experiment_status().sequence_status
                if sequence_status and sequence_status.is_finished():
                    break
                time.sleep(0)

            makedirs.assert_called_with(PosixPath("output/test/"), exist_ok=True)
            self.assertEqual(len(output_0.write.call_args_list), 4)
            output_0.write.assert_has_calls(
                [
                    call("seconds,voltage\n"),
                    call("0.0,15.0\n"),
                    call("0.5,15.0\n"),
                    call("1.0,15.0\n"),
                ]
            )
            output_1.write.assert_has_calls(
                [
                    call("seconds,voltage\n"),
                    call("0.0,15.0\n"),
                    call("0.5,15.0\n"),
                    call("1.0,15.0\n"),
                ]
            )
Example #34
def volume_dict_pairs_to_str(args, keyval_arr):
    key = keyval_arr[0]
    val = keyval_arr[1]
    if key == 'read_only':
        return 'readonly=' + str(val)
    if key == 'source':
        if val.startswith('~'):
            path = PosixPath(str(val)).expanduser()
            return 'source=' + str(os.path.normpath(path.absolute()))
        if val.startswith('.'):
            dirname = os.path.dirname(args.composefile)
            path = PosixPath(dirname, str(val))
            return 'source=' + str(os.path.normpath(path.absolute()))
        return 'source=' + str(val)
    return key + '=' + str(val)
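A hypothetical conversion (args.composefile and the values are invented):

# With args.composefile == '/home/user/project/docker-compose.yml':
# volume_dict_pairs_to_str(args, ['source', './data'])
# -> 'source=/home/user/project/data'
# volume_dict_pairs_to_str(args, ['read_only', True])
# -> 'readonly=True'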
Example #35
def condor_submit(workspace: str | os.PathLike) -> None:
    """Execute condor_submit on the condor.sub file in a workspace.

    Parameters
    ----------
    workspace : str or os.PathLike
        the workspace containing the condor.sub file

    """
    ws = PosixPath(workspace).resolve()
    proc = subprocess.Popen(
        ["condor_submit", str(ws / "condor.sub")],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )
    out, err = proc.communicate()
    try:
        log_out = out.decode("utf-8")
    except AttributeError:
        log_out = str(out)
    log.info(log_out)
Example #36
    def _init_skeleton(
        self,
        input_files: List[str],
        name: Optional[str] = None,
        tree_name: str = "WtLoop_nominal",
        weight_name: str = "weight_nominal",
        label: Optional[int] = None,
        auxlabel: Optional[int] = None,
        TeXlabel: Optional[str] = None,
    ) -> None:
        """Default initialization - should only be called by internal
        staticmethods ``from_root``, ``from_pytables``, ``from_h5``

        Parameters
        ----------
        input_files:
          List of input files
        name:
          Name of the dataset (if none use first file name)
        tree_name:
          Name of tree which this dataset originated from
        weight_name:
          Name of the weight branch
        label:
          Give dataset an integer based label
        auxlabel:
          Give dataset an integer based auxiliary label
        TeXlabel:
          LaTeX form label
        """
        self._files = [PosixPath(f).resolve(strict=True) for f in input_files]
        if name is None:
            self.name = str(self.files[0].parts[-1])
        else:
            self.name = name
        self.weight_name = weight_name
        self.tree_name = tree_name
        self.label = label
        self.auxlabel = auxlabel
        self.TeXlabel = TeXlabel
Example #37
def qc_report_run_info(run_path: Path, qc_report_glob='*_QCreport*.csv'):
    """get general run info from GT's QCreport file"""
    try:
        current_app.logger.info('Parsing QCreport file')
        qc_info = {}
        qc_report_fieldnames = [
            'Project',
            # 'Application',
            'Sequence Protocol',
            'Sample Size',
            'Fastq Files',
            'Date Report',
        ]
        # N.B. line formats of this report are: "Project: 18-weinstock-005,,,,,"

        qc_report_list = list(run_path.glob(qc_report_glob))
        # current_app.logger.debug('qc_report_list: %s', qc_report_list)
        qc_report_csv = qc_report_list[0]
        qcr_lines = read_file_text(qc_report_csv)
        qc_info = {'GT Project': None}  # first item in display

        qcr_rows = [r.split(',') for r in qcr_lines]
        # current_app.logger.debug('length qcr_rows: %s', len(qcr_rows))

        for row in qcr_rows:
            # current_app.logger.debug('qcr_row: %s', str(row))
            for fld in qc_report_fieldnames:
                if fld in row[0]:
                    # current_app.logger.debug('fld: %s', fld)
                    [f1, f2] = row[0].replace(',', '').split(': ')
                    # current_app.logger.debug('f1,f2: %s, %s', f1, f2)
                    qc_info.update({f1: f2})

        qc_info['GT Project'] = qc_info.pop('Project', None)
        current_app.logger.debug('qc_info: %s', qc_info)

    except Exception:
        current_app.logger.exception("reading from run's QCreport csv file!")
    finally:
        return qc_info
Example #38
    def on_epoch_end(self, epoch, logs=None):
        """
        Note: We immediately increment epoch
        from index-from-0 to index-from-1
        to match the TensorFlow output.
        Normally, ckpts/best is the best saved state,
              and ckpts/last is the last saved state.
        Procedure:
        1. Write current state to ckpts/work
        2. Rename ckpts/work to ckpts/epoch/NNN
        3. If best, link ckpts/best to ckpts/epoch/NNN
        4. Link ckpts/last to ckpts/epoch/NNN
        5. Clean up old ckpts according to keep policy
        """

        epoch += 1

        dir_root = PosixPath(self.ckpt_directory).resolve()
        dir_work = dir_root / "ckpts/work"
        dir_best = dir_root / "ckpts/best"  # a soft link
        dir_last = dir_root / "ckpts/last"  # a soft link
        dir_epochs = dir_root / "ckpts/epochs"
        dir_this = dir_epochs / ("%03i" % epoch)

        if not self.save_check(logs, epoch):
            return
        if os.path.exists(dir_this):
            self.debug("remove:  '%s'" % self.relpath(dir_this))
            shutil.rmtree(dir_this)
        os.makedirs(dir_epochs, exist_ok=True)
        os.makedirs(dir_work, exist_ok=True)
        self.write_model(dir_work, epoch)
        self.debug("rename:  '%s' -> '%s'" %
                   (self.relpath(dir_work), self.relpath(dir_this)))
        os.rename(dir_work, dir_this)
        self.epochs.append(epoch)
        if self.epoch_best == epoch:
            self.symlink(dir_this, dir_best)
        self.symlink(dir_this, dir_last)
        self.clean(epoch)
Example #39
def files_on_index(
        index_url: str,
        project_name: str) -> Iterator[Tuple[str, Optional[Tuple[str, str]]]]:
    """Iterate files available on an index for a given project name."""
    project_name = project_name.replace("_", "-")
    base_url = urljoin(index_url, project_name + "/")

    r = requests.get(base_url)
    if r.status_code == 404:
        # project not found on this index
        return
    r.raise_for_status()
    parser = etree.HTMLParser()
    tree = etree.parse(StringIO(r.text), parser)
    for a in tree.iterfind("//a"):
        parsed_url = urlparse(a.get("href"))
        p = PosixPath(parsed_url.path)
        if parsed_url.fragment:
            hash_type, hash_value = parsed_url.fragment.split("=", 2)[:2]
            yield p.name, (hash_type, hash_value)
        else:
            yield p.name, None
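A sketch of what one yielded item looks like (project and file names invented):

# for name, digest in files_on_index("https://pypi.org/simple/", "my_project"):
#     print(name, digest)
# e.g. ('my_project-1.0-py3-none-any.whl', ('sha256', '0123abcd...'))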
Example #40
    def relative_to(self, *other):
        # produce a PosixPath object again
        # note: _from_parsed_parts, _drv, _root and _parts are private pathlib
        # internals and have changed between Python versions
        result = super(PosixPath, self).relative_to(*other)
        return PosixPath._from_parsed_parts(result._drv, result._root, result._parts)
Example #41
def pt_run_dir(tmp_path: pathlib.PosixPath):
    dir_from = pathlib.Path('tests/test-data/spc-and-methanol-run')
    tmp_path = tmp_path.joinpath('spc-and-methanol-run')
    # str needed for Python 3.5
    shutil.copytree(str(dir_from), str(tmp_path))
    return tmp_path
Example #42
def broken_image_file():
    img = Path(__file__).parent / 'fixtureimage.png.broken'
    assert img.exists(), 'image not found'
    return img
Example #43
def png_file():
    img = Path(__file__).parent / 'fixtureimage.png'
    assert img.exists(), 'image not found'
    return img