Code example #1
    @classmethod
    def from_ckpt(cls, ckpt_file, key=None, strict=True):
        """
        Create network from a saved checkpoint.

        :param ckpt_file: File containing saved checkpoint.
        :param key: Function of one argument used to extract the network state_dict (same as the ``key`` argument of the built-in ``sorted``)
        :param strict: Strictly enforce matching keys between the checkpoint and the model.
        :return: Restored model instance
        """
        ckpt_dict = torch.load(ckpt_file)
        ckpt_dict = key(ckpt_dict) if key else ckpt_dict

        kwargs = {}
        manager = nullcontext() if strict else suppress(KeyError)
        with manager:
            # In non-strict mode a missing field raises KeyError, which
            # suppress() swallows; building the dict incrementally keeps
            # kwargs bound (possibly partial) either way.
            for k in cls.__dataclass_fields__:
                kwargs[k] = ckpt_dict[k]

        model = cls(**kwargs)
        model.load_state_dict(ckpt_dict, strict=strict)
        return model
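
A minimal usage sketch for the pattern above (Net and the checkpoint layout are hypothetical, not from the source); the key callable unwraps a nested checkpoint dict before the dataclass fields are matched:

    # Sketch: Lightning-style checkpoints often nest weights under 'state_dict'.
    model = Net.from_ckpt('model.ckpt', key=lambda d: d['state_dict'], strict=False)
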
Code example #2
File: remotejit.py Project: guilhermeleobas/rbc
    def call(self, fullname: str, arguments: tuple) -> Data:
        """Call JIT compiled function

        Parameters
        ----------
        fullname : str
          Specify the full name of the function, in the form
          "<name><mangled signature>".
        arguments : tuple
          Specify the arguments to the function.
        """
        # if we are using a tracing allocator, automatically detect memory leaks
        # at each call.
        if self.use_tracing_allocator:
            leak_detector = tracing_allocator.new_leak_detector()
        else:
            leak_detector = nullcontext()
        with leak_detector:
            return self._do_call(fullname, arguments)
Code example #3
def _(self):
    to_clear = self.value
    cm = nullcontext()
    if isinstance(to_clear, elements.Access):
        base, to_clear = _access_all_but_last(to_clear)
        if isinstance(to_clear, elements.Reduce):
            to_clear = context(to_clear)
        cm = context.now_access(base)
    if isinstance(to_clear, elements.Reference):
        to_clear = to_clear.value
    with cm:
        with suppress(LookupError):
            try:
                if isinstance(to_clear, elements.RawAccessor):
                    context.accessing.raw_clear(context(to_clear))
                else:
                    del context[to_clear]
            except TypeError:
                raise RollitTypeError()
Code example #4
File: utils.py Project: kijanac/materia
    def __call__(self):
        mkdir_safe(self.work_dir)

        if self.temp:
            cm = tempfile.TemporaryDirectory(dir=self.work_dir)
        else:
            cm = contextlib.nullcontext(self.work_dir)

        with cm as wd:
            try:
                old_temp, self.temp = copy.copy(self.temp), False
                old_work_dir, self.work_dir = copy.copy(self.work_dir), wd

                yield _IO(
                    wd,
                    expand(self.inp, wd) if self.inp is not None else None,
                    expand(self.out, wd) if self.out is not None else None,
                )
            finally:
                self.temp, self.work_dir = old_temp, old_work_dir
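
The core trick in example #4, running one `with` body over either a throwaway directory or a persistent one, reduces to a few lines. A sketch; process() is a hypothetical placeholder for the real work:

    import contextlib
    import tempfile

    def run_in(work_dir, temp=False):
        # TemporaryDirectory cleans up on exit; nullcontext(work_dir) just
        # yields the given path back and leaves it in place.
        cm = tempfile.TemporaryDirectory(dir=work_dir) if temp \
            else contextlib.nullcontext(work_dir)
        with cm as wd:
            process(wd)  # hypothetical placeholder
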
Code example #5
File: test_valuespec.py Project: PLUTEX/checkmk
def test_host_address_validate_value(
    value_type: ValueType,
    value: str,
    allow_host_name: bool,
    allow_ipv4_address: bool,
    allow_ipv6_address: bool,
) -> None:
    expected_valid = (
        (value_type is ValueType.name and allow_host_name)
        or (value_type is ValueType.ipv4 and allow_ipv4_address)
        or (value_type is ValueType.ipv6 and allow_ipv6_address)
    )
    # mypy is wrong about the nullcontext object type :-(
    with pytest.raises(MKUserError) if not expected_valid else nullcontext():  # type: ignore[attr-defined]
        vs.HostAddress(
            allow_host_name=allow_host_name,
            allow_ipv4_address=allow_ipv4_address,
            allow_ipv6_address=allow_ipv6_address,
            allow_empty=False,
        ).validate_value(value, "varprefix")
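
Example #5 is an instance of a common parametrized-test idiom: choose pytest.raises(...) or nullcontext() up front so one `with` body covers both the valid and the invalid cases. A stripped-down sketch with a hypothetical validate() under test:

    from contextlib import nullcontext
    import pytest

    @pytest.mark.parametrize('value, valid', [('ok', True), ('bad', False)])
    def test_validate(value, valid):
        # Expect a ValueError only for invalid input; otherwise expect nothing.
        with nullcontext() if valid else pytest.raises(ValueError):
            validate(value)  # hypothetical function under test
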
Code example #6
    def save_binary(self, file: Union[str, BinaryIO]) -> None:
        """Save array elements into a binary file.

        Compared to :meth:`save_json`, it is faster and the file is smaller, but not human-readable.

        :param file: File or filename to which the data is saved.
        """
        if hasattr(file, 'write'):
            file_ctx = nullcontext(file)
        else:
            file_ctx = open(file, 'wb')

        with file_ctx as fp:
            dap = DocumentArrayProto()
            if self._docs_proto:
                if isinstance(self._docs_proto[0], DocumentProto):
                    dap.docs.extend(self._docs_proto)
                else:
                    dap.docs.extend([d.proto for d in self._docs_proto])
            fp.write(dap.SerializeToString())
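
Examples #6, #7, #9 and #29 all rely on the same accept-a-path-or-an-open-file idiom: an already-open handle is wrapped in nullcontext so the `with` statement only closes files the function opened itself. In isolation (a sketch, not the jina API):

    from contextlib import nullcontext

    def write_payload(file, payload: bytes):
        # A file-like object passes through untouched; a path is opened here
        # and therefore closed by the with statement on exit.
        ctx = nullcontext(file) if hasattr(file, 'write') else open(file, 'wb')
        with ctx as fp:
            fp.write(payload)
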
Code example #7
 def parse_file(cls, file_path_or_fp: Union[str, IO], device_type: str, device_id: str, **parser_kwargs) \
         -> Tuple[List[LogRecord], Optional[float], Optional[float]]:
     """
     Class method to parse records from a log file.
     :param file_path_or_fp: a file path, or an already-opened IO file object
     :param device_type: the device type
     :param device_id: the device id
     :param parser_kwargs: additional keyword arguments for log lines parser
     :return: a tuple with (log_records, average_records_interval, average_sync_interval)
     """
     file_context: ContextManager[IO]
     if isinstance(file_path_or_fp, str):
         file_context = open(file_path_or_fp, 'r')
     else:
         file_context = nullcontext(file_path_or_fp)
     with file_context as fp:
         return cls.parse_iter(fp,
                               device_type=device_type,
                               device_id=device_id,
                               **parser_kwargs)
Code example #8
 def _cfn(*args, **kwargs):
     import os
     os.environ.update({
         "LOCAL_RANK": str(i),
         "LOCAL_WORLD_SIZE": str(nprocs)
     })
     try:
         import sys
         from mpify import global_imports
         # import env into '__main__', which can be in a subprocess here.
         g = sys.modules['__main__'].__dict__
         global_imports(imports.split('\n'), g)
         g.update(env)
         with cm or nullcontext():
             r = fn(*args, **kwargs)
         if l: l[i] = r
         return r
     finally:
         # Pop explicitly: a lazy map() here would never execute its side effects.
         for k in ("LOCAL_RANK", "LOCAL_WORLD_SIZE"):
             os.environ.pop(k, None)
Code example #9
    @staticmethod
    def load(file: Union[str, TextIO]) -> 'DocumentArray':
        """Load array elements from a JSON file.

        :param file: File or filename from which the data is loaded.

        :return: a DocumentArray object
        """

        if hasattr(file, 'read'):
            file_ctx = nullcontext(file)
        else:
            file_ctx = open(file)

        from jina import Document

        da = DocumentArray()
        with file_ctx as fp:
            for v in fp:
                da.append(Document(v))
        return da
Code example #10
 def on_draw_event(self, widget, ctx):
     """GtkDrawable draw event."""
     with (self.toolbar._wait_cursor_for_draw_cm() if self.toolbar
           else nullcontext()):
         self._renderer.set_context(ctx)
         allocation = self.get_allocation()
         Gtk.render_background(
             self.get_style_context(), ctx,
             allocation.x, allocation.y,
             allocation.width, allocation.height)
         self._render_figure(allocation.width, allocation.height)
Code example #11
File: backend_agg.py Project: QuLogic/matplotlib
 def draw(self):
     """
     Draw the figure using the renderer.
     """
     self.renderer = self.get_renderer(cleared=True)
     # Acquire a lock on the shared font cache.
     with RendererAgg.lock, \
          (self.toolbar._wait_cursor_for_draw_cm() if self.toolbar
           else nullcontext()):
         self.figure.draw(self.renderer)
         # A GUI class may need to update a window using this draw, so
         # don't forget to call the superclass.
         super().draw()
Code example #12
    def __init__(self, *args, **kwargs):
        if import_exc is not None:
            raise import_exc

        super(ThreadSafeBus, self).__init__(Bus(*args, **kwargs))

        # now, BusABC.send_periodic() does not need a lock anymore, but the
        # implementation still requires a context manager
        self.__wrapped__._lock_send_periodic = nullcontext()

        # init locks for sending and receiving separately
        self._lock_send = RLock()
        self._lock_recv = RLock()
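
In example #12, nullcontext() acts as a null object standing in for a lock: BusABC.send_periodic() still executes `with self._lock_send_periodic:`, but that statement is now a no-op because the wrapper holds its own RLocks. The substitution in miniature (a sketch, not the python-can API):

    from contextlib import nullcontext
    from threading import RLock

    class Worker:
        def __init__(self, thread_safe: bool):
            # Call sites say `with self._lock:` either way; the lock is
            # either real or an inert placeholder, with no branching there.
            self._lock = RLock() if thread_safe else nullcontext()

        def work(self):
            with self._lock:
                ...  # critical section
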
Code example #13
def start_active_span_follows_from(
    operation_name: str,
    contexts: Collection,
    child_of=None,
    start_time: Optional[float] = None,
    *,
    inherit_force_tracing=False,
    tracer=None,
):
    """Starts an active opentracing span, with additional references to previous spans

    Args:
        operation_name: name of the operation represented by the new span
        contexts: the previous spans to inherit from

        child_of: optionally override the parent span. If unset, the currently active
           span will be the parent. (If there is no currently active span, the first
           span in `contexts` will be the parent.)

        start_time: optional override for the start time of the created span. Seconds
            since the epoch.

        inherit_force_tracing: if set, and any of the previous contexts have had tracing
           forced, the new span will also have tracing forced.
        tracer: override the opentracing tracer. By default the global tracer is used.
    """
    if opentracing is None:
        return contextlib.nullcontext()  # type: ignore[unreachable]

    references = [opentracing.follows_from(context) for context in contexts]
    scope = start_active_span(
        operation_name,
        child_of=child_of,
        references=references,
        start_time=start_time,
        tracer=tracer,
    )

    if inherit_force_tracing and any(
            is_context_forced_tracing(ctx) for ctx in contexts):
        force_tracing(scope.span)

    return scope
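
Example #13 returns nullcontext() when opentracing is missing, so callers can write `with start_active_span_follows_from(...)` unconditionally. The shape of that contract, reduced to a sketch (real_span() is a hypothetical placeholder):

    import contextlib

    def maybe_span(enabled: bool):
        # Callers always receive a context manager; with tracing disabled it
        # is simply inert, so no `if tracing:` guards are needed at call sites.
        return real_span() if enabled else contextlib.nullcontext()
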
Code example #14
def test_theme_colors(pg_backend, theme, monkeypatch, tmp_path):
    """Test that theme colors propagate properly."""
    darkdetect = pytest.importorskip('darkdetect')
    monkeypatch.setenv('_MNE_FAKE_HOME_DIR', str(tmp_path))
    monkeypatch.delenv('MNE_BROWSER_THEME', raising=False)
    # make it seem like the system is always in light mode
    monkeypatch.setattr(darkdetect, 'theme', lambda: 'light')
    raw = RawArray(np.zeros((1, 1000)), create_info(1, 1000., 'eeg'))
    _, api = _check_qt_version(return_api=True)
    if api in ('PyQt6', 'PySide6') and theme == 'dark':
        ctx = pytest.warns(RuntimeWarning, match='not yet supported')
        return_early = True
    else:
        ctx = nullcontext()
        return_early = False
    with ctx:
        fig = raw.plot(theme=theme)
    if return_early:
        return  # we could add a ton of conditionals below, but KISS
    is_dark = _qt_is_dark(fig)
    # on Darwin these checks get complicated, so don't bother for now
    if sys.platform != 'darwin':
        if theme == 'dark':
            assert is_dark, theme
        elif theme == 'light':
            assert not is_dark, theme
        else:
            got_dark = darkdetect.theme().lower() == 'dark'
            assert is_dark is got_dark

    def assert_correct_darkness(widget, want_dark):
        __tracebackhide__ = True  # noqa
        # This should work, but it just picks up the parent in the errant case!
        bgcolor = widget.palette().color(widget.backgroundRole()).getRgbF()[:3]
        dark = rgb_to_hls(*bgcolor)[1] < 0.5
        assert dark == want_dark, f'{widget} dark={dark} want_dark={want_dark}'
        # ... so we use a more direct test
        colors = _pixmap_to_ndarray(widget.grab())[:, :, :3]
        dark = colors.mean() < 0.5
        assert dark == want_dark, f'{widget} dark={dark} want_dark={want_dark}'

    for widget in (fig.mne.toolbar, fig.statusBar()):
        assert_correct_darkness(widget, is_dark)
Code example #15
    def forward(self, *args, **kargs):
        class_pred = None
        self.step_count += 1

        # No need to track gradients if not finetuning backbone
        update_backbone = self.finetune_classifier or self.finetune_selfsup
        cm = nullcontext() if update_backbone else torch.no_grad()

        # Get model output
        with cm:
            feats, dense_feats, s_out = self.backbone(*args, **kargs)

        if self.dense_pred:
            # Flatten dense per-pixel features for FC/MLP processing
            feats = dense_feats.permute(0, 2, 3,
                                        1).reshape(-1, dense_feats.shape[1])

        if not self.finetune_selfsup and s_out is not None:
            if isinstance(s_out, (list, tuple)):
                s_out = [s.detach() for s in s_out]
            else:
                s_out = s_out.detach()

        if self.run_classifier:
            warming_up = self.step_count < self.warmup_fc
            if warming_up or not self.finetune_classifier:
                feats = feats.detach()

            fc_pred = [fc(feats) for fc in self.out_fc]
            mlp_pred = [mlp(feats) for mlp in self.out_mlp]
            class_pred = [fc_pred, mlp_pred]

        if self.dense_pred:
            # Reshape flattened output to dense per-pixel output
            feats = dense_feats
            if class_pred is not None:
                d = feats.shape
                class_pred = [[
                    p.reshape(d[0], d[2], d[3], -1).permute(0, 3, 1, 2)
                    for p in tmp_pred
                ] for tmp_pred in class_pred]

        return class_pred, s_out, feats
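
Examples #15, #17, #27 and #28 share the PyTorch idiom of precomputing a context manager to toggle gradient tracking. Its essence, as a sketch for a generic module:

    from contextlib import nullcontext
    import torch

    def encode(module: torch.nn.Module, x: torch.Tensor, trainable: bool):
        # torch.no_grad() skips autograd bookkeeping for a frozen module;
        # nullcontext() leaves gradient tracking on while it is being trained.
        cm = nullcontext() if trainable else torch.no_grad()
        with cm:
            return module(x)
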
Code example #16
def patch_bond_bridge(enabled: bool = True,
                      return_value: dict | None = None,
                      side_effect=None):
    """Patch Bond API bridge endpoint."""
    if not enabled:
        return nullcontext()

    if return_value is None:
        return_value = {
            "name": "bond-name",
            "location": "bond-location",
            "bluelight": 127,
        }

    return patch(
        "homeassistant.components.bond.Bond.bridge",
        return_value=return_value,
        side_effect=side_effect,
    )
Code example #17
    def infer(self, sentence, is_test=False, is_comp=False):
        cm = torch.no_grad() if is_comp else nullcontext()

        with cm:
            word_idx_tensor, tag_idx_tensor, true_tree_heads = sentence

            if self.dropout and not is_test:
                for i, word in enumerate(word_idx_tensor[0]):
                    actual_word_idx = word.item()
                    if actual_word_idx != self.unknown_word_idx and actual_word_idx != self.root_idx:
                        freq_of_word = self.word_dict[self.word_list[actual_word_idx]]
                        prob_word = float(self.dropout) / (self.dropout + freq_of_word)
                        if random.random() < prob_word:
                            word_idx_tensor[0, i] = self.unknown_word_idx
                            tag_idx_tensor[0, i] = self.unknown_tag_idx

            # Pass word_idx and tag_idx through their embedding layers
            tag_embbedings = self.tag_embedder(tag_idx_tensor.to(self.device))
            word_embbedings = self.word_embedder(word_idx_tensor.to(self.device))

            # Concat both embedding outputs
            input_embeddings = torch.cat((word_embbedings, tag_embbedings), dim=2)

            # Get Bi-LSTM hidden representation for each word+tag in sentence
            lstm_output, _ = self.encoder(input_embeddings.view(1, input_embeddings.shape[1], -1))

            # Get score for each possible edge in the parsing graph, construct score matrix
            scores = self.mlp_edge_scorer(lstm_output)

            # Use Chu-Liu-Edmonds to get the predicted parse tree T' given the calculated score matrix
            seq_len = lstm_output.size(1)
            predicted_tree_heads, _ = self.decoder(scores.data.cpu().numpy(), seq_len, False)

            if not is_comp:
                true_tree_heads = true_tree_heads.squeeze(0)
                # Calculate the negative log likelihood loss described above
                probs_logged = self.log_soft_max(scores)
                loss = KiperwasserDependencyParser.nll_loss(probs_logged, true_tree_heads, self.device)
                return loss, torch.from_numpy(predicted_tree_heads)

            else:
                return torch.from_numpy(predicted_tree_heads)
Code example #18
def test_missing_rdkit_module_error():
    """Test if different functions return correct error when *rdkit* is not available"""
    if find_spec("rdkit") is not None:
        context = mock.patch.dict(sys.modules, {"rdkit": None})
    else:
        context = nullcontext()

    expected_msg = "Conda package 'rdkit' is required for this functionality."
    with context:
        reload(matchms.utils)
        mol_input = "C[Si](Cn1cncn1)(c1ccc(F)cc1)"
        with pytest.raises(ImportError) as msg:
            _ = matchms.utils.mol_converter(mol_input, "smiles", "inchikey")
        assert expected_msg in str(
            msg.value), "Expected different ImportError."

        with pytest.raises(ImportError) as msg:
            _ = matchms.utils.is_valid_inchi("test")
        assert expected_msg in str(
            msg.value), "Expected different ImportError."

        with pytest.raises(ImportError) as msg:
            _ = matchms.utils.is_valid_smiles("test")
        assert expected_msg in str(
            msg.value), "Expected different ImportError."

        with pytest.raises(ImportError) as msg:
            _ = matchms.utils.derive_fingerprint_from_inchi(
                mol_input, "test", 0)
        assert expected_msg in str(
            msg.value), "Expected different ImportError."

        with pytest.raises(ImportError) as msg:
            _ = matchms.utils.derive_fingerprint_from_smiles(
                mol_input, "test", 0)
        assert expected_msg in str(
            msg.value), "Expected different ImportError."

        with pytest.raises(ImportError) as msg:
            _ = matchms.utils.mol_to_fingerprint(mol_input, "test", 0)
        assert expected_msg in str(
            msg.value), "Expected different ImportError."
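
The trick in example #18 is worth isolating: mock.patch.dict(sys.modules, {'rdkit': None}) makes `import rdkit` raise ImportError even when the package is installed, while nullcontext() keeps the same test body valid when it genuinely is absent. A compact sketch:

    import sys
    from contextlib import nullcontext
    from importlib.util import find_spec
    from unittest import mock

    # Patch only if rdkit actually exists; otherwise the import fails anyway.
    ctx = mock.patch.dict(sys.modules, {'rdkit': None}) \
        if find_spec('rdkit') is not None else nullcontext()
    with ctx:
        try:
            import rdkit  # noqa: F401 (raises ImportError under the patch)
        except ImportError:
            print('rdkit unavailable, as the test expects')
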
Code example #19
File: cli.py Project: livestalker/smtpdev
def main(smtp_host, smtp_port, web_host, web_port, develop, debug, maildir):
    if debug:
        logging.basicConfig(level=logging.DEBUG)
    else:
        logging.basicConfig(level=logging.INFO)

    logger.info("SMTP server is running on %s:%s", smtp_host, smtp_port)
    logger.info("Web server is running on %s:%s", web_host, web_port)

    if develop:
        logger.info("Running in developer mode")

    dir_context = TemporaryDirectory if maildir is None else lambda: nullcontext(
        maildir)

    with dir_context() as maildir_path:
        maildir_path = pathlib.Path(maildir_path)
        maildir_path.mkdir(parents=True, exist_ok=True)

        logger.info("Mail directory: %s", maildir_path)

        config = Configuration(
            smtp_host=smtp_host,
            smtp_port=smtp_port,
            web_host=web_host,
            web_port=web_port,
            develop=develop,
            debug=debug,
        )

        maildir = Maildir(maildir_path / "maildir")
        mailbox = MailboxHandler(maildir_path / "maildir")

        controller = Controller(mailbox,
                                hostname=config.smtp_host,
                                port=config.smtp_port)
        web_server = WebServer(config, maildir)
        mailbox.register_message_observer(web_server)

        controller.start()
        web_server.start()
        controller.stop()
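
Note the twist in example #19: the branches hold a context-manager factory rather than an instance, `TemporaryDirectory` (the class itself) on one side and `lambda: nullcontext(maildir)` on the other, so a fresh manager is constructed at the single `with dir_context()` site. Minimally:

    from contextlib import nullcontext
    from tempfile import TemporaryDirectory

    def pick_dir(maildir=None):
        # Both branches are zero-argument callables returning a context
        # manager, deferring construction to the with statement below.
        factory = TemporaryDirectory if maildir is None else (lambda: nullcontext(maildir))
        with factory() as path:
            return str(path)
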
Code example #20
    def inline_function_call_into_circuit(
            self, fcall: FunctionCallExpr) -> Union[Expression, TupleExpr]:
        """
        Inline an entire function call into the current circuit.

        :param fcall: Function call to inline
        :return: Expression (1 retval) / TupleExpr (multiple retvals) with return value(s)
        """
        assert isinstance(fcall.func,
                          LocationExpr) and fcall.func.target is not None
        fdef = fcall.func.target
        with self._remapper.remap_scope(fcall.func.target.body):
            # Compiler-generated '<stmt_fct>' wrappers get no INLINED indent block.
            inline_ctx = nullcontext() if fcall.func.target.idf.name == '<stmt_fct>' \
                else self.circ_indent_block(f'INLINED {fcall.code()}')
            with inline_ctx:
                # Assign all arguments to temporary circuit variables which are designated as the current version of the parameter idfs
                for param, arg in zip(fdef.parameters, fcall.args):
                    self.phi.append(
                        CircComment(f'ARG {param.idf.name}: {arg.code()}'))
                    with self.circ_indent_block():
                        self.create_new_idf_version_from_value(param.idf, arg)

                # Visit the untransformed target function body to include all statements in this circuit
                inlined_body = deep_copy(fdef.original_body,
                                         with_types=True,
                                         with_analysis=True)
                self._circ_trafo.visit(inlined_body)
                fcall.statement.pre_statements += inlined_body.pre_statements

                # Create TupleExpr with location expressions corresponding to the function return values as elements
                ret_idfs = [
                    self._remapper.get_current(vd.idf)
                    for vd in fdef.return_var_decls
                ]
                ret = TupleExpr([
                    IdentifierExpr(idf.clone()).as_type(idf.t)
                    for idf in ret_idfs
                ])
        if len(ret.elements) == 1:
            # Unpack 1-length tuple
            ret = ret.elements[0]
        return ret
Code example #21
def _index(path: Path, opts: Options) -> Results:
    logger = get_logger()

    cores = use_cores()
    if cores is None:  # do not use cores
        # todo use ExitStack instead?
        pool = nullcontext()
        mapper = map  # dummy pool
    else:
        workers = None if cores == 0 else cores
        pool = Pool(workers)  # type: ignore
        mapper = pool.map  # type: ignore

    # iterate over resolved paths, to avoid duplicates
    def rit() -> Iterable[Path]:
        it = traverse(path, follow=opts.follow, ignore=IGNORE)
        for p in it:
            if any(fnmatch(str(p), o) for o in opts.ignored):
                # TODO not sure if should log here... might end up with quite a bit of logs
                logger.debug('ignoring %s: user ignore rules', p)
                continue
            if any(i in p.parts for i in
                   IGNORE):  # meh, not very efficient.. pass to traverse??
                logger.debug('ignoring %s: default ignore rules', p)
                continue

            p = p.resolve()
            if not os.path.exists(p):
                logger.debug('ignoring %s: broken symlink?', p)
                continue

            yield p

    from more_itertools import unique_everseen
    it = unique_everseen(rit())

    with pool:
        for r in mapper(_index_file_aux, it, itertools.repeat(opts)):
            if isinstance(r, Exception):
                yield r
            else:
                yield from r
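
Example #21 pairs nullcontext() with the built-in map so the serial path and the multiprocessing path share one code shape. Its skeleton (a sketch; on spawn-based platforms guard the entry point with `if __name__ == '__main__':`):

    from contextlib import nullcontext
    from multiprocessing import Pool

    def run(items, cores=None):
        if cores is None:
            # Serial fallback: nothing to enter, nothing to clean up.
            pool, mapper = nullcontext(), map
        else:
            pool = Pool(cores)
            mapper = pool.map
        with pool:
            return list(mapper(str, items))  # str stands in for real work
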
Code example #22
    def __init__(self, initial_value, *args, dtype=None, **kwargs):
        """Overrides tf.Variable to fix VarHandleOp placements."""
        # Variables by default use the current device scope for placement. This
        # wrapper has them follow the initial value's placement instead (which will
        # be the DTensor device if the initial value has a layout).
        if callable(initial_value):
            initial_value = initial_value()

        initial_value = ops.convert_to_tensor(initial_value, dtype=dtype)
        variable_device = initial_value.device
        self._save_as_bf16 = False
        # TODO(b/159035705): The following code enables variable creation inside
        # a tf.function. However, it requires a global dtensor device.
        # if not variable_device and not tf.executing_eagerly():
        #   try:
        #     initial_value.op.get_attr("_layout")
        #   except ValueError:
        #     pass
        #   else:
        #     # The initial value is a DTensor, but because the DTensor device is
        #     # only active during eager execution at the moment we need to
        #     # translate that into a placement for the eager VarHandleOp.
        #     variable_device = _dtensor_device().name
        with ops.device(variable_device):
            # If initial tensor assigned to DVariable is DTensor, record the layout of
            # the resource so that this can be queried.
            self.layout = None
            if context.executing_eagerly():
                try:
                    self.layout = api.fetch_layout(initial_value)
                except (errors.InvalidArgumentError, errors.NotFoundError):
                    # For Non-DTensor tensors, fetch layout results in expected
                    # InvalidArgument or NotFoundError depending on whether the API
                    # is called within DTensor device scope or not.
                    self.layout = None
                    pass
            mesh = self.layout.mesh if self.layout else None
            with api.run_on(mesh) if mesh else contextlib.nullcontext():
                super(DVariable, self).__init__(initial_value,
                                                *args,
                                                dtype=dtype,
                                                **kwargs)
Code example #23
def test_autofmt_xdate(which):
    date = [
        '3 Jan 2013', '4 Jan 2013', '5 Jan 2013', '6 Jan 2013', '7 Jan 2013',
        '8 Jan 2013', '9 Jan 2013', '10 Jan 2013', '11 Jan 2013',
        '12 Jan 2013', '13 Jan 2013', '14 Jan 2013'
    ]

    time = [
        '16:44:00', '16:45:00', '16:46:00', '16:47:00', '16:48:00', '16:49:00',
        '16:51:00', '16:52:00', '16:53:00', '16:55:00', '16:56:00', '16:57:00'
    ]

    angle = 60
    minors = [1, 2, 3, 4, 5, 6, 7]

    x = mdates.datestr2num(date)
    y = mdates.datestr2num(time)

    fig, ax = plt.subplots()

    ax.plot(x, y)
    ax.yaxis_date()
    ax.xaxis_date()

    ax.xaxis.set_minor_locator(AutoMinorLocator(2))
    with warnings.catch_warnings():
        warnings.filterwarnings(
            'ignore',
            'FixedFormatter should only be used together with FixedLocator')
        ax.xaxis.set_minor_formatter(FixedFormatter(minors))

    with (pytest.warns(mpl.MatplotlibDeprecationWarning)
          if which is None else nullcontext()):
        fig.autofmt_xdate(0.2, angle, 'right', which)

    if which in ('both', 'major', None):
        for label in fig.axes[0].get_xticklabels(False, 'major'):
            assert int(label.get_rotation()) == angle

    if which in ('both', 'minor'):
        for label in fig.axes[0].get_xticklabels(True, 'minor'):
            assert int(label.get_rotation()) == angle
Code example #24
def test_azure_public_container():
    for error, accountname in [
        (
            None,
            "tartanair",
        ),  # https://azure.microsoft.com/en-us/services/open-datasets/catalog/tartanair-airsim-simultaneous-localization-and-mapping/
        (bf.Error,
         "accountname"),  # an account that exists but that is not public
        (FileNotFoundError, AS_INVALID_ACCOUNT),  # account that does not exist
    ]:
        ctx = contextlib.nullcontext()
        if error is not None:
            ctx = pytest.raises(error)
        with ctx:
            with bf.BlobFile(
                    f"https://{accountname}.blob.core.windows.net/tartanair-release1/abandonedfactory/Easy/P000/image_left/000000_left.png",
                    "rb",
            ) as f:
                contents = f.read()
                assert contents.startswith(AZURE_PUBLIC_URL_HEADER)
Code example #25
File: _setuplib.py Project: kamidzi/bcpc-build
    def download_file(self, url, keep=True, **kwargs):
        if keep:
            # mkdtemp() is never cleaned up, so the download outlives the with
            # block; nullcontext() simply passes the path through unchanged.
            cm = nullcontext(tempfile.mkdtemp())
        else:
            cm = tempfile.TemporaryDirectory()

        with cm as tmpdir:
            try:
                local_filename = os.path.join(tmpdir, url.split('/')[-1])
                r = self.get(url, stream=True)
                try:
                    with open(local_filename, 'wb') as f:
                        shutil.copyfileobj(r.raw, f)
                except OSError as e:
                    raise RuntimeError('Download of "{}" to {} failed'
                                       ''.format(url, local_filename)) from e

                yield os.path.abspath(local_filename)
            finally:
                pass
Code example #26
File: flatpakutils.py Project: eocanha/webkit
 def execute_command(self, args, stdout=None, stderr=None, env=None, keep_signals=True):
     if keep_signals:
         ctx_manager = nullcontext()
     else:
         ctx_manager = disable_signals()
     _log.debug('Running: %s\n' % ' '.join(string_utils.decode(arg) for arg in args))
     result = 0
     with ctx_manager:
         try:
             result = subprocess.check_call(args, stdout=stdout, stderr=stderr, env=env)
         except subprocess.CalledProcessError as err:
             if self.verbose:
                 cmd = ' '.join(string_utils.decode(arg) for arg in err.cmd)
                 message = "'%s' returned a non-zero exit code." % cmd
                 if stderr:
                     with open(stderr.name, 'r') as stderrf:
                         message += " Stderr: %s" % stderrf.read()
                 Console.error_message(message)
             return err.returncode
     return result
Code example #27
 def sample(self, n, with_grad=False):
     """
     Samples from the Generator.
     :param n: Number of instance-space samples to generate.
     :param with_grad: Whether the returned samples should track
     gradients or not. I.e., whether they should be part of the generator's
     computation graph or standalone tensors.
     :return: A batch of samples, shape (N,C,H,W).
     """
     device = next(self.parameters()).device
     # TODO: Sample from the model.
     # Generate n latent space samples and return their reconstructions.
     # Don't use a loop.
     # ====== YOUR CODE: ======
     import contextlib
     with torch.no_grad() if not with_grad else contextlib.nullcontext():
         z = torch.randn((n, self.z_dim), device=device, requires_grad=with_grad)
         samples = self.forward(z)
     # ========================
     return samples
Code example #28
    def forward(self,
                text,
                labels,
                prediction_network=None,
                update_memory=False,
                no_grad=False):
        if prediction_network is None:
            prediction_network = self.pn
        input_dict = self.pn.encode_text(text)
        context_manager = torch.no_grad() if no_grad else nullcontext()
        with context_manager:
            representation = prediction_network(input_dict,
                                                out_from="transformers")
            logits = prediction_network(representation, out_from="linear")

        if update_memory:
            self.memory.add_entry(embeddings=representation.detach(),
                                  labels=labels,
                                  query_result=None)
        return {"representation": representation, "logits": logits}
Code example #29
File: document.py Project: JoanFM/jina
    def save_binary(self, file: Union[str, BinaryIO]) -> None:
        """Save array elements into a binary file.

        Compared to :meth:`save_json`, it is faster and the file is smaller, but not human-readable.

        :param file: File or filename to which the data is saved.
        """
        if hasattr(file, 'write'):
            file_ctx = nullcontext(file)
        else:
            if __windows__:
                file_ctx = open(file, 'wb', newline='')
            else:
                file_ctx = open(file, 'wb')

        with file_ctx as fp:
            dap = jina_pb2.DocumentArrayProto()
            if self._pb_body:
                dap.docs.extend(self._pb_body)
            fp.write(dap.SerializePartialToString())
Code example #30
    def parse(self, str_input: str, normalize=True) -> torch.Tensor:
        if self.training:
            logging.warning("parse() is meant to be called in eval mode.")

        if normalize and self.text_normalizer_call is not None:
            str_input = self.text_normalizer_call(str_input, **self.text_normalizer_call_kwargs)

        if self.learn_alignment:
            eval_phon_mode = contextlib.nullcontext()
            if hasattr(self.vocab, "set_phone_prob"):
                eval_phon_mode = self.vocab.set_phone_prob(prob=1.0)

            # Disable mixed g2p representation if necessary
            with eval_phon_mode:
                tokens = self.parser(str_input)
        else:
            tokens = self.parser(str_input)

        x = torch.tensor(tokens).unsqueeze_(0).long().to(self.device)
        return x
Code example #31
async def add_process_time_header(request: fa.Request, call_next):
    context_manager = nullcontext()
    if "profile" in request.query_params and "profile-type" in request.query_params:
        profile = ManagerProfile()
        type_profile = request.query_params.get("profile-type")
        profiler = profile.factory(type_profile, sync=False)
        context_manager = profiler.start()

    with context_manager:
        response = await call_next(request)

    if "profile" in request.query_params and "profile-type" in request.query_params:
        is_docker = os.environ.get("RUNNING_DOCKER_CONTAINER", False)
        path_profile = request.query_params.get("profile")
        render_browser = "render-browser" in request.query_params
        profiler.stop_and_write(
            path_profile, is_docker, api="api_scoring", render_browser=render_browser
        )

    return response
Code example #32
File: tool.py Project: Arusekk/snakeoil
    def main(self):
        """Execute the main script function."""
        exitstatus = -10

        # ignore broken pipes
        signal(SIGPIPE, SIG_DFL)

        # suppress warning level log output and below in quiet mode
        if self.parser.verbosity >= 0 or self.parser.debug:
            suppress_warnings = nullcontext()
        else:
            suppress_warnings = suppress_logging(logging.WARNING)

        try:
            with suppress_warnings:
                self.options, func = self.parse_args(args=self.args,
                                                     namespace=self.options)
                exitstatus = func(self.options, self.out, self.err)
        except SystemExit as e:
            # handle argparse or third-party modules calling sys.exit internally
            exitstatus = e.code
        except KeyboardInterrupt:
            self._errfile.write('keyboard interrupted- exiting')
            if self.parser.debug:
                self._errfile.write('\n')
                traceback.print_exc()
            signal(SIGINT, SIG_DFL)
            os.killpg(os.getpgid(0), SIGINT)
        except Exception as e:
            # handle custom execution-related exceptions
            self.out.flush()
            self.err.flush()
            self.handle_exec_exception(e)

        if self.options is not None:
            # set terminal title on exit
            if exitstatus:
                self.out.title(f'{self.options.prog} failed')
            else:
                self.out.title(f'{self.options.prog} succeeded')

        return exitstatus
Code example #33
File: rom_project.py Project: SkyTemple/skytemple
 def prepare_save_model(self, name, assert_that=None):
     """
     Write the binary model for this type to the ROM object in memory.
     If assert_that is given, it is asserted that the model matches the one on record.
     """
     context = self._opened_files_contexts[name] \
         if name in self._opened_files_contexts \
         else nullcontext(self._opened_files[name])
     with context as model:
         handler = self._file_handlers[name]
         logger.debug(
             f"Saving {name} in ROM. Model: {model}, Handler: {handler}")
         if handler == FileType.SIR0:
             logger.debug(f"> Saving as Sir0 wrapped data.")
             model = handler.wrap_obj(model)  # type: ignore
         if assert_that is not None:
             assert assert_that is model, "The model that is being saved must match!"
         binary_data = handler.serialize(model,
                                         **self._file_handler_kwargs[name])
         self._rom.setFileByName(name, binary_data)