示例#1
0
def create_introducer_clients(config, main_tub, _introducer_factory=None):
    """
    Read, validate and parse any 'introducers.yaml' configuration.

    :param config: node configuration; supplies the introducer furl/cache
        settings via ``get_introducer_configuration()`` and the nickname.
    :param main_tub: the Tub each IntroducerClient uses for connections.
    :param _introducer_factory: for testing; the class to instantiate instead
        of IntroducerClient

    :returns: a list of IntroducerClient instances
    """
    if _introducer_factory is None:
        _introducer_factory = IntroducerClient

    # we return this list
    introducer_clients = []

    introducers = config.get_introducer_configuration()

    # Iterate the mapping directly: nothing mutates it during the loop,
    # so the defensive list() copy was unnecessary.
    for petname, (furl, cache_path) in introducers.items():
        ic = _introducer_factory(
            main_tub,
            furl.encode("ascii"),
            config.nickname,
            str(allmydata.__full_version__),
            str(_Client.OLDEST_SUPPORTED_VERSION),
            partial(_sequencer, config),
            cache_path,
        )
        introducer_clients.append(ic)
    return introducer_clients
示例#2
0
def _renderHTTP_exception(request, failure):
    """Render *failure* as an HTTP error response.

    Prefers a simple text response with the status code chosen by
    humanize_failure(); if that itself blows up, falls back to a 500
    rendered as HTML (when the Accept header allows it) or as a
    plain-text traceback.
    """
    try:
        text, code = humanize_failure(failure)
    except Exception:
        # A bare 'except:' would also swallow KeyboardInterrupt/SystemExit;
        # Exception is broad enough for this last-resort error renderer.
        log.msg("exception in humanize_failure")
        log.msg("argument was %s" % (failure, ))
        log.err()
        text = str(failure)
        code = None

    if code is not None:
        return _renderHTTP_exception_simple(request, text, code)

    accept = request.getHeader("accept")
    if not accept:
        accept = "*/*"
    if "*/*" in accept or "text/*" in accept or "text/html" in accept:
        request.setResponseCode(http.INTERNAL_SERVER_ERROR)
        return template.renderElement(
            request,
            tags.html(
                tags.head(tags.title(u"Exception"), ),
                tags.body(FailureElement(failure), ),
            ),
        )

    # Client does not accept HTML: send the raw traceback as text.
    traceback = failure.getTraceback()
    return _renderHTTP_exception_simple(
        request,
        traceback,
        http.INTERNAL_SERVER_ERROR,
    )
示例#3
0
def get_arg(
    req,
    argname,
    default=None,
    multiple=False
):  # type: (IRequest, Union[bytes,str], Any, bool) -> Union[bytes,Tuple[bytes],Any]
    """Fetch a request argument from query args (req.args) or form body
    fields (req.fields).

    With multiple=False (the default) a single value is returned, query
    args winning over form fields, or *default* when absent. With
    multiple=True a tuple of every matching value is returned, query-arg
    values first.

    :param TahoeLAFSRequest req: The request to consider.

    :return: Either bytes or tuple of bytes.
    """
    # Normalize the lookup key and default to bytes up front.
    if isinstance(argname, str):
        argname = argname.encode("utf-8")
    if isinstance(default, str):
        default = default.encode("utf-8")

    collected = list(req.args.get(argname, []))

    # Form fields are keyed by text, and their values may be text too.
    argname_text = str(argname, "utf-8")
    if req.fields and argname_text in req.fields:
        field_value = req.fields[argname_text].value
        if isinstance(field_value, str):
            field_value = field_value.encode("utf-8")
        collected.append(field_value)

    if multiple:
        return tuple(collected)
    return collected[0] if collected else default
示例#4
0
    def _generate_name(self):
        """Fold up ranges. e.g. 1,2,3,4 -> 1:4

        Walks ``self.line_pairs`` of (port, line) name pairs — each name
        carries a 4-character prefix before its number (e.g. ``line12`` ->
        12) — and builds a compact name like ``port1.3:6``.
        """
        port_start = None
        line_start, prev_line = None, None
        name = ''

        def get_num(name):
            # Strip the fixed 4-char prefix and parse the number.
            return int(name[4:])

        for port, line in self.line_pairs:
            lineno = get_num(line)
            if port_start is None:
                # First pair establishes the port component of the name.
                port_start = get_num(port)
                name += '+port{}.'.format(port_start)

            if line_start is None:
                line_start = lineno
                name += str(line_start)
            elif lineno != prev_line + 1:
                # This line doesn't continue the existing range
                line_start = lineno
                # NOTE(review): this condition is always False because
                # line_start was just set to lineno on the line above, so
                # the ':{prev_line}' terminator for the previous range is
                # never emitted here. Presumably the check was meant to run
                # *before* resetting line_start — confirm before changing.
                if lineno != line_start:
                    name += ':{}'.format(prev_line)
                name += '+port{}.{}'.format(port_start, line_start)

            prev_line = lineno
        # NOTE(review): if self.line_pairs is empty, 'lineno' is unbound
        # here and this raises NameError — assumes at least one pair.
        if lineno != line_start:
            name += ':{}'.format(prev_line)

        # Drops a literal '.0:7' span — domain-specific shorthand for a
        # full range, presumably; TODO confirm.
        name = name.replace('.0:7', '')
        return name[1:]
示例#5
0
文件: s3.py 项目: yushu-liu/papermill
    def readdir(self, source, compressed=False, encoding='UTF-8'):
        """Yield every line of every file under an s3 directory.

        Lists keys beneath *source* and streams each one through
        ``self.read``, yielding newline-split lines.
        """
        for key in self.listdir(source, keys=True):
            # Delegate directly to the per-file line generator.
            yield from self.read(str(key), compressed=compressed, encoding=encoding)
示例#6
0
    def render_GET(self, req):
        """Render a GET on this file, dispatching on the t= query argument.

        t='' streams the file contents; t=json returns JSON metadata;
        t=info, t=uri and t=readonly-uri render the corresponding views.
        Any other t= raises WebError.
        """
        t = str(get_arg(req, b"t", b"").strip(), "ascii")

        # t=info contains variable ophandles, so is not allowed an ETag.
        FIXED_OUTPUT_TYPES = ["", "json", "uri", "readonly-uri"]
        if not self.node.is_mutable() and t in FIXED_OUTPUT_TYPES:
            # if the client already has the ETag then we can
            # short-circuit the whole process.
            si = self.node.get_storage_index()
            if si and req.setETag(b'%s-%s' % (base32.b2a(si), t.encode("ascii") or b"")):
                return b""

        if not t:
            # just get the contents
            # the filename arrives as part of the URL or in a form input
            # element, and will be sent back in a Content-Disposition header.
            # Different browsers use various character sets for this name,
            # sometimes depending upon how language environment is
            # configured. Firefox sends the equivalent of
            # urllib.quote(name.encode("utf-8")), while IE7 sometimes does
            # latin-1. Browsers cannot agree on how to interpret the name
            # they see in the Content-Disposition header either, despite some
            # 11-year old standards (RFC2231) that explain how to do it
            # properly. So we assume that at least the browser will agree
            # with itself, and echo back the same bytes that we were given.
            filename = get_arg(req, "filename", self.name) or "unknown"
            d = self.node.get_best_readable_version()
            d.addCallback(lambda dn: FileDownloader(dn, filename))
            return d
        if t == "json":
            # We do this to make sure that fields like size and
            # mutable-type (which depend on the file on the grid and not
            # just on the cap) are filled in. The latter gets used in
            # tests, in particular.
            #
            # TODO: Make it so that the servermap knows how to update in
            # a mode specifically designed to fill in these fields, and
            # then update it in that mode.
            if self.node.is_mutable():
                d = self.node.get_servermap(MODE_READ)
            else:
                d = defer.succeed(None)
            # Metadata lives on the edge from the parent directory to this
            # child, so it's only available when we have a parent.
            if self.parentnode and self.name:
                d.addCallback(lambda ignored:
                    self.parentnode.get_metadata_for(self.name))
            else:
                d.addCallback(lambda ignored: None)
            d.addCallback(lambda md: _file_json_metadata(req, self.node, md))
            return d
        if t == "info":
            return MoreInfo(self.node)
        if t == "uri":
            return _file_uri(req, self.node)
        if t == "readonly-uri":
            return _file_read_only_uri(req, self.node)
        raise WebError("GET file: bad t=%s" % t)
示例#7
0
def list_instruments(server=None, module=None):
    """Return info dicts describing the instruments currently available.

    May take a few seconds, since hardware (VISA resources and each driver
    module) is polled. Each entry is a specialized dict carrying the
    parameters needed to construct the instrument; pass one to
    :py:func:`~instrumental.drivers.instrument` to get the real object.

    Parameters
    ----------
    server : str, optional
        Remote Instrumental server to query instead of the local machine —
        an alias from instrumental.conf or `(hostname|ip-address)[:port]`.
    module : str, optional
        When given, only driver modules whose name contains this substring
        are polled.
    """
    if server is not None:
        # Remote query: delegate entirely to the server session.
        from . import remote
        return remote.client_session(server).list_instruments()

    try:
        import visa
        try:
            found = list_visa_instruments()
        except visa.VisaIOError:
            found = []  # Hide visa errors
    except (ImportError, ConfigError):
        found = []  # Ignore if PyVISA not installed or configured

    for driver_name in _acceptable_params:
        # Honor the optional module-name filter.
        if module and module not in driver_name:
            continue

        try:
            log.info("Importing driver module '%s'", driver_name)
            driver = import_module('.' + driver_name, __package__)
        except Exception as e:
            # Driver's dependencies unavailable; skip it.
            log.info("Error when importing module %s: <<%s>>", driver_name,
                     str(e))
            continue

        try:
            found.extend(driver.list_instruments())
        except AttributeError:
            # Driver doesn't implement list_instruments(); skip it.
            continue
    return found
示例#8
0
文件: s3.py 项目: yushu-liu/papermill
    def listglob(self, glb, **kwargs):
        """Returns a list of the files matching a glob.

        Name must be in the form of s3://bucket/glob

        :param str glb: glob pattern, e.g. ``s3://bucket/prefix/*.csv``
        :return: list of matching keys
        """
        matches = []
        # Raw string: '\[' in a plain literal is an invalid escape
        # sequence (a DeprecationWarning on modern Pythons).
        wildcard = re.compile(r'[*?\[]')
        # List everything under the static prefix (text before the first
        # glob metacharacter), then filter against the full pattern.
        for key in self.list(wildcard.split(glb, 1)[0], **kwargs):
            if fnmatch.fnmatch(str(key), glb):
                matches.append(key)
        return matches
示例#9
0
    def render_PUT(self, req):
        """Handle PUT on a placeholder: upload new content, or (t=uri)
        attach an existing cap as this child."""
        arg_t = get_arg(req, b"t", b"").strip()
        overwrite = parse_replace_arg(get_arg(req, "replace", "true"))

        assert self.parentnode and self.name
        # Partial-content uploads are not implemented for placeholders.
        if req.getHeader("content-range"):
            raise WebError("Content-Range in PUT not yet supported",
                           http.NOT_IMPLEMENTED)
        if not arg_t:
            return self.replace_me_with_a_child(req, self.client, overwrite)
        if arg_t == b"uri":
            return self.replace_me_with_a_childcap(req, self.client, overwrite)

        raise WebError("PUT to a file: bad t=%s" % str(arg_t, "utf-8"))
示例#10
0
def get_format(req, default="CHK"):
    """Return the requested file format: "CHK", "SDMF" or "MDMF".

    With no format= argument, mutable=true implies "SDMF", otherwise
    *default* is returned. Unknown formats raise WebError.
    """
    arg = get_arg(req, "format", None)
    if not arg:
        # No explicit format given; mutable=true means SDMF.
        return "SDMF" if boolean_of_arg(get_arg(req, "mutable", "false")) else default

    known = {b"CHK": "CHK", b"SDMF": "SDMF", b"MDMF": "MDMF"}
    fmt = known.get(arg.upper())
    if fmt is None:
        raise WebError(
            "Unknown format: %s, I know CHK, SDMF, MDMF" % str(arg, "ascii"),
            http.BAD_REQUEST)
    return fmt
示例#11
0
    def render_POST(self, req):
        """Handle POST on a placeholder; only t=upload is supported."""
        action = get_arg(req, b"t", b"").strip()
        allow_replace = boolean_of_arg(get_arg(req, b"replace", b"true"))

        if action != b"upload":
            # t=mkdir is handled in DirectoryNodeHandler._POST_mkdir, so
            # there are no other t= values left to be handled by the
            # placeholder.
            raise WebError("POST to a file: bad t=%s" % str(action, "utf-8"))

        # like PUT, but get the file data from an HTML form's input field.
        # We could get here from POST /uri/mutablefilecap?t=upload,
        # or POST /uri/path/file?t=upload, or
        # POST /uri/path/dir?t=upload&name=foo . All have the same
        # behavior, we just ignore any name= argument
        d = self.replace_me_with_a_formpost(req, self.client, allow_replace)

        return handle_when_done(req, d)
示例#12
0
    def close(self):
        """Close the camera and release associated image memory.

        Call this once you're finished with the camera, or use the camera
        as a context manager (see __init__) to have it done automatically.
        """
        # Stop listening for camera events before shutting down.
        for event in (lib.SET_EVENT_SEQ, lib.SET_EVENT_FRAME):
            self._dev.ExitEvent(event)

        try:
            self._dev.ExitCamera()
            UC480_Camera._open_cameras.remove(self)
            self._in_use = False
        except Exception as e:
            # Best-effort shutdown: log and move on rather than raise.
            log.error("Failed to close camera")
            log.error(str(e))
示例#13
0
def convert_children_json(nodemaker, children_json):
    """Convert the JSON body of GET?t=json into the {name: (node, metadata)}
    mapping accepted by dirnode.create_subdirectory() and
    client.create_directory(initial_children=). Used by
    t=mkdir-with-children and t=mkdir-immutable."""
    children = {}
    if not children_json:
        return children

    parsed = json.loads(children_json)
    for namex, (ctype, propdict) in parsed.items():
        namex = str(namex)
        metadata = propdict.get("metadata", {})
        # name= argument is just for error reporting
        childnode = nodemaker.create_from_cap(
            to_bytes(propdict.get("rw_uri")),
            to_bytes(propdict.get("ro_uri")),
            name=namex,
        )
        children[namex] = (childnode, metadata)
    return children
示例#14
0
def _getGitShaString(dist=None, sha=None):
    """If generic==True then returns empty __git_sha__ string
    """
    shaStr = 'n/a'
    if dist is not None:
        proc = subprocess.Popen('git rev-parse --short HEAD',
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                cwd='.', shell=True)
        repo_commit, _ = proc.communicate()
        del proc  # to get rid of the background process
        if repo_commit:
            shaStr = str(repo_commit.strip())  # remove final linefeed
        else:
            shaStr = 'n/a'
        #this looks neater but raises errors on win32
        #        output = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).split()[0]
        #        if output:
        #            shaStr = output
    return shaStr
示例#15
0
def _getGitShaString(dist=None, sha=None):
    """If generic==True then returns empty __git_sha__ string
    """
    shaStr = 'n/a'
    if dist is not None:
        proc = subprocess.Popen('git rev-parse --short HEAD',
                                stdout=subprocess.PIPE,
                                stderr=subprocess.PIPE,
                                cwd='.', shell=True)
        repo_commit, _ = proc.communicate()
        del proc  # to get rid of the background process
        if repo_commit:
            shaStr = str(repo_commit.strip())  # remove final linefeed
        else:
            shaStr = 'n/a'
        #this looks neater but raises errors on win32
        #        output = subprocess.check_output(['git', 'rev-parse', '--short', 'HEAD']).split()[0]
        #        if output:
        #            shaStr = output
    return shaStr
示例#16
0
    def render_PUT(self, req):
        """Render a PUT on this file, dispatching on the t= query argument.

        With no t=, replaces the file's contents (or, with offset=, updates
        a mutable file in place); t=uri replaces this child with a given
        cap. Any other t= raises WebError.
        """
        t = get_arg(req, b"t", b"").strip()
        replace = parse_replace_arg(get_arg(req, b"replace", b"true"))
        # offset only applies to in-place updates of mutable files.
        offset = parse_offset_arg(get_arg(req, b"offset", None))

        if not t:
            if not replace:
                # this is the early trap: if someone else modifies the
                # directory while we're uploading, the add_file(overwrite=)
                # call in replace_me_with_a_child will do the late trap.
                raise ExistingChildError()

            if self.node.is_mutable():
                # Are we a readonly filenode? We shouldn't allow callers
                # to try to replace us if we are.
                if self.node.is_readonly():
                    raise WebError("PUT to a mutable file: replace or update"
                                   " requested with read-only cap")
                # No offset given: overwrite the entire mutable file.
                if offset is None:
                    return self.replace_my_contents(req)

                if offset >= 0:
                    return self.update_my_contents(req, offset)

                raise WebError("PUT to a mutable file: Invalid offset")

            else:
                # Immutable files cannot be modified in place at all.
                if offset is not None:
                    raise WebError("PUT to a file: append operation invoked "
                                   "on an immutable cap")

                assert self.parentnode and self.name
                return self.replace_me_with_a_child(req, self.client, replace)

        if t == b"uri":
            if not replace:
                raise ExistingChildError()
            assert self.parentnode and self.name
            return self.replace_me_with_a_childcap(req, self.client, replace)

        raise WebError("PUT to a file: bad t=%s" % str(t, "utf-8"))
示例#17
0
    def render_POST(self, req):
        """Handle POST on this file: t=check runs the checker; t=upload
        replaces contents from an HTML form field."""
        t = get_arg(req, b"t", b"").strip()
        replace = boolean_of_arg(get_arg(req, b"replace", b"true"))

        if t == b"check":
            d = self._POST_check(req)
        elif t == b"upload":
            # like PUT, but get the file data from an HTML form's input field
            # We could get here from POST /uri/mutablefilecap?t=upload,
            # or POST /uri/path/file?t=upload, or
            # POST /uri/path/dir?t=upload&name=foo . All have the same
            # behavior, we just ignore any name= argument
            if not self.node.is_mutable():
                # Immutable: replace this child via the parent directory.
                if not replace:
                    raise ExistingChildError()
                assert self.parentnode and self.name
                d = self.replace_me_with_a_formpost(req, self.client, replace)
            else:
                d = self.replace_my_contents_with_a_formpost(req)
        else:
            raise WebError("POST to file: bad t=%s" % str(t, "ascii"))

        return handle_when_done(req, d)
示例#18
0
def convert_string_value_to_type_value(string_value, data_type):
    """Helper function to convert a given string to a given data type

    :param str string_value: the string to convert
    :param type data_type: the target data type
    :return: the converted value
    :raises AttributeError: if the string cannot be converted
    """
    from ast import literal_eval
    from inspect import isclass  # needed by the generic class-constructor branch

    try:
        if data_type in (str, type(None)):
            converted_value = str(string_value)
        elif data_type == int:
            converted_value = int(string_value)
        elif data_type == float:
            converted_value = float(string_value)
        elif data_type == bool:
            converted_value = bool(literal_eval(string_value))
        elif data_type in (list, dict, tuple):
            converted_value = literal_eval(string_value)
            # literal_eval accepts any literal; require the exact container
            # type that was asked for (no coercion).
            if type(converted_value) != data_type:
                raise ValueError("Invalid syntax: {0}".format(string_value))
        elif data_type == object:
            try:
                converted_value = literal_eval(string_value)
            except (ValueError, SyntaxError):
                # Not a literal: treat the raw text as a quoted string.
                converted_value = literal_eval('"' + string_value + '"')
        elif isinstance(data_type, type):  # Try native type conversion
            converted_value = data_type(string_value)
        elif isclass(data_type):  # Call class constructor
            converted_value = data_type(string_value)
        else:
            # Bug fix: second placeholder was '{0}', repeating the string
            # instead of naming the target type.
            raise ValueError("No conversion from string '{0}' to data type '{1}' defined".format(
                string_value, data_type.__name__))
    except (ValueError, SyntaxError, TypeError) as e:
        raise AttributeError("Can't convert '{0}' to type '{1}': {2}".format(string_value, data_type.__name__, e))
    return converted_value
示例#19
0
    def render(self, req):
        """
        Dispatch to the renderer matching the requested output format.

        The format is taken from the ``formatArgument`` query argument,
        falling back to ``formatDefault``; with no format at all the
        selected renderer is render_HTML.

        :return: The result of the selected renderer, as bytes.
        """
        requested = get_arg(req, self.formatArgument, self.formatDefault)
        # get_arg hands back bytes or None here; normalize to text.
        if isinstance(requested, bytes):
            requested = str(requested, "ascii")
        output = self._get_renderer(requested)(req)
        # Renderers may produce text (e.g. json.dumps), but twisted.web
        # wants bytes; convert once here rather than in every renderer.
        if isinstance(output, str):
            output = output.encode("utf-8")
        return output
示例#20
0
def unpack_extension(data):
    """Parse the 'key:length:value,' wire encoding into a dict.

    Keys become str; values stay bytes, except for a few known numeric
    fields which are converted to int.
    """
    fields = {}
    while data:
        # 'key:' prefix
        key, data = data.split(b':', 1)

        # '<decimal length>:' prefix
        number, data = data.split(b':', 1)
        length = int(number)

        # Fixed-length value followed by a ',' terminator.
        value, data = data[:length], data[length:]
        assert data[:1] == b','
        data = data[1:]

        fields[str(key, "utf-8")] = value

    # convert certain things to numbers
    for intkey in ('size', 'segment_size', 'num_segments', 'needed_shares',
                   'total_shares'):
        if intkey in fields:
            fields[intkey] = int(fields[intkey])
    return fields
示例#21
0
 def __repr__(self):
     """Represent this object via its dict form (``to_dict()``)."""
     # Delegate to the dict representation rather than self.cmd.
     state = self.to_dict()
     return str(state)
示例#22
0
 def _make_key():
     """Create a fresh ed25519 signing key, serialized as unicode with a
     trailing newline (config values are always unicode)."""
     private_key, _ = ed25519.create_signing_keypair()
     serialized = ed25519.string_from_signing_key(private_key)
     return str(serialized + b"\n", "utf-8")
示例#23
0
def instrument(inst=None, **kwargs):
    """
    Create any Instrumental instrument object from an alias, parameters,
    or an existing instrument.

    :param inst: an existing Instrument (returned unchanged), a params
        dict, an alias/name string, or None (parameters then come from
        **kwargs alone).
    :param kwargs: extra parameters merged over those derived from *inst*.

    >>> inst1 = instrument('MYAFG')
    >>> inst2 = instrument(visa_address='TCPIP::192.168.1.34::INSTR')
    >>> inst3 = instrument({'visa_address': 'TCPIP:192.168.1.35::INSTR'})
    >>> inst4 = instrument(inst1)
    """
    alias = None
    if inst is None:
        params = {}
    elif isinstance(inst, Instrument):
        # Already constructed: return as-is.
        return inst
    elif isinstance(inst, dict):
        params = inst
    elif isinstance(inst, str):
        name = inst
        params = conf.instruments.get(name, None)
        if params is None:
            # Try looking for the string in the output of list_instruments()
            test_str = name.lower()
            for inst_params in list_instruments():
                if test_str in str(inst_params).lower():
                    params = inst_params
                    break
        else:
            alias = name

        if params is None:
            raise Exception("Instrument with alias `{}` not ".format(name) +
                            "found in config file")
    # NOTE(review): any other type for `inst` (e.g. an int) leaves `params`
    # unbound and raises NameError on the next line — confirm whether an
    # explicit TypeError would be preferable.

    params = params.copy()  # Make sure we don't modify any existing dicts
    params.update(kwargs)

    if 'server' in params:
        # Remote instrument: hand the params to the server session rather
        # than constructing locally.
        from . import remote
        host = params['server']
        session = remote.client_session(host)
        return session.instrument(params)

    if 'module' in params:
        # We've already been given the name of the module
        # SHOULD PROBABLY INTEGRATE THIS WITH THE OTHER CASE
        try:
            mod = import_module('.' + params['module'], __package__)
        except Exception as e:
            # Augment the import error with a hint before re-raising.
            msg = (
                "\n\nSpecified module '{}' could not be imported. Make sure you have all of "
                "this driver module's dependencies installed.".format(
                    params['module']))
            e.args = (e.args[0] + msg, ) + e.args[1:]
            raise

        try:
            new_inst = mod._instrument(params)
        except InstrumentTypeError:
            raise Exception(
                "Instrument is not compatible with the given module")

        new_inst._alias = alias

        # HACK to allow 'parent' modules to do special initialization of instruments
        # We may get rid of this in the future by having each class's __init__ method directly
        # handle params, getting rid of the _instrument() middleman.
        parent_mod = import_module('.' + params['module'].rsplit('.', 1)[0],
                                   __package__)
        try:
            parent_mod._init_instrument(new_inst, params)
        except AttributeError:
            # Parent module has no _init_instrument(); nothing to do.
            pass

        return new_inst

    # Find the right type of Instrument to create
    acceptable_modules = [
        mod_name for mod_name, acc_params in list(_acceptable_params.items())
        if _has_acceptable_params(acc_params, params)
    ]

    for mod_name in acceptable_modules:
        # Try to import module, skip it if optional deps aren't met
        try:
            log.info("Trying to import module '{}'".format(mod_name))
            mod = import_module('.' + mod_name, __package__)
        except Exception as e:
            # With a single candidate there is nothing to fall back to,
            # so surface the error instead of skipping.
            if len(acceptable_modules) == 1: raise
            log.info("Module {} not supported, skipping".format(mod_name),
                     exc_info=e)
            continue

        # Try to create an instance of this instrument type
        try:
            log.info("Trying to create instrument using module '{}'".format(
                mod_name))
            new_inst = mod._instrument(params)
        except AttributeError:
            if len(acceptable_modules) == 1: raise
            log.info(
                "Module {} missing _instrument(), skipping".format(mod_name))
            continue
        except InstrumentTypeError:
            if len(acceptable_modules) == 1: raise
            log.info("Not the right type")
            continue
        except InstrumentNotFoundError:
            if len(acceptable_modules) == 1: raise
            log.info("Instrument not found")
            continue

        new_inst._alias = alias

        # HACK to allow 'parent' modules to do special initialization of instruments
        # We may get rid of this in the future by having each class's __init__ method directly
        # handle params, getting rid of the _instrument() middleman.
        parent_mod = import_module('.' + mod_name.rsplit('.', 1)[0],
                                   __package__)
        try:
            parent_mod._init_instrument(new_inst, params)
        except AttributeError:
            # Parent module has no _init_instrument(); nothing to do.
            pass

        return new_inst

    # If we reach this point, we haven't been able to create a valid instrument
    if not acceptable_modules:
        raise Exception(
            "Parameters {} match no existing driver module".format(params))
    else:
        raise Exception("No instrument matching {} was found".format(params))
示例#24
0
def spaces_to_nbsp(text):
    """Return *text* (str-ified) with every ASCII space replaced by a
    non-breaking space (U+00A0)."""
    result = str(text)
    return result.replace(u' ', u'\u00A0')
示例#25
0
def list_visa_instruments():
    """Returns a list of info about available VISA instruments.

    May take a few seconds because it must poll the network.

    It actually returns a list of specialized dict objects that contain
    parameters needed to create an instance of the given instrument. You can
    then get the actual instrument by passing the dict to
    :py:func:`~instrumental.drivers.instrument`.

    >>> inst_list = get_visa_instruments()
    >>> print(inst_list)
    [<TEKTRONIX 'TDS 3032'>, <TEKTRONIX 'AFG3021B'>]
    >>> inst = instrument(inst_list[0])
    """
    import visa
    instruments, skipped = [], []
    prev_addr = 'START'
    rm = visa.ResourceManager()
    visa_list = rm.list_resources()
    for addr in visa_list:
        # Skip any address that merely extends the previous one —
        # presumably a heuristic to dedupe aliases of the same physical
        # resource; confirm against pyvisa's resource naming.
        if not addr.startswith(prev_addr):
            prev_addr = addr
            try:
                log.info("Opening VISA resource '{}'".format(addr))
                # Short timeouts: we only need one quick IDN query each.
                i = rm.open_resource(addr, open_timeout=50, timeout=200)
            except visa.VisaIOError as e:
                # Could not create visa instrument object
                skipped.append(addr)
                log.info("Skipping this resource due to VisaIOError")
                log.info(e)
                continue
            except socket.timeout:
                skipped.append(addr)
                log.info("Skipping this resource due to socket.timeout")
                continue

            try:
                # NOTE(review): `ask` is the legacy pyvisa spelling of
                # `query` — works on older pyvisa versions only.
                idn = i.ask("*IDN?")
                log.info("*IDN? gives '{}'".format(idn.strip()))
                try:
                    # IDN convention: "manufacturer,model,serial,firmware".
                    manufac, model, rest = idn.split(',', 2)
                except ValueError as e:
                    skipped.append(addr)
                    log.info("Invalid response to IDN query")
                    log.info(str(e))
                    continue

                module_name = _find_visa_inst_type(manufac, model)
                params = _ParamDict("<{} '{}'>".format(manufac, model))
                params['visa_address'] = addr
                if module_name:
                    params.module = module_name
                instruments.append(params)
            except UnicodeDecodeError as e:
                skipped.append(addr)
                log.info(
                    "UnicodeDecodeError while getting IDN. Probably a non-Visa Serial device"
                )
                log.info(str(e))
                continue
            except visa.VisaIOError as e:
                skipped.append(addr)
                log.info("Getting IDN failed due to VisaIOError")
                log.info(str(e))
                continue
            except socket.timeout:
                skipped.append(addr)
                log.info("Getting IDN failed due to socket.timeout")
                continue
            finally:
                # Always release the VISA session, success or not.
                i.close()
    return instruments
示例#26
0
def should_create_intermediate_directories(req):
    """
    Decide whether missing intermediate directories should be created
    for this request.

    :param req: the Twisted request object.

    :return: ``True`` if the request is a PUT or POST whose ``t=``
        argument is not one of the operations that must target an
        already-existing node.
    """
    operation = str(get_arg(req, "t", "").strip(), "ascii")
    if req.method not in (b"PUT", b"POST"):
        return False
    return operation not in ("delete", "rename", "rename-form", "check")
示例#27
0
    def run(self, date=None):
        """
        Fetch one day's worth of raw ADI rows from Hive into a temporary
        file and persist them into the configured database destination(s).

        :param date: datetime to run for; the day *before* it is queried.
            Defaults to the current UTC time (the dry-run case).

        :raises NoRowsWritten: if the Hive query yielded no usable rows.
        """
        # NOTE(willkg): This lets us have a dry-run app that doesn't run as
        # a backfill app. In the normal case, this will get passed a date.
        date = date or utc_now()

        # Build a transaction executor for the primary destination.
        db_class = self.config.primary_destination.database_class
        primary_database = db_class(self.config.primary_destination)
        tx_class = self.config.primary_destination.transaction_executor_class
        primary_transaction = tx_class(
            self.config,
            primary_database,
        )
        transactions = [primary_transaction]

        db_class = self.config.secondary_destination.database_class
        # The reason for checking if this is anything at all is
        # because one way of disabling the secondary destination
        # is to set the database_class to an empty string.
        if db_class:
            secondary_database = db_class(self.config.secondary_destination)
            if secondary_database.config != primary_database.config:
                # The secondary really is different from the first one.
                # By default, if not explicitly set, it'll pick up the same
                # resource values as the first one.
                tx_class = (
                    self.config.secondary_destination
                    .transaction_executor_class
                )
                secondary_transaction = tx_class(
                    self.config,
                    secondary_database,
                )
                transactions.append(secondary_transaction)

        # Query the day *before* the given date, formatted YYYY-MM-DD.
        target_date = (date - datetime.timedelta(days=1)).strftime('%Y-%m-%d')

        raw_adi_logs_pathname = os.path.join(
            tempfile.gettempdir(),
            "%s.raw_adi_logs.TEMPORARY%s" % (
                target_date,
                '.txt'
            )
        )
        try:
            with codecs.open(raw_adi_logs_pathname, 'w', 'utf-8') as f:
                hive = pyhs2.connect(
                    host=self.config.hive_host,
                    port=self.config.hive_port,
                    authMechanism=self.config.hive_auth_mechanism,
                    user=self.config.hive_user,
                    password=self.config.hive_password,
                    database=self.config.hive_database,
                    # the underlying TSocket setTimeout() wants milliseconds
                    timeout=self.config.timeout * 1000
                )

                cur = hive.cursor()
                query = self.config.query % target_date
                cur.execute(query)
                rows_written = 0
                for row in cur:
                    # Skip any row containing a NULL column.
                    if None in row:
                        continue
                    # One tab-separated line per row; string values are
                    # URL-unquoted, stripped of control characters, and
                    # have backslashes escaped.
                    f.write(
                        "\t"
                        .join(
                            self.remove_control_characters(
                                urllib2.unquote(v)
                            ).replace('\\', '\\\\')
                            if isinstance(v, basestring) else str(v)
                            for v in row
                        )
                    )
                    f.write("\n")
                    rows_written += 1

            if not rows_written:
                raise NoRowsWritten('hive yielded no rows to write')

            self.config.logger.info(
                'Wrote %d rows from doing hive query' % rows_written
            )

            self.persist_data(transactions, raw_adi_logs_pathname, target_date)

        finally:
            # Always clean up the temporary dump file, even on failure.
            if os.path.isfile(raw_adi_logs_pathname):
                os.remove(raw_adi_logs_pathname)
示例#28
0
    # This should not happen very often.
    pass

# Fallback values, overwritten below when the generated _version module
# is available.
full_version = "unknown"
branch = "unknown"
try:
    # type ignored as it fails in CI
    # (https://app.circleci.com/pipelines/github/tahoe-lafs/tahoe-lafs/1647/workflows/60ae95d4-abe8-492c-8a03-1ad3b9e42ed3/jobs/40972)
    from allmydata._version import full_version, branch  # type: ignore
except ImportError:
    # We're running in a tree that hasn't run update_version, and didn't
    # come with a _version.py, so we don't know what our full version or
    # branch is. This should not happen very often.
    pass

__appname__ = "tahoe-lafs"

# __full_version__ is the one that you ought to use when identifying yourself
# in the "application" part of the Tahoe versioning scheme:
# https://tahoe-lafs.org/trac/tahoe-lafs/wiki/Versioning
# NOTE(review): __version__ is assumed to be defined earlier in this module —
# confirm against the lines above this chunk.
__full_version__ = __appname__ + '/' + str(__version__)

# Install Python 3 module locations in Python 2:
from future import standard_library
standard_library.install_aliases()

# Monkey-patch 3rd party libraries:
from ._monkeypatch import patch
patch()
del patch
示例#29
0
def humanize_exception(exc):
    """
    Like ``humanize_failure`` but for an exception.

    :param Exception exc: The exception to describe.

    :return: See ``humanize_failure``.
    """
    # Each recognized exception type maps to a human-readable message and an
    # HTTP status code; unrecognized exceptions fall through to (text, None).
    if isinstance(exc, EmptyPathnameComponentError):
        return (
            "The webapi does not allow empty pathname components, "
            "i.e. a double slash",
            http.BAD_REQUEST,
        )
    if isinstance(exc, ExistingChildError):
        return (
            "There was already a child by that name, and you asked me "
            "to not replace it.",
            http.CONFLICT,
        )
    if isinstance(exc, NoSuchChildError):
        name = quote_output(
            exc.args[0],
            encoding="utf-8",
            quotemarks=False,
        )
        return ("No such child: %s" % name, http.NOT_FOUND)
    if isinstance(exc, NotEnoughSharesError):
        message = (
            "NotEnoughSharesError: This indicates that some "
            "servers were unavailable, or that shares have been "
            "lost to server departure, hard drive failure, or disk "
            "corruption. You should perform a filecheck on "
            "this object to learn more.\n\nThe full error message is:\n"
            "%s"
        ) % str(exc)
        return (message, http.GONE)
    if isinstance(exc, NoSharesError):
        message = (
            "NoSharesError: no shares could be found. "
            "Zero shares usually indicates a corrupt URI, or that "
            "no servers were connected, but it might also indicate "
            "severe corruption. You should perform a filecheck on "
            "this object to learn more.\n\nThe full error message is:\n"
            "%s"
        ) % str(exc)
        return (message, http.GONE)
    if isinstance(exc, UnrecoverableFileError):
        message = (
            "UnrecoverableFileError: the directory (or mutable file) could "
            "not be retrieved, because there were insufficient good shares. "
            "This might indicate that no servers were connected, "
            "insufficient servers were connected, the URI was corrupt, or "
            "that shares have been lost due to server departure, hard drive "
            "failure, or disk corruption. You should perform a filecheck on "
            "this object to learn more."
        )
        return (message, http.GONE)
    if isinstance(exc, MustNotBeUnknownRWError):
        name = quote_output(exc.args[1], encoding="utf-8")
        # args[2] tells us whether the unknown cap was immutable, which
        # changes the advice we give.
        if exc.args[2]:
            message = (
                "MustNotBeUnknownRWError: an operation to add a child named "
                "%s to a directory was given an unknown cap in a write slot.\n"
                "If the cap is actually an immutable readcap, then using a "
                "webapi server that supports a later version of Tahoe may help.\n\n"
                "If you are using the webapi directly, then specifying an immutable "
                "readcap in the read slot (ro_uri) of the JSON PROPDICT, and "
                "omitting the write slot (rw_uri), would also work in this "
                "case.") % name
        else:
            message = (
                "MustNotBeUnknownRWError: an operation to add a child named "
                "%s to a directory was given an unknown cap in a write slot.\n"
                "Using a webapi server that supports a later version of Tahoe "
                "may help.\n\n"
                "If you are using the webapi directly, specifying a readcap in "
                "the read slot (ro_uri) of the JSON PROPDICT, as well as a "
                "writecap in the write slot if desired, would also work in this "
                "case.") % name
        return (message, http.BAD_REQUEST)
    if isinstance(exc, MustBeDeepImmutableError):
        name = quote_output(exc.args[1], encoding="utf-8")
        message = (
            "MustBeDeepImmutableError: a cap passed to this operation for "
            "the child named %s, needed to be immutable but was not. Either "
            "the cap is being added to an immutable directory, or it was "
            "originally retrieved from an immutable directory as an unknown "
            "cap.") % name
        return (message, http.BAD_REQUEST)
    if isinstance(exc, MustBeReadonlyError):
        name = quote_output(exc.args[1], encoding="utf-8")
        message = (
            "MustBeReadonlyError: a cap passed to this operation for "
            "the child named '%s', needed to be read-only but was not. "
            "The cap is being passed in a read slot (ro_uri), or was retrieved "
            "from a read slot as an unknown cap.") % name
        return (message, http.BAD_REQUEST)
    if isinstance(exc, blacklist.FileProhibited):
        message = "Access Prohibited: %s" % quote_output(
            exc.reason, encoding="utf-8", quotemarks=False)
        return (message, http.FORBIDDEN)
    if isinstance(exc, WebError):
        return (exc.text, exc.code)
    if isinstance(exc, FileTooLargeError):
        return ("FileTooLargeError: %s" % (exc, ),
                http.REQUEST_ENTITY_TOO_LARGE)
    return (str(exc), None)
示例#30
0
 def __repr__(self):
     """Render this instance as the string form of its dict representation."""
     return str(self.to_dict())
def sww2dem(
        name_in,
        name_out,
        quantity=None,  # defaults to elevation
        reduction=None,
        cellsize=10,
        number_of_decimal_places=None,
        NODATA_value=-9999.0,
        easting_min=None,
        easting_max=None,
        northing_min=None,
        northing_max=None,
        verbose=False,
        origin=None,
        datum='WGS84',
        block_size=None):
    """Read SWW file and convert to Digital Elevation model format
    (.asc or .ers)

    Example (ASC):
    ncols         3121
    nrows         1800
    xllcorner     722000
    yllcorner     5893000
    cellsize      25
    NODATA_value  -9999
    138.3698 137.4194 136.5062 135.5558 ..........

    The number of decimal places can be specified by the user to save
    on disk space requirements by specifying in the call to sww2dem.

    Also write accompanying file with same basename_in but extension .prj
    used to fix the UTM zone, datum, false northings and eastings.

    The prj format is assumed to be as

    Projection    UTM
    Zone          56
    Datum         WGS84
    Zunits        NO
    Units         METERS
    Spheroid      WGS84
    Xshift        0.0000000000
    Yshift        10000000.0000000000
    Parameters

    The parameter quantity must be the name of an existing quantity or
    an expression involving existing quantities. The default is
    'elevation'. Quantity is not a list of quantities.

    If reduction is given and it's an index, sww2dem will output the quantity at that time-step.
    If reduction is given and it's a built in function (eg max, min, mean), then that
    function is used to reduce the quantity over all time-steps. If reduction is not given,
    reduction is set to "max" by default.

    datum

    format can be either 'asc' or 'ers'
    block_size - sets the number of slices along the non-time axis to
                 process in one block.

    :return: basename of the output file for .asc output.
        NOTE(review): the .ers branch currently returns None — confirm
        whether callers rely on that before unifying the return value.
    :raises IOError: if name_in is not .sww or name_out is not .asc/.ers.
    """

    import sys
    import types

    from anuga.geometry.polygon import inside_polygon, outside_polygon
    from anuga.abstract_2d_finite_volumes.util import \
         apply_expression_to_dictionary

    basename_in, in_ext = os.path.splitext(name_in)
    basename_out, out_ext = os.path.splitext(name_out)
    out_ext = out_ext.lower()

    # Validate input/output formats up front.
    if in_ext != '.sww':
        raise IOError('Input format for %s must be .sww' % name_in)

    if out_ext not in ['.asc', '.ers']:
        raise IOError('Format for %s must be either asc or ers.' % name_out)

    false_easting = 500000
    false_northing = 10000000

    # Apply defaults for optional parameters.
    if quantity is None:
        quantity = 'elevation'

    if reduction is None:
        reduction = max

    # Expand named quantities into their defining expressions.
    if quantity in quantity_formula:
        quantity = quantity_formula[quantity]

    if number_of_decimal_places is None:
        number_of_decimal_places = 3

    if block_size is None:
        block_size = DEFAULT_BLOCK_SIZE

    # BUG FIX: the tuple was (int, int, float) — a botched py2->py3
    # conversion of (int, long, float); the duplicate int was redundant.
    assert (isinstance(block_size, (int, float)))

    # Read sww file
    if verbose:
        log.critical('Reading from %s' % name_in)
        log.critical('Output directory is %s' % name_out)

    from anuga.file.netcdf import NetCDFFile
    fid = NetCDFFile(name_in)

    # Get extent and reference
    x = num.array(fid.variables['x'], num.float)
    y = num.array(fid.variables['y'], num.float)
    volumes = num.array(fid.variables['volumes'], num.int)
    # A non-builtin reduction is treated as a time-step index; a builtin
    # (max/min/mean) is applied across all time-steps.
    if type(reduction) is not types.BuiltinFunctionType:
        times = fid.variables['time'][reduction]
    else:
        times = fid.variables['time'][:]

    number_of_timesteps = fid.dimensions['number_of_timesteps']
    number_of_points = fid.dimensions['number_of_points']

    if origin is None:
        # Get geo_reference
        # sww files don't have to have a geo_ref
        try:
            geo_reference = Geo_reference(NetCDFObject=fid)
        except AttributeError:
            geo_reference = Geo_reference()  # Default georef object

        xllcorner = geo_reference.get_xllcorner()
        yllcorner = geo_reference.get_yllcorner()
        zone = geo_reference.get_zone()
    else:
        zone = origin[0]
        xllcorner = origin[1]
        yllcorner = origin[2]

    # FIXME: Refactor using code from Interpolation_function.statistics
    # (in interpolate.py)
    # Something like print swwstats(swwname)
    if verbose:
        log.critical('------------------------------------------------')
        log.critical('Statistics of SWW file:')
        log.critical('  Name: %s' % name_in)
        log.critical('  Reference:')
        log.critical('    Lower left corner: [%f, %f]' %
                     (xllcorner, yllcorner))
        if type(reduction) is not types.BuiltinFunctionType:
            log.critical('    Time: %f' % times)
        else:
            log.critical('    Start time: %f' % fid.starttime[0])
        log.critical('  Extent:')
        log.critical('    x [m] in [%f, %f], len(x) == %d' %
                     (num.min(x), num.max(x), len(x.flat)))
        log.critical('    y [m] in [%f, %f], len(y) == %d' %
                     (num.min(y), num.max(y), len(y.flat)))
        if type(reduction) is not types.BuiltinFunctionType:
            log.critical('    t [s] = %f, len(t) == %d' % (times, 1))
        else:
            log.critical('    t [s] in [%f, %f], len(t) == %d' %
                         (min(times), max(times), len(times)))
        log.critical('  Quantities [SI units]:')

        # Comment out for reduced memory consumption
        for name in ['stage', 'xmomentum', 'ymomentum']:
            q = fid.variables[name][:].flatten()
            if type(reduction) is not types.BuiltinFunctionType:
                q = q[reduction * len(x):(reduction + 1) * len(x)]
            if verbose:
                log.critical('    %s in [%f, %f]' % (name, min(q), max(q)))
        for name in ['elevation']:
            q = fid.variables[name][:].flatten()
            if verbose:
                log.critical('    %s in [%f, %f]' % (name, min(q), max(q)))

    # Get the variables in the supplied expression.
    # This may throw a SyntaxError exception.
    var_list = get_vars_in_expression(quantity)

    # Check that we have the required variables in the SWW file.
    missing_vars = []
    for name in var_list:
        try:
            _ = fid.variables[name]
        except KeyError:
            missing_vars.append(name)
    if missing_vars:
        msg = (
            "In expression '%s', variables %s are not in the SWW file '%s'" %
            (quantity, str(missing_vars), name_in))
        raise_(Exception, msg)

    # Create result array and start filling, block by block.
    result = num.zeros(number_of_points, num.float)

    if verbose:
        msg = 'Slicing sww file, num points: ' + str(number_of_points)
        msg += ', block size: ' + str(block_size)
        log.critical(msg)

    for start_slice in range(0, number_of_points, block_size):
        # Limit slice size to array end if at last block
        end_slice = min(start_slice + block_size, number_of_points)

        # Get slices of all required variables
        q_dict = {}
        for name in var_list:
            # check if variable has time axis
            if len(fid.variables[name].shape) == 2:
                q_dict[name] = fid.variables[name][:, start_slice:end_slice]
            else:  # no time axis
                q_dict[name] = fid.variables[name][start_slice:end_slice]

        # Evaluate expression with quantities found in SWW file
        res = apply_expression_to_dictionary(quantity, q_dict)

        if len(res.shape) == 2:
            # Reduce over the time axis, either by indexing one time-step
            # or applying the builtin reduction per point.
            new_res = num.zeros(res.shape[1], num.float)
            for k in range(res.shape[1]):
                if type(reduction) is not types.BuiltinFunctionType:
                    new_res[k] = res[reduction, k]
                else:
                    new_res[k] = reduction(res[:, k])
            res = new_res

        result[start_slice:end_slice] = res

    # Post condition: Now q has dimension: number_of_points
    assert len(result.shape) == 1
    assert result.shape[0] == number_of_points

    if verbose:
        log.critical('Processed values for %s are in [%f, %f]' %
                     (quantity, min(result), max(result)))

    # Create grid and update xll/yll corner and x,y
    # Relative extent
    if easting_min is None:
        xmin = min(x)
    else:
        xmin = easting_min - xllcorner

    if easting_max is None:
        xmax = max(x)
    else:
        xmax = easting_max - xllcorner

    if northing_min is None:
        ymin = min(y)
    else:
        ymin = northing_min - yllcorner

    if northing_max is None:
        ymax = max(y)
    else:
        ymax = northing_max - yllcorner

    msg = 'xmax must be greater than or equal to xmin.\n'
    msg += 'I got xmin = %f, xmax = %f' % (xmin, xmax)
    assert xmax >= xmin, msg

    # BUG FIX: message previously said "xmin" instead of "ymin".
    msg = 'ymax must be greater than or equal to ymin.\n'
    msg += 'I got ymin = %f, ymax = %f' % (ymin, ymax)
    assert ymax >= ymin, msg

    if verbose: log.critical('Creating grid')
    ncols = int(old_div((xmax - xmin), cellsize)) + 1
    nrows = int(old_div((ymax - ymin), cellsize)) + 1

    # New absolute reference and coordinates
    newxllcorner = xmin + xllcorner
    newyllcorner = ymin + yllcorner

    x = x + xllcorner - newxllcorner
    y = y + yllcorner - newyllcorner

    vertex_points = num.concatenate((x[:, num.newaxis], y[:, num.newaxis]),
                                    axis=1)
    assert len(vertex_points.shape) == 2

    # NOTE(review): unused — superseded by calc_grid_values below, which
    # uses the C-level eval_grid. Kept for reference; candidate for removal.
    def calc_grid_values_old(vertex_points, volumes, result):

        grid_points = num.zeros((ncols * nrows, 2), num.float)

        for i in range(nrows):
            if out_ext == '.asc':
                yg = i * cellsize
            else:
                # this will flip the order of the y values for ers
                yg = (nrows - i) * cellsize

            for j in range(ncols):
                xg = j * cellsize
                k = i * ncols + j

                grid_points[k, 0] = xg
                grid_points[k, 1] = yg

        # Interpolate
        from anuga.fit_interpolate.interpolate import Interpolate

        # Remove loners from vertex_points, volumes here
        vertex_points, volumes = remove_lone_verts(vertex_points, volumes)
        # export_mesh_file('monkey.tsh',{'vertices':vertex_points, 'triangles':volumes})

        interp = Interpolate(vertex_points, volumes, verbose=verbose)

        bprint = 0

        # Interpolate using quantity values
        if verbose: log.critical('Interpolating')
        grid_values = interp.interpolate(bprint, result, grid_points).flatten()
        outside_indices = interp.get_outside_poly_indices()

        for i in outside_indices:
            # Points outside the mesh get the NODATA marker.
            grid_values[i] = NODATA_value

        return grid_values

    def calc_grid_values(vertex_points, volumes, result):
        """Interpolate result onto a regular grid via the C eval_grid."""
        grid_points = num.zeros((ncols * nrows, 2), num.float)

        for i in range(nrows):
            if out_ext == '.asc':
                yg = i * cellsize
            else:
                # this will flip the order of the y values for ers
                yg = (nrows - i) * cellsize

            for j in range(ncols):
                xg = j * cellsize
                k = i * ncols + j

                grid_points[k, 0] = xg
                grid_points[k, 1] = yg

        grid_values = num.zeros(ncols * nrows, num.float)

        eval_grid(nrows, ncols, NODATA_value, grid_points,
                  vertex_points.flatten(), volumes, result, grid_values)
        return grid_values.flatten()

    grid_values = calc_grid_values(vertex_points, volumes, result)

    if verbose:
        log.critical('Interpolated values are in [%f, %f]' %
                     (num.min(grid_values), num.max(grid_values)))

    # Assign NODATA_value to all points outside bounding polygon (from interpolation mesh)


#    P = interp.mesh.get_boundary_polygon()
#    outside_indices = outside_polygon(grid_points, P, closed=True)

    if out_ext == '.ers':
        # setup ERS header information
        grid_values = num.reshape(grid_values, (nrows, ncols))
        header = {}
        header['datum'] = '"' + datum + '"'
        # FIXME The use of hardwired UTM and zone number needs to be made optional
        # FIXME Also need an automatic test for coordinate type (i.e. EN or LL)
        header['projection'] = '"UTM-' + str(zone) + '"'
        header['coordinatetype'] = 'EN'
        if header['coordinatetype'] == 'LL':
            header['longitude'] = str(newxllcorner)
            header['latitude'] = str(newyllcorner)
        elif header['coordinatetype'] == 'EN':
            header['eastings'] = str(newxllcorner)
            header['northings'] = str(newyllcorner)
        header['nullcellvalue'] = str(NODATA_value)
        header['xdimension'] = str(cellsize)
        header['ydimension'] = str(cellsize)
        header['value'] = '"' + quantity + '"'
        #header['celltype'] = 'IEEE8ByteReal'  #FIXME: Breaks unit test

        #Write
        if verbose:
            log.critical('Writing %s' % name_out)

        import ermapper_grids

        ermapper_grids.write_ermapper_grid(name_out, grid_values, header)

        fid.close()

    else:
        # Write to Ascii format
        # Write prj file
        prjfile = basename_out + '.prj'

        if verbose: log.critical('Writing %s' % prjfile)
        prjid = open(prjfile, 'w')
        prjid.write('Projection    %s\n' % 'UTM')
        prjid.write('Zone          %d\n' % zone)
        prjid.write('Datum         %s\n' % datum)
        prjid.write('Zunits        NO\n')
        prjid.write('Units         METERS\n')
        prjid.write('Spheroid      %s\n' % datum)
        prjid.write('Xshift        %d\n' % false_easting)
        prjid.write('Yshift        %d\n' % false_northing)
        prjid.write('Parameters\n')
        prjid.close()

        if verbose: log.critical('Writing %s' % name_out)

        ascid = open(name_out, 'w')

        ascid.write('ncols         %d\n' % ncols)
        ascid.write('nrows         %d\n' % nrows)
        ascid.write('xllcorner     %d\n' % newxllcorner)
        ascid.write('yllcorner     %d\n' % newyllcorner)
        ascid.write('cellsize      %f\n' % cellsize)
        ascid.write('NODATA_value  %d\n' % NODATA_value)

        #Get bounding polygon from mesh
        #P = interp.mesh.get_boundary_polygon()
        #inside_indices = inside_polygon(grid_points, P)

        # change printoptions so that a long string of zeros in not
        # summarized as [0.0, 0.0, 0.0, ... 0.0, 0.0, 0.0]
        #printoptions = num.get_printoptions()
        #num.set_printoptions(threshold=sys.maxint)

        # e.g. number_of_decimal_places == 3 gives the format '%.3e'
        format = '%.' + '%g' % number_of_decimal_places + 'e'
        for i in range(nrows):
            if verbose and i % (old_div((nrows + 10), 10)) == 0:
                log.critical('Doing row %d of %d' % (i, nrows))

            # Rows are written top-down, so index from the last grid row.
            base_index = (nrows - i - 1) * ncols

            slice = grid_values[base_index:base_index + ncols]

            num.savetxt(ascid, slice.reshape(1, ncols), format, ' ')

        #Close
        ascid.close()
        fid.close()

        return basename_out
示例#32
0
文件: s3.py 项目: yushu-liu/papermill
 def __repr__(self):
     # Delegate to str(self).
     # NOTE(review): if this class does not define __str__, str() falls back
     # to __repr__ and this recurses infinitely — confirm __str__ is defined
     # on the enclosing class (header not visible in this chunk).
     return str(self)