Example #1
import pathlib
import os
import types

import pyflexconfig

config = types.SimpleNamespace(
    HOME=os.getenv("HOME", "unset")
)


def custom_parser(path):
    # Stupid custom parser that just adds an option
    result: dict = pyflexconfig.pure_python_parser(path)
    result["ADDED_BY_PARSER"] = "anything"
    return result


def filters_nothing_more(conf):
    # We behave just like the standard filter ;o)
    pyflexconfig.keep_upper_names(conf)


def bootstrap():
    global config
    defaultsettings_path = pathlib.Path(__file__).resolve().parent / "defaultsettings.py"
    pyflexconfig.bootstrap(config, parser=custom_parser, defaults_path=defaultsettings_path,
                           custom_path_envvar="CUSTOM_OPTIONS_FILE", filter_=filters_nothing_more)
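A minimal usage sketch of the bootstrap above. The module name myconfig, the contents of defaultsettings.py, and the option names are illustrative assumptions, not part of the original example:

# defaultsettings.py (sits next to the module above; only UPPERCASE
# names survive the keep_upper_names filter)
DEBUG = False
DATA_DIR = "/var/data"

# application startup (assuming the module above is saved as myconfig.py)
import myconfig

myconfig.bootstrap()
print(myconfig.config.DEBUG)            # False, loaded from defaultsettings.py
print(myconfig.config.ADDED_BY_PARSER)  # "anything", injected by custom_parser
# Exporting CUSTOM_OPTIONS_FILE=/path/to/overrides.py layers extra options on top.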
Example #2
import logging
logging.basicConfig(level=logging.INFO)

from ..experiments_security_game_one import Train
# import matplotlib.pyplot as plt
import numpy as np
import os
import types

args = types.SimpleNamespace()

args.split_settings = 'const_val'
args.val_size = 2000

args.loss = 'vlogloss'
args.datasize = 200
args.lr = 0.005
args.nEpochs = 2000
args.monitorFreq = 5
args.fracval = 0.3
args.batchsize = 128
args.tie_initial_raiseval = True
args.uniform_dist = False
args.single_feature = 1
save_path_template = './results/secgame/secgameone_size200_SLOWrmsprop_bsize_vonly/%d_%d/'
args.max_val = 2.
args.success_rates = '0.5,0.5'
args.static_rewards = 1

args.momentum = 0.1
args.optimizer = 'rmsprop'
args.fixbets = 10
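Nothing here depends on argparse: the namespace is a drop-in stand-in for a parsed-arguments object, so the same experiment code can be driven programmatically. A self-contained illustration of that equivalence:

import argparse
import types

parser = argparse.ArgumentParser()
parser.add_argument('--lr', type=float, default=0.005)
via_argparse = parser.parse_args(['--lr', '0.005'])

via_namespace = types.SimpleNamespace(lr=0.005)
assert via_argparse.lr == via_namespace.lr  # interchangeable attribute access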
Example #3
 def setUp(self):
     self.config = types.SimpleNamespace()
Example #4
 def lex(self, filename: str) -> T.Generator[Token, None, None]:
     line_start = 0
     lineno = 1
     loc = 0
     par_count = 0
     bracket_count = 0
     curl_count = 0
     col = 0
     while loc < len(self.code):
         matched = False
         value = None  # type: T.Union[str, bool, int]
         for (tid, reg) in self.token_specification:
             mo = reg.match(self.code, loc)
             if mo:
                 curline = lineno
                 curline_start = line_start
                 col = mo.start() - line_start
                 matched = True
                 span_start = loc
                 loc = mo.end()
                 span_end = loc
                 bytespan = (span_start, span_end)
                 match_text = mo.group()
                 if tid == 'ignore' or tid == 'comment':
                     break
                 elif tid == 'lparen':
                     par_count += 1
                 elif tid == 'rparen':
                     par_count -= 1
                 elif tid == 'lbracket':
                     bracket_count += 1
                 elif tid == 'rbracket':
                     bracket_count -= 1
                 elif tid == 'lcurl':
                     curl_count += 1
                 elif tid == 'rcurl':
                     curl_count -= 1
                 elif tid == 'dblquote':
                     raise ParseException('Double quotes are not supported. Use single quotes.', self.getline(line_start), lineno, col)
                 elif tid in {'string', 'fstring'}:
                     # Handle here and not on the regexp to give a better error message.
                     if match_text.find("\n") != -1:
                         msg = ParseException("Newline character in a string detected, use ''' (three single quotes) "
                                              "for multiline strings instead.\n"
                                              "This will become a hard error in a future Meson release.",
                                              self.getline(line_start), lineno, col)
                         mlog.warning(msg, location=BaseNode(lineno, col, filename))
                     value = match_text[2 if tid == 'fstring' else 1:-1]
                     try:
                         value = ESCAPE_SEQUENCE_SINGLE_RE.sub(decode_match, value)
                     except MesonUnicodeDecodeError as err:
                         raise MesonException(f"Failed to parse escape sequence: '{err.match}' in string:\n  {match_text}")
                 elif tid == 'multiline_string':
                     tid = 'string'
                     value = match_text[3:-3]
                     lines = match_text.split('\n')
                     if len(lines) > 1:
                         lineno += len(lines) - 1
                         line_start = mo.end() - len(lines[-1])
                 elif tid == 'number':
                     value = int(match_text, base=0)
                 elif tid == 'eol_cont':
                     lineno += 1
                     line_start = loc
                     break
                 elif tid == 'eol':
                     lineno += 1
                     line_start = loc
                     if par_count > 0 or bracket_count > 0 or curl_count > 0:
                         break
                 elif tid == 'id':
                     if match_text in self.keywords:
                         tid = match_text
                     else:
                         if match_text in self.future_keywords:
                             mlog.warning(f"Identifier '{match_text}' will become a reserved keyword in a future release. Please rename it.",
                                          location=types.SimpleNamespace(filename=filename, lineno=lineno))
                         value = match_text
                 yield Token(tid, filename, curline_start, curline, col, bytespan, value)
                 break
         if not matched:
             raise ParseException('lexer', self.getline(line_start), lineno, col)
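The par_count/bracket_count/curl_count bookkeeping exists so that newlines inside open parentheses, brackets, or braces produce no eol token, which is how implicit line continuation works. A stripped-down, self-contained illustration of just that trick (independent of Meson's actual Token and ParseException classes):

import re

def tiny_lex(code):
    specs = [('lparen', re.compile(r'\(')), ('rparen', re.compile(r'\)')),
             ('eol', re.compile(r'\n')), ('id', re.compile(r'[a-z]+')),
             ('ws', re.compile(r'[ \t]+'))]
    loc, par_count = 0, 0
    while loc < len(code):
        for tid, reg in specs:
            mo = reg.match(code, loc)
            if mo:
                loc = mo.end()
                if tid == 'lparen':
                    par_count += 1
                elif tid == 'rparen':
                    par_count -= 1
                if tid == 'ws' or (tid == 'eol' and par_count > 0):
                    break  # whitespace, or a newline inside parens: no token
                yield (tid, mo.group())
                break
        else:
            raise ValueError('no token matched at offset %d' % loc)

# the newline inside the parentheses is swallowed:
print(list(tiny_lex('foo(\nbar)')))
# [('id', 'foo'), ('lparen', '('), ('id', 'bar'), ('rparen', ')')]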
Example #5
	def test_empty_simple_namespace(self):
		ns = types.SimpleNamespace()
		formatted = FancyPrinter().pformat(ns)
		assert formatted == "namespace()"
Example #6
def main(argv):

    with open(os.path.join(clientDir, 'config', 'login.json')) as f:
        login = json.load(f)
    # initialize thread channels
    thcmds = {}
    thcmds['tgApi'] = Queue()
    thcmds['bot'] = Queue()
    ths = {}
    ths['tgApi'] = tgApi(
        cmdQueue=thcmds,
        token=login['telegram']['token'],
        botname=login['telegram']['botname'],
        authgroup=login['telegram']['authgroup'],
        itag="tgApi",
        otag="bot",
    )
    print(
        'telegram:',
        login['telegram']['token'],
        login['telegram']['botname'],
        login['telegram']['authgroup'],
    )
    # prepare threadings
    # initialize threadings
    for key, th in ths.items():
        th.daemon = True
        th.start()
    # initialize process channels
    manager = Manager()
    smpCmds = {}
    rmpCmds = {}
    impReqs = {}
    mps = {}
    is_on = {}
    is_auto = {}

    thcmds['tgApi'].put('client starts')
    pkey = 'bot'

    # socket
    HOST = login['server']['host']
    PORT = login['server']['port']
    print(
        'MT Communication @',
        HOST,
        PORT,
    )

    sel = selectors.DefaultSelector()
    lsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    lsock.bind((HOST, PORT))
    lsock.listen()
    print('listening on', (HOST, PORT))
    lsock.setblocking(False)
    sel.register(lsock, selectors.EVENT_READ, data=None)
    while True:
        events = sel.select(timeout=None)
        for key, mask in events:
            if key.data is None:
                # accept_wrapper(key.fileobj)
                sock = key.fileobj
                conn, addr = sock.accept()  # Should be ready to read
                print('accepted connection from', addr)
                conn.setblocking(False)
                data = types.SimpleNamespace(addr=addr, inb=b'', outb=b'')
                events = selectors.EVENT_READ | selectors.EVENT_WRITE
                sel.register(conn, events, data=data)
            else:
                # service_connection(key, mask)
                sock = key.fileobj
                data = key.data
                if mask & selectors.EVENT_READ:
                    recv_data = sock.recv(1024)  # Should be ready to read
                    if recv_data:
                        data.outb += recv_data
                    else:
                        print('closing connection to', data.addr)
                        sel.unregister(sock)
                        sock.close()
                if mask & selectors.EVENT_WRITE:
                    if data.outb:
                        try:
                            thcmds['tgApi'].put(data.outb.decode())
                        except Exception as e:
                            print(e)
                        print('echoing', repr(data.outb), 'to', data.addr)
                        sent = sock.send(data.outb)  # Should be ready to write
                        data.outb = data.outb[sent:]
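A throwaway client for poking the loop above. The host/port come from login.json in the real script; the localhost values here are placeholders:

import socket

with socket.create_connection(('127.0.0.1', 9000)) as s:
    s.sendall(b'hello')
    # the server queues the bytes for the telegram thread and echoes them back
    print(s.recv(1024))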
Example #7
    def _update_phantoms(self):
        if not self.view.is_primary():
            return
        # not sure why this happens, but ignore these cases
        if self.view.window() is None:
            return
        if not ghostscript_installed():
            return

        view = self.view
        window = view.window()

        # see #980; in any case window is None only for newly created views
        # where there isn't much point in running the phantom update.
        if (window is None or not any(
                window.active_view_in_group(g) == view
                for g in range(window.num_groups()))):
            return

        # TODO we may only want to apply if the view is visible
        # if view != view.window().active_view():
        #     return

        # update the regions of the phantoms
        self._update_phantom_regions()

        new_phantoms = []
        job_args = []
        if not _IS_ENABLED or self.visible_mode == "none":
            if not self.phantoms:
                return
            scopes = []
        elif self.visible_mode == "all":
            scopes = view.find_by_selector(
                "text.tex.latex meta.environment.math")
        elif self.visible_mode == "selected":
            math_scopes = view.find_by_selector(
                "text.tex.latex meta.environment.math")
            scopes = [
                scope for scope in math_scopes if any(
                    scope.contains(sel) for sel in view.sel())
            ]
        else:
            self.visible_mode = "none"
            scopes = []

        # avoid creating a preview if someone just inserts $|$ and
        # most likely wants an inline rather than a display block
        def is_dollar_snippet(scope):
            is_selector = view.score_selector(
                scope.begin(), "meta.environment.math.block.dollar")
            sel_at_start = any(sel.empty() and sel.b == scope.begin() + 1
                               for sel in view.sel())
            return is_selector and sel_at_start

        scopes = [scope for scope in scopes if not is_dollar_snippet(scope)]

        color = self.color
        # if no foreground color is defined use the default text color
        if not color:
            color = get_color(view)

        style_kwargs = {
            "color": color,
            "background_color": self.background_color
        }

        for scope in scopes:
            content = view.substr(scope)
            multline = "\n" in content

            layout = (sublime.LAYOUT_BLOCK
                      if multline or self.visible_mode == "selected"
                      else sublime.LAYOUT_INLINE)
            BE_BLOCK = view.score_selector(scope.begin(),
                                           "meta.environment.math.block.be")

            # avoid jumping around in begin end block
            if multline and BE_BLOCK:
                region = sublime.Region(scope.end() + 4)
            else:
                region = sublime.Region(scope.end())

            try:
                p = next(e for e in self.phantoms if e.region == region)
                if p.content == content:
                    new_phantoms.append(p)
                    continue

                # update the content and the layout
                p.content = content
                p.layout = layout
            except StopIteration:
                p = types.SimpleNamespace(id=None,
                                          region=region,
                                          content=content,
                                          layout=layout,
                                          update_time=0)

            # generate the latex template
            latex_document = self._create_document(scope, color)

            # create a string, which uniquely identifies the compiled document
            id_str = "\n".join([
                str(_version), self.latex_program,
                str(_density),
                str(_hires), color, latex_document
            ])
            base_name = cache.hash_digest(id_str)
            image_path = os.path.join(temp_path, base_name + _IMAGE_EXTENSION)

            # if the file exists as an image update the phantom
            if os.path.exists(image_path):
                if p.id is not None:
                    view.erase_phantom_by_id(p.id)
                    _cancel_image_jobs(view.id(), p)
                html_content = _generate_html(view, image_path, style_kwargs)
                p.id = view.add_phantom(self.key,
                                        region,
                                        html_content,
                                        layout,
                                        on_navigate=self.on_navigate)
                new_phantoms.append(p)
                continue
            # if neither the file nor the phantom exists, create a
            # placeholder phantom
            elif p.id is None:
                p.id = view.add_phantom(self.key,
                                        region,
                                        _wrap_html("\u231B", **style_kwargs),
                                        layout,
                                        on_navigate=self.on_navigate)

            job_args.append({
                "latex_document": latex_document,
                "base_name": base_name,
                "color": color,
                "p": p,
                "cont": self._make_cont(p, image_path, time.time(), style_kwargs)
            })

            new_phantoms.append(p)

        # delete deprecated phantoms
        delete_phantoms = [x for x in self.phantoms if x not in new_phantoms]
        for p in delete_phantoms:
            if p.region != sublime.Region(-1):
                view.erase_phantom_by_id(p.id)
        _cancel_image_jobs(view.id(), delete_phantoms)

        # set the new phantoms
        self.phantoms = new_phantoms

        # run the jobs to create the remaining images
        if job_args:
            _extend_image_jobs(view.id(), self.latex_program, job_args)
            _run_image_jobs()
Example #8

@pytest.fixture
def tokendata_zero_scopes():
    now = int(time.time())
    return {
        'iat': now,
        'exp': now + 30,
        'scopes': [],
        'sub': '*****@*****.**',
    }


ok_response = types.SimpleNamespace(status_code=200)


@pytest.fixture
def middleware():
    reload_settings(TESTSETTINGS)
    return authorization_middleware(lambda r: ok_response)


def test_missing_conf():
    with pytest.raises(config.AuthzConfigurationError):
        authorization_middleware(None)


def test_bad_jwks():
    with pytest.raises(config.AuthzConfigurationError):
        ...  # body truncated in the source snippet
Example #9
def create_request_no_auth_header(path='/', method='GET'):
    return types.SimpleNamespace(META={}, path=path, method=method)
Example #10
def fake_args(request):
    ns = types.SimpleNamespace()
    ns.backend = 'webengine' if request.config.webengine else 'webkit'
    objreg.register('args', ns)
    yield ns
    objreg.delete('args')
Example #11
def generate_code(request, response):

    # MainFilePath = os.path.join(".", "src/main.cpp")
    # Find all proto files
    file_map = OrderedDict()
    for proto_file in request.proto_file:
        file_map[proto_file.name] = proto_file

    # Create an ordered dictionary for all Protobuf packages
    packages_dict = OrderedDict()

    # Find the *Notification messages to add them to a list
    notification_messages_list = []

    # Find Messages and RPC's that have the same name within the proto files because ROS doesn't handle namespaces
    messages_unordered_set = set()
    rpcs_unordered_set = set()
    duplicated_messages_unordered_set = set()
    duplicated_rpcs_unordered_set = set()
    for filename, proto_file in file_map.items():
        for item, package in traverse(proto_file):

            # Skip the packages we don't want to generate
            if package.split(".")[-1] in NON_GENERATED_PACKAGES:
                continue

            packages_dict[package] = DetailedPackage(package)
            # If the item is a message or an enum
            if not isinstance(item, ServiceDescriptorProto):
                if item.name not in messages_unordered_set:
                    messages_unordered_set.add(item.name)
                    # Add the notifications to a list to match them to their respective RPCs later (because their package may differ from the RPC's)
                    if re.match(r"\w+Notification", item.name):
                        notification_messages_list.append(
                            DetailedMessage(item, package, False))
                else:
                    duplicated_messages_unordered_set.add(item.name)
            # If the item is a Protobuf service (a collection of methods)
            else:
                for method in item.method:
                    if method.name not in rpcs_unordered_set:
                        rpcs_unordered_set.add(method.name)
                    else:
                        duplicated_rpcs_unordered_set.add(method.name)

    # Remove old generated files and create new directories
    for package in packages_dict.values():
        for s in ['srv', 'msg']:
            try:
                shutil.rmtree("../{}/generated/{}".format(
                    s, package.short_name_lowercase_with_underscores))
            except FileNotFoundError:
                pass
            os.mkdir("../{}/generated/{}".format(
                s, package.short_name_lowercase_with_underscores))
    shutil.rmtree("../src/generated")
    shutil.rmtree("../include/kortex_driver/generated")
    os.mkdir("../src/generated")
    os.mkdir("../include/kortex_driver/generated")

    ###########################################
    # Parse the proto files to add the messages and RPC's to the DetailedPackage's
    for filename, proto_file in file_map.items():

        # For every item in the current proto file
        for item, package in traverse(proto_file):

            # Skip the packages we don't want to generate
            if package.split(".")[-1] in NON_GENERATED_PACKAGES:
                continue

            current_package = packages_dict[package]

            # If this is an enum
            if isinstance(item, EnumDescriptorProto):
                is_enum_duplicated = item.name in duplicated_messages_unordered_set
                current_package.addEnum(
                    DetailedMessage(item, package, is_enum_duplicated))

            # If this is a message
            if isinstance(item, DescriptorProto):
                is_message_duplicated = item.name in duplicated_messages_unordered_set
                duplicated_fields = filter(
                    lambda x: x.type_name.split(".")[-1] in
                    duplicated_messages_unordered_set, item.field)
                temp_message = DetailedMessage(item, package,
                                               is_message_duplicated,
                                               duplicated_fields)
                # Find if the message contains oneof
                message_contains_one_of = False
                for member in item.field:
                    # If a member is part of a one_of, it will have this additional field.
                    if member.HasField("oneof_index"):
                        message_contains_one_of = True
                        break

                # Register every one_of in the message
                if message_contains_one_of:

                    # Find the one_of names
                    for one_of in item.ListFields(
                    )[-1][1]:  # This is the very obscure way to get the one_of's name
                        temp_message.one_of_list.append(
                            DetailedOneOf(one_of.name))

                    # Find the fields and what one_of they belong to
                    for member in item.field:
                        # If a member is part of a one_of, add it to the DetailedOneOf it belongs to
                        if member.HasField("oneof_index"):
                            is_field_duplicated = member.type_name.split(
                                ".")[-1] in duplicated_messages_unordered_set
                            temp_message.one_of_list[
                                member.oneof_index].addField(
                                    member, is_field_duplicated)

                current_package.addMessage(temp_message)

            # If this is a Protobuf service (a group of RPC's)
            if isinstance(item, ServiceDescriptorProto):
                # Register every RPC in the Protobuf service
                for rpc in item.method:
                    # Do not generate the services that cause generation bugs
                    if rpc.name in FORBIDDEN_RPC_METHODS:
                        continue
                    is_rpc_duplicated = rpc.name in duplicated_rpcs_unordered_set
                    is_input_type_duplicated = rpc.input_type.split(
                        ".")[-1] in duplicated_messages_unordered_set
                    is_output_type_duplicated = rpc.output_type.split(
                        ".")[-1] in duplicated_messages_unordered_set
                    notification_message = list(
                        filter(
                            lambda x: x.name == rpc.name.replace(
                                "Topic", "Notification"),
                            notification_messages_list))

                    temp_rpc = DetailedRPC(rpc, package, is_rpc_duplicated,
                                           is_input_type_duplicated,
                                           is_output_type_duplicated,
                                           notification_message)
                    current_package.addRPC(temp_rpc)

    ###########################################
    # Generate the include names with the packages that contain messages
    packages_with_messages = filter(lambda x: len(x.messages) > 0,
                                    packages_dict.values())
    include_file_names = []
    for p in packages_with_messages:
        for s in ["proto", "ros"]:
            include_file_names.append(
                "kortex_driver/generated/{}_{}_converter.h".format(
                    p.short_name.lower(), s))

    # Generate the ROS files for each package
    for package in packages_dict.values():

        # Generate the enums
        for enum in package.enums:
            this_enum_context = types.SimpleNamespace()
            this_enum_context.item = enum
            ros_enum_path = os.path.join(
                "..",
                "msg/generated/{}{}{}.msg".format(enum.containing_folder,
                                                  enum.prepend_message_name,
                                                  enum.name))
            with open(ros_enum_path, 'wt') as serviceFile:
                serviceFile.write(
                    render("../templates/ros_enum.msg.jinja2",
                           this_enum_context.__dict__))

        # Generate the messages
        for message in package.messages:
            this_message_context = types.SimpleNamespace()
            this_message_context.item = message
            this_message_context.field_descriptor_class = FieldDescriptor

            # Generate the one_of's for the message
            for detailed_one_of in message.one_of_list:  # not empty
                this_message_context.detailed_one_of = detailed_one_of
                ros_oneofPath = os.path.join(
                    "..", "msg/generated/{}{}{}_{}.msg".format(
                        message.containing_folder,
                        message.prepend_message_name, message.name,
                        detailed_one_of.name))
                with open(ros_oneofPath, 'wt') as serviceFile:
                    serviceFile.write(
                        render("../templates/ros_oneof.msg.jinja2",
                               this_message_context.__dict__))

            # Generate the message
            ros_messagePath = os.path.join(
                "..",
                "msg/generated/{}{}{}.msg".format(message.containing_folder,
                                                  message.prepend_message_name,
                                                  message.name))
            with open(ros_messagePath, 'wt') as serviceFile:
                serviceFile.write(
                    render("../templates/ros_message.msg.jinja2",
                           this_message_context.__dict__))

        # Generate the RPC's
        for rpc in package.methods:
            this_rpc_context = types.SimpleNamespace()
            this_rpc_context.item = rpc
            ros_servicePath = os.path.join(
                "..", "srv/generated/{}/{}{}{}.srv".format(
                    package.short_name_lowercase_with_underscores,
                    rpc.prepend_rpc_package_name, rpc.prepend_on_notification,
                    rpc.name))
            with open(ros_servicePath, 'wt') as serviceFile:
                serviceFile.write(
                    render("../templates/ros_service.srv.jinja2",
                           this_rpc_context.__dict__))

        # Generate the Proto-ROS converters (C++ files)
        this_package_context = types.SimpleNamespace()
        this_package_context.package = package

        if package.messages:  # package contains at least one message
            # Proto converter header file
            current_header_filename = "kortex_driver/generated/{}_proto_converter.h".format(
                package.short_name.lower())
            this_package_context.include_file_names = filter(
                lambda x: "proto_converter" in x and x !=
                current_header_filename, include_file_names)
            with open(os.path.join("..", "include/" + current_header_filename),
                      'wt') as converterFile:
                converterFile.write(
                    render("../templates/proto_converter.h.jinja2",
                           this_package_context.__dict__))
            # Proto converter cpp file
            this_package_context.current_header_filename = current_header_filename
            with open(
                    os.path.join(
                        "..", "src/generated/{}_proto_converter.cpp".format(
                            package.short_name.lower())),
                    'wt') as converterFile:
                converterFile.write(
                    render("../templates/proto_converter.cpp.jinja2",
                           this_package_context.__dict__))
            # ROS converter header file
            current_header_filename = "kortex_driver/generated/{}_ros_converter.h".format(
                package.short_name.lower())
            this_package_context.include_file_names = filter(
                lambda x: "ros_converter" in x and x !=
                current_header_filename, include_file_names)
            with open(os.path.join("..", "include/" + current_header_filename),
                      'wt') as converterFile:
                converterFile.write(
                    render("../templates/ros_converter.h.jinja2",
                           this_package_context.__dict__))
            # ROS converter cpp file
            this_package_context.current_header_filename = current_header_filename
            with open(
                    os.path.join(
                        "..", "src/generated/{}_ros_converter.cpp".format(
                            package.short_name.lower())),
                    'wt') as converterFile:
                converterFile.write(
                    render("../templates/ros_converter.cpp.jinja2",
                           this_package_context.__dict__))

        # Generate the ServiceProxy's for every Kortex API method
        if package.methods:  # package contains at least one RPC
            current_header_filename = "kortex_driver/generated/{}_services.h".format(
                package.short_name.lower())
            this_package_context.current_header_filename = current_header_filename
            this_package_context.include_file_names = include_file_names
            with open(os.path.join("..", "include/" + current_header_filename),
                      'wt') as services_file:
                services_file.write(
                    render("../templates/services.h.jinja2",
                           this_package_context.__dict__))
            with open(
                    os.path.join(
                        "..", "src/generated/{}_services.cpp".format(
                            package.short_name.lower())),
                    'wt') as services_file:
                services_file.write(
                    render("../templates/services.cpp.jinja2",
                           this_package_context.__dict__))

    # Delete the unused folders we created that ended up empty
    for package in packages_dict.values():
        for s in ['srv', 'msg']:
            if len(
                    os.listdir('../{}/generated/{}'.format(
                        s,
                        package.short_name_lowercase_with_underscores))) == 0:
                shutil.rmtree("../{}/generated/{}".format(
                    s, package.short_name_lowercase_with_underscores))
Example #12
def loadTestArgs():
    templatePath = os.path.abspath(os.path.join("test", "si.in"))
    nameSpace = types.SimpleNamespace(elementList=["Mg"],
                                      templatePath=templatePath)
    return nameSpace
Example #13
def test_return_for_single_tag(monkeypatch, capsys):
    cssdata = ()
    assert cssedit.return_for_single_tag(cssdata) == ''
    cssdata = (types.SimpleNamespace(style=MockStyle()), )
    assert cssedit.return_for_single_tag(cssdata) == 'style text'
Example #14
    def __init__(self, server_id, address, n):
        """
    Sets up this server to (1) connect to other servers and (2) connect to client
    """
        self.sel = selectors.DefaultSelector()
        self.logger = logging.getLogger('Server')
        logging.basicConfig(filename='test.log', level=logging.DEBUG)

        self.client_port: int = address[1]
        self.server_id: int = server_id
        self.server_port: int = 20000 + self.server_id

        # establish a listening TCP endpoint to client
        self.client_lsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.client_lsock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        # establish a listening TCP endpoint for other servers
        self.server_lsock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        self.server_lsock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)

        self.n = n

        # bind to client-facing port; assumed guaranteed to be available
        while True:
            try:
                self.client_lsock.bind(address)
                break
            except OSError:
                pass

        # bind to server-facing port; available by port assigning convention
        while True:
            try:
                self.server_lsock.bind(('localhost', self.server_port))
                break
            except OSError:
                pass

        self.client_csock = None

        # map server IDs to sockets
        self.id_to_sock = {}

        # establish connections to all preceding servers
        for i in range(n):
            if i == self.server_id:
                continue

            # create a connecting socket for servers that have been started
            addr = 20000 + i
            print(self.server_id, 'connecting to', addr)
            csock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            csock.setblocking(False)
            csock.connect_ex(('localhost', addr))
            events = selectors.EVENT_READ | selectors.EVENT_WRITE
            data = types.SimpleNamespace(connid=i, outb=b'')
            self.sel.register(csock, events, data=data)

        # finish setting up our client listening socket
        self.client_lsock.listen(int(n))
        self.client_lsock.setblocking(False)
        self.sel.register(self.client_lsock, selectors.EVENT_READ, data=None)

        # finish setting up our server listening socket
        self.server_lsock.listen(int(n))
        self.server_lsock.setblocking(False)
        self.sel.register(self.server_lsock, selectors.EVENT_READ, data=None)

        self.lock = threading.RLock()

        # raft-specific persistent state on ALL servers
        self.current_term = 0
        self.voted_for = None  # reset always when current term changes
        self.log = {}  # log[index] = (cmd, term)

        # volatile state on ALL servers
        self.commit_index = 0
        self.last_applied = 0

        self.state = 'follower'  # valid states: 'follower', 'leader', 'candidate'
        self.store = {}
        self.application_thr = threading.Thread(name='application_thr',
                                                target=self.apply_committed)
        self.application_thr.daemon = True
        self.application_thr_cv = threading.Condition()
        self.application_thr.start()
        # https://www.cl.cam.ac.uk/~ms705/pub/papers/2015-osr-raft.pdf section 4
        self.election_timeout_lower = 5000
        self.election_timeout = random.randint(
            self.election_timeout_lower, self.election_timeout_lower *
            2)  # slowed to allow sufficient time to see changes
        self.election_timer = threading.Timer(self.election_timeout / 1000,
                                              self.convert_to_candidate)
        self.election_timer.daemon = True

        self.leader_timeout = self.election_timeout_lower / 2
        self.heartbeat_timer = threading.Timer(self.leader_timeout / 1000,
                                               self.send_heartbeats)
        self.heartbeat_timer.daemon = True
        self.leader = None

        # volatile state ONLY for leaders, None otherwise
        self.match_index = None
        self.next_index = None
        # TODO. array of queues for retrying indefinitely for servers that have failed
        self.pending_rpcs = None

        # state for candidates
        self.vote_count = 0
Example #15
def get_token_stream(file_obj):
    """
    returns generator for stream of tokens read from IO object

    :param file_obj: io-like object with read method
    :returns: generator that returns Token objects
    """

    token_reader = RadixReader.from_list(TOKEN_LIST)

    rollover = False
    read_buffer = []

    closure = types.SimpleNamespace()
    closure.line_num = 1
    closure.char_num = 0

    def read_char():
        if len(read_buffer) == 0:
            closure.char_num += 1
            c = file_obj.read(1)
            if c == "\n":
                closure.line_num += 1
            token_str = TokenString(c,
                                    line_num=closure.line_num,
                                    char_num=closure.char_num)
            return token_str
        else:
            return read_buffer.pop(0)

    def read_while(read_buffer, char_buffer, pred):
        while True:
            c = read_char()
            if c == "" or not pred(c):
                break

            char_buffer.append(c)

        return [c] + read_buffer

    def maybe_implicit_token(string, token_class):
        token_reader.reset()
        if (not token_reader.add(string)
                or token_reader.last_string != string):
            return token_class(string)
        return string

    while True:
        c = read_char()

        if c == "":
            break

        if is_stop_char(c):
            continue

        # read variable names
        if is_letter(c):
            char_buffer = [c]
            read_buffer = read_while(read_buffer, char_buffer,
                                     valid_string_char)

            yield (maybe_implicit_token(TokenString.join(char_buffer),
                                        Tokens._Variable))
            continue

        # read numbers
        if is_digit(c):
            char_buffer = [c]
            read_buffer = read_while(read_buffer, char_buffer, is_digit)

            yield (maybe_implicit_token(TokenString.join(char_buffer),
                                        Tokens._Number))
            continue

        accepted_any = False
        token_reader.reset()
        while token_reader._add(c):
            accepted_any = True
            c = read_char()
            if c == "":
                break

        if not accepted_any:
            yield c
        else:
            unmatched_buffer = [x for x in token_reader.unmatched]
            read_buffer = (unmatched_buffer + [c] + read_buffer)

            result = token_reader.last_string
            if result == "":
                continue

            yield result

            if result == Tokens.BEGIN_COMMENT:
                # read in rest of comment
                char_buffer = []
                read_buffer = read_while(read_buffer, char_buffer,
                                         lambda x: x != "\n")

                yield Tokens._Comment(TokenString.join(char_buffer))
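A usage sketch, assuming the module-level helpers the generator leans on (TOKEN_LIST, RadixReader, TokenString, Tokens) are available as the code above implies:

import io

for token in get_token_stream(io.StringIO("foo = 12\n")):
    print(repr(token))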
Example #16
    def get_with_metadata(self,
                          *,
                          count=None,
                          as_string=False,
                          as_numpy=True,
                          timeout=None,
                          with_ctrlvars=False,
                          use_monitor=True,
                          form=None,
                          as_namespace=False):
        """Returns a dictionary of the current value and associated metadata

        Parameters
        ----------
        count : int, optional
             explicitly limit count for array data
        as_string : bool, optional
            flag(True/False) to get a string representation
            of the value.
        as_numpy : bool, optional
            use numpy array as the return type for array data.
        timeout : float, optional
            maximum time to wait for value to be received.
            (default = 0.5 + log10(count) seconds)
        use_monitor : bool, optional
            use value from latest monitor callback (True, default)
            or to make an explicit CA call for the value.
        form : {'time', 'ctrl', None}
            Optionally change the type of the get request
        as_namespace : bool, optional
            Change the return type to that of a namespace with support for
            tab-completion

        Returns
        -------
        val : dict or namespace
           The dictionary of data, guaranteed to at least have the 'value' key.
           Depending on the request form, other keys may also be present::
               {'precision', 'units', 'status', 'severity', 'enum_strs',
               'status', 'severity', 'timestamp', 'posixseconds',
               'nanoseconds', 'upper_disp_limit', 'lower_disp_limit',
               'upper_alarm_limit', 'upper_warning_limit',
               'lower_warning_limit','lower_alarm_limit', 'upper_ctrl_limit',
               'lower_ctrl_limit'}
           Returns ``None`` if the channel is not connected, `wait=False` was used,
           or the data transfer timed out.
        """
        if form is None:
            form = self.form

        if count is None:
            count = self.default_count

        if timeout is None:
            if count is None:
                timeout = 1.0
            else:
                timeout = 1.0 + log10(max(1, count))

        type_key = 'control' if form == 'ctrl' else form

        if (with_ctrlvars and type_key not in ('control', 'native')):
            md = self.get_with_metadata(count=count,
                                        as_string=as_string,
                                        as_numpy=as_numpy,
                                        timeout=timeout,
                                        with_ctrlvars=False,
                                        use_monitor=use_monitor,
                                        form='control',
                                        as_namespace=False)
        elif use_monitor:
            md = self._args.copy()
        else:
            md = {}

        dt = field_types[type_key][self.type]
        if not as_string and dt in ca.char_types:
            re_map = {
                ChannelType.CHAR: ChannelType.INT,
                ChannelType.CTRL_CHAR: ChannelType.CTRL_INT,
                ChannelType.TIME_CHAR: ChannelType.TIME_INT,
                ChannelType.STS_CHAR: ChannelType.STS_INT
            }
            dt = re_map[dt]
            # TODO if you want char arrays not as_string
            # force no-monitor rather than
            use_monitor = False

        cached_value = self._args['value']

        # trigger going out to get data from the network
        if ((not use_monitor) or (self._auto_monitor_sub is None)
                or (cached_value is None)
                or (count is not None and count > len(cached_value))):
            command = self._caproto_pv.read(data_type=dt,
                                            data_count=count,
                                            timeout=timeout)
            response = _read_response_to_pyepics(self.typefull, command)
            self._args.update(**response)
            md.update(**response)

        if as_string and self.typefull in ca.enum_types:
            enum_strs = self.enum_strs
        else:
            enum_strs = None

        md['value'] = _pyepics_get_value(value=md['raw_value'],
                                         string_value=md['char_value'],
                                         full_type=self.typefull,
                                         native_count=self._args['count'],
                                         requested_count=count,
                                         enum_strings=enum_strs,
                                         as_string=as_string,
                                         as_numpy=as_numpy)

        if as_namespace:
            return types.SimpleNamespace(**md)
        return md
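A sketch of the as_namespace flavor; pv here stands for an already-connected instance of the surrounding class (illustrative usage, not a verbatim API guarantee):

md = pv.get_with_metadata(form='time', as_namespace=True)
print(md.value, md.timestamp, md.severity)  # attributes instead of dict keys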
Example #17
__default_host__ = _os.environ.get('JINA_DEFAULT_HOST', '0.0.0.0')
__ready_msg__ = 'ready and listening'
__stop_msg__ = 'terminated'
__binary_delimiter__ = _os.environ.get(
    'JINA_BINARY_DELIMITER', '460841a0a8a430ae25d9ad7c1f048c57').encode()
__root_dir__ = _os.path.dirname(_os.path.abspath(__file__))

_names_with_underscore = [
    '__version__', '__copyright__', '__license__', '__proto_version__',
    '__default_host__', '__ready_msg__', '__stop_msg__',
    '__binary_delimiter__', '__jina_env__', '__uptime__', '__root_dir__'
]

# ADD GLOBAL NAMESPACE VARIABLES

JINA_GLOBAL = _types.SimpleNamespace()
JINA_GLOBAL.logserver = _types.SimpleNamespace()

import jina.importer as _ji

# driver first, as executor may contain driver
_ji.import_classes('jina.drivers', show_import_table=False, import_once=True)
_ji.import_classes('jina.executors', show_import_table=False, import_once=True)
_ji.import_classes('jina.hub', show_import_table=False, import_once=True)

_signal.signal(_signal.SIGINT, _signal.default_int_handler)


def _set_nofile(nofile_atleast=4096):
    """
    sets nofile soft limit to at least 4096, useful for running matplotlib/seaborn on
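The helper is cut off here; it presumably raises the process RLIMIT_NOFILE soft limit via the resource module (Unix-only). A hedged sketch of that standard pattern, not Jina's verbatim implementation:

import resource

def set_nofile(nofile_atleast=4096):
    # raise the soft fd limit toward nofile_atleast, never above the hard limit
    soft, hard = resource.getrlimit(resource.RLIMIT_NOFILE)
    if soft < nofile_atleast:
        soft = nofile_atleast if hard == resource.RLIM_INFINITY else min(nofile_atleast, hard)
        resource.setrlimit(resource.RLIMIT_NOFILE, (soft, hard))
    return soft, hard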
Example #18
        print(key + ' : ' + str(value))
    print()


def main():
    """
    Main function
    """
    # Print all Flags to confirm parameter settings
    print_flags()

    # Run the training operation
    eval()


FLAGS = types.SimpleNamespace()
if __name__ == '__main__':
    # Command line arguments
    parser = argparse.ArgumentParser()
    parser.add_argument('--model_name', type=str, default=MODEL_NAME_DEFAULT,
                        help='Name of the model type to train (baseline, )')
    parser.add_argument('--checkpoint_path', type=str, default=CHECKPOINT_PATH_DEFAULT,
                        help='Path to a model checkpoint')
    parser.add_argument('--output_dir', type=str, default=OUTPUT_DIR_DEFAULT,
                        help='Directory to write output to')
    parser.add_argument('--data_path', type=str, default=DATA_PATH_DEFAULT,
                        help='Path to the SNLI test dataset')
    parser.add_argument('--embedding_path', type=str, default=EMBEDDING_PATH_DEFAULT,
                        help='Path to word embedding file')
    FLAGS, unparsed = parser.parse_known_args()
Example #19
def test__perform_realtime_02(server):
    event = supriya.patterns.NoteEvent(duration=1.0,
                                       delta=10.0,
                                       frequency=[443, 445, 447])
    server = types.SimpleNamespace(
        node_id_allocator=supriya.realtime.NodeIdAllocator())
    event_products = event._perform_realtime(server=server,
                                             timestamp=100.0,
                                             uuids={})
    assert pytest.helpers.get_objects_as_string(
        event_products, replace_uuids=True) == uqbar.strings.normalize("""
        EventProduct(
            event=NoteEvent(
                delta=10.0,
                duration=1.0,
                frequency=[443, 445, 447],
                ),
            requests=[
                SynthNewRequest(
                    add_action=AddAction.ADD_TO_HEAD,
                    frequency=443,
                    node_id=1000,
                    synthdef=<SynthDef: default>,
                    target_node_id=1,
                    ),
                SynthNewRequest(
                    add_action=AddAction.ADD_TO_HEAD,
                    frequency=445,
                    node_id=1001,
                    synthdef=<SynthDef: default>,
                    target_node_id=1,
                    ),
                SynthNewRequest(
                    add_action=AddAction.ADD_TO_HEAD,
                    frequency=447,
                    node_id=1002,
                    synthdef=<SynthDef: default>,
                    target_node_id=1,
                    ),
                ],
            timestamp=100.0,
            uuid=UUID('A'),
            )
        EventProduct(
            event=NoteEvent(
                delta=10.0,
                duration=1.0,
                frequency=[443, 445, 447],
                ),
            is_stop=True,
            requests=[
                NodeSetRequest(
                    gate=0,
                    node_id=1000,
                    ),
                NodeSetRequest(
                    gate=0,
                    node_id=1001,
                    ),
                NodeSetRequest(
                    gate=0,
                    node_id=1002,
                    ),
                ],
            timestamp=101.0,
            uuid=UUID('A'),
            )
        """)
Example #20
from importlib import machinery, util, invalidate_caches
from importlib.abc import ResourceReader
import io
import os
import os.path
from pathlib import Path, PurePath
from test import support
import unittest
import sys
import tempfile
import types

from . import data01
from . import zipdata01

BUILTINS = types.SimpleNamespace()
BUILTINS.good_name = None
BUILTINS.bad_name = None
if 'errno' in sys.builtin_module_names:
    BUILTINS.good_name = 'errno'
if 'importlib' not in sys.builtin_module_names:
    BUILTINS.bad_name = 'importlib'

EXTENSIONS = types.SimpleNamespace()
EXTENSIONS.path = None
EXTENSIONS.ext = None
EXTENSIONS.filename = None
EXTENSIONS.file_path = None
EXTENSIONS.name = '_testcapi'

Example #21
    '__uptime__',
    '__root_dir__',
]

# Primitive data type,
# note, they must be loaded BEFORE all executors/drivers/... to avoid cyclic imports
from jina.types.ndarray.generic import NdArray
from jina.types.request import Request, Response
from jina.types.message import Message
from jina.types.querylang import QueryLang
from jina.types.document import Document
from jina.types.document.multimodal import MultimodalDocument
from jina.types.arrays import DocumentArray, QueryLangArray

# ADD GLOBAL NAMESPACE VARIABLES
JINA_GLOBAL = _types.SimpleNamespace()
JINA_GLOBAL.scipy_installed = None
JINA_GLOBAL.tensorflow_installed = None
JINA_GLOBAL.torch_installed = None

import jina.importer as _ji

# driver first, as executor may contain driver
_ji.import_classes('jina.drivers', show_import_table=False, import_once=True)
_ji.import_classes('jina.executors', show_import_table=False, import_once=True)
_ji.import_classes('jina.hub', show_import_table=False, import_once=True)

_signal.signal(_signal.SIGINT, _signal.default_int_handler)


def _set_nofile(nofile_atleast=4096):
Example #22
    async def _test_content_db(self, emirrordist_cmd, env, layouts, content_db,
                               distfiles, settings, portdb):
        # Simulate distfile digest change for ContentDB.
        emdisopts = types.SimpleNamespace(content_db=content_db,
                                          distfiles=settings["DISTDIR"])
        with EmirrordistConfig(emdisopts, portdb,
                               asyncio.get_event_loop()) as emdisconf:
            # Copy revisions from bar to foo.
            for revision_key in emdisconf.content_db["filename:{}".format(
                    "bar")]:
                emdisconf.content_db.add(
                    DistfileName("foo", digests=dict(revision_key)))

            # Copy revisions from foo to bar.
            for revision_key in emdisconf.content_db["filename:{}".format(
                    "foo")]:
                emdisconf.content_db.add(
                    DistfileName("bar", digests=dict(revision_key)))

            content_db_state = dict(emdisconf.content_db.items())
            self.assertEqual(content_db_state,
                             dict(emdisconf.content_db.items()))
            self.assertEqual(
                [
                    k[len("filename:"):]
                    for k in content_db_state if k.startswith("filename:")
                ],
                ["bar", "foo"],
            )
            self.assertEqual(content_db_state["filename:foo"],
                             content_db_state["filename:bar"])
            self.assertEqual(len(content_db_state["filename:foo"]), 2)

        for k in distfiles:
            try:
                os.unlink(os.path.join(settings["DISTDIR"], k))
            except OSError:
                pass

        proc = await asyncio.create_subprocess_exec(*emirrordist_cmd, env=env)
        self.assertEqual(await proc.wait(), 0)

        for k in distfiles:
            with open(
                    os.path.join(settings["DISTDIR"], layouts[0].get_path(k)),
                    "rb") as f:
                self.assertEqual(f.read(), distfiles[k])

        with EmirrordistConfig(emdisopts, portdb,
                               asyncio.get_event_loop()) as emdisconf:
            self.assertEqual(content_db_state,
                             dict(emdisconf.content_db.items()))

            # Verify that remove works as expected
            filename = [
                filename for filename in distfiles if filename == "foo"
            ][0]
            self.assertTrue(bool(filename.digests))
            emdisconf.content_db.remove(filename)
            # foo should still have a content revision corresponding to bar's content.
            self.assertEqual(
                [
                    k[len("filename:"):]
                    for k in emdisconf.content_db if k.startswith("filename:")
                ],
                ["bar", "foo"],
            )
            self.assertEqual(len(emdisconf.content_db["filename:foo"]), 1)
            self.assertEqual(
                len([
                    revision_key
                    for revision_key in emdisconf.content_db["filename:foo"]
                    if not filename.digests_equal(
                        DistfileName(
                            "foo",
                            digests=dict(revision_key),
                        ))
                ]),
                1,
            )
            # bar should still have a content revision corresponding to foo's content.
            self.assertEqual(len(emdisconf.content_db["filename:bar"]), 2)
            self.assertEqual(
                len([
                    revision_key
                    for revision_key in emdisconf.content_db["filename:bar"]
                    if filename.digests_equal(
                        DistfileName(
                            "bar",
                            digests=dict(revision_key),
                        ))
                ]),
                1,
            )
            # remove the foo which refers to bar's content
            bar = [filename for filename in distfiles if filename == "bar"][0]
            foo_remaining = DistfileName("foo", digests=bar.digests)
            emdisconf.content_db.remove(foo_remaining)
            self.assertEqual(
                [
                    k[len("filename:"):]
                    for k in emdisconf.content_db if k.startswith("filename:")
                ],
                ["bar"],
            )
            self.assertRaises(KeyError, emdisconf.content_db.__getitem__,
                              "filename:foo")
            # bar should still have a content revision corresponding to foo's content.
            self.assertEqual(len(emdisconf.content_db["filename:bar"]), 2)
Example #23
 def _process(v):
     if isinstance(v, dict):
         sub_args = UvnConfig._to_sns_args(v)
         return types.SimpleNamespace(**sub_args)
     else:
         return v
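The same recursive dict-to-namespace conversion as a self-contained helper (UvnConfig._to_sns_args above is assumed to map a dict's items through _process; this sketch inlines that step):

import types

def to_namespace(value):
    # recursively turn nested dicts into attribute-accessible namespaces
    if isinstance(value, dict):
        return types.SimpleNamespace(
            **{k: to_namespace(v) for k, v in value.items()})
    return value

cfg = to_namespace({'db': {'host': 'localhost', 'port': 5432}})
print(cfg.db.host)  # localhost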
Example #24
def main():

    # Check args for if we're going to compile a single file instead
    m_matchingPattern = None
    if (len(sys.argv) > 1):
        m_matchingPattern = sys.argv[1]

    # Grab directory
    l_directoryPath = os.path.dirname(os.path.realpath(__file__))
    m_projectRootPath = os.path.split(l_directoryPath)[0] + "/"

    # Set up display
    m_buildSucceeded = 0
    m_buildFailed = 0
    m_buildUpToDate = 0
    m_buildSkipped = 0

    # Grab include folders
    m_includeFolders = CollectIncludeFolders(m_projectRootPath)

    # Loop through each .res folder to compile shaders.
    for i_filename in os.listdir(m_projectRootPath):

        # Only work on res folders:
        if not (".res" in i_filename):
            continue
        if ("backup" in i_filename):
            continue

        # Check if the folder exists
        l_resourceFolder = m_projectRootPath + i_filename + "/shaders"
        print(m_outp + "	Scanning in " + bcolors.UNDERLINE + i_filename +
              bcolors.ENDC + "...")
        if not os.path.isdir(l_resourceFolder):
            continue

        # Loop through each file
        for subdir, dirs, files in os.walk(l_resourceFolder):
            for file in files:
                if (file.endswith(".glsl") and ((m_matchingPattern is None) or
                                                (m_matchingPattern in file))):
                    # Get the file path & display it
                    l_shaderFilePath = os.path.join(subdir, file)
                    l_displayName = l_shaderFilePath[len(m_projectRootPath):]
                    print(m_outp + l_displayName)

                    # Need extensionless filename
                    l_nakedFile = os.path.splitext(l_shaderFilePath)[0]

                    # Create default output
                    l_outputFilePath = l_nakedFile + ".spv"

                    # Check for variants
                    l_variantsFile = l_nakedFile + ".variants.h"
                    l_variants = []
                    if (os.path.exists(l_variantsFile)):
                        l_variants = ReadVariants(l_variantsFile)

                    # Compile the shader
                    if (not l_variants):
                        l_status = CompileShader(l_shaderFilePath,
                                                 l_outputFilePath,
                                                 m_includeFolders, [])
                        if (l_status == 1):
                            m_buildSkipped += 1
                        elif (l_status == 0):
                            m_buildSucceeded += 1
                        elif (l_status == 2):
                            m_buildFailed += 1
                    else:
                        # Cache all permutations of macros
                        l_macroVariants = []
                        for variant in l_variants:
                            l_newMacroVariants = []
                            # For each variant value...
                            for variant_value in variant.values:
                                # Create macro that needs to be added to every macro list
                                macro = types.SimpleNamespace(
                                    name=variant.name,
                                    value=str(variant_value))
                                if (l_macroVariants):
                                    # Add macro to every existing macro list
                                    for old_macro_list in l_macroVariants:
                                        new_macro_list = old_macro_list.copy()
                                        new_macro_list.append(macro)
                                        l_newMacroVariants.append(
                                            new_macro_list)
                                else:
                                    # If the macro list is empty, seed it with size-1 lists
                                    l_newMacroVariants.append([macro])
                            # Save the current list
                            l_macroVariants = l_newMacroVariants

                        l_macroVariantIndex = 0
                        for l_macroVariant in l_macroVariants:

                            l_outputFilePath = l_nakedFile + "_" + str(
                                l_macroVariantIndex) + ".spv"
                            print(m_outp + "\tOutput: " + l_outputFilePath)

                            l_status = CompileShader(l_shaderFilePath,
                                                     l_outputFilePath,
                                                     m_includeFolders,
                                                     l_macroVariant)
                            if (l_status == 1):
                                m_buildSkipped += 1
                            elif (l_status == 0):
                                m_buildSucceeded += 1
                            elif (l_status == 2):
                                m_buildFailed += 1

                            l_macroVariantIndex += 1

    print(
        (bcolors.WARNING if m_buildFailed > 0 else bcolors.OKGREEN) +
        "Shader Build: {:d} succeeded, {:d} failed, {:d} up-to-date, {:d} skipped."
        .format(m_buildSucceeded, m_buildFailed, m_buildUpToDate,
                m_buildSkipped) + bcolors.ENDC)
    return (1 if m_buildFailed > 0 else 0)  # Exit nonzero only when a build failed.
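# The nested loops above compute the Cartesian product of all variant values; a
# minimal equivalent sketch using itertools.product (build_macro_variants is a
# hypothetical helper, assuming each variant exposes .name and .values as above):
import itertools
import types

def build_macro_variants(variants):
    # One macro list per value of each variant, combined across all variants.
    per_variant = [
        [types.SimpleNamespace(name=v.name, value=str(val)) for val in v.values]
        for v in variants
    ]
    return [list(combo) for combo in itertools.product(*per_variant)]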
Ejemplo n.º 25
0
	def test_small_simple_namespace(self):
		ns = types.SimpleNamespace(a=1, b=2)
		formatted = FancyPrinter().pformat(ns)
		assert formatted == "namespace(a=1, b=2)"
Ejemplo n.º 26
0
    def prepare_textacy_widgets(self):

        item_layout = dict(
            display='flex',
            flex_flow='row',
            justify_content='space-between',
        )

        pos_options = get_pos_options(self.tagset)

        normalize_options = {'None': False, 'Lemma': 'lemma', 'Lower': 'lower'}
        ngrams_options = {'1': [1], '1,2': [1, 2], '1,2,3': [1, 2, 3]}
        default_include_pos = ['NOUN', 'PROPN']
        frequent_words = ['_mask_']
        gui = types.SimpleNamespace(
            #min_freq=widgets.IntSlider(description='Min word freq',min=0, max=10, value=2, step=1, layout=widgets.Layout(width='240px', **item_layout)),
            #max_doc_freq=widgets.IntSlider(description='Min doc %', min=75, max=100, value=100, step=1, layout=widgets.Layout(width='240px', **item_layout)),
            min_freq=widgets.Dropdown(description='Min word freq',
                                      options=list(range(0, 11)),
                                      value=2,
                                      layout=widgets.Layout(width='200px',
                                                            **item_layout)),
            max_doc_freq=widgets.Dropdown(description='Max doc %',
                                          options=list(range(75, 101)),
                                          value=100,
                                          layout=widgets.Layout(
                                              width='200px', **item_layout)),
            ngrams=widgets.Dropdown(description='n-grams',
                                    options=ngrams_options,
                                    value=[1],
                                    layout=widgets.Layout(width='200px')),
            normalize=widgets.Dropdown(description='Normalize',
                                       options=normalize_options,
                                       value='lemma',
                                       layout=widgets.Layout(width='200px')),
            filter_stops=widgets.ToggleButton(value=True,
                                              description='Remove stopwords',
                                              tooltip='Filter out stopwords',
                                              icon='check'),
            named_entities=widgets.ToggleButton(value=False,
                                                description='Merge entities',
                                                tooltip='Merge entities',
                                                icon='check',
                                                disabled=False),
            substitute_terms=widgets.ToggleButton(value=False,
                                                  description='Map words',
                                                  tooltip='Substitute words',
                                                  icon='check'),
            include_pos=widgets.SelectMultiple(
                options=pos_options,
                value=default_include_pos,
                rows=7,
                layout=widgets.Layout(width='60px', **item_layout)),
            stop_words=widgets.SelectMultiple(
                options=frequent_words,
                value=[],
                rows=7,
                layout=widgets.Layout(width='120px', **item_layout)),
        )
        boxes = [
            widgets.VBox([
                gui.min_freq,
                gui.max_doc_freq,
                gui.normalize,
                gui.ngrams,
            ]),
            widgets.VBox(
                [gui.filter_stops, gui.named_entities, gui.substitute_terms],
                layout=widgets.Layout(margin='0px 0px 0px 10px')),
            widgets.HBox([
                widgets.Label(value='POS', layout=widgets.Layout(
                    width='40px')), gui.include_pos
            ],
                         layout=widgets.Layout(margin='0px 0px 0px 10px')),
            widgets.HBox([widgets.Label(value='STOP'), gui.stop_words],
                         layout=widgets.Layout(margin='0px 0px 0px 10px'))
        ]
        return gui, boxes
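# A minimal usage sketch in a notebook cell (assumes ipywidgets is available;
# `container` is a hypothetical instance of the class defining this method):
# from IPython.display import display
# gui, boxes = container.prepare_textacy_widgets()
# display(widgets.HBox(boxes))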
Ejemplo n.º 27
0
    def ParseResponseJSON(self):
        self.ResponseData = json.loads(
            self.ResponseText,
            object_hook=lambda d: types.SimpleNamespace(**d))
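# A standalone sketch of the same pattern (the JSON payload is illustrative):
import json
import types

data = json.loads(
    '{"user": {"name": "ada", "id": 7}}',
    object_hook=lambda d: types.SimpleNamespace(**d),
)
print(data.user.name)  # -> ada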
Ejemplo n.º 28
0
    def ensure_directories(self, env_dir):
        """
        Create the directories for the environment.

        Returns a context object which holds paths in the environment,
        for use by subsequent logic.
        """
        def create_if_needed(d):
            if not os.path.exists(d):
                os.makedirs(d)
            elif os.path.islink(d) or os.path.isfile(d):
                raise ValueError('Unable to create directory %r' % d)

        if os.path.exists(env_dir) and self.clear:
            self.clear_directory(env_dir)
        context = types.SimpleNamespace()
        context.env_dir = env_dir
        context.env_name = os.path.split(env_dir)[1]
        prompt = self.prompt if self.prompt is not None else context.env_name
        context.prompt = '(%s) ' % prompt
        create_if_needed(env_dir)
        env = os.environ
        executable = getattr(sys, '_base_executable', sys.executable)
        dirname, exename = os.path.split(os.path.abspath(executable))
        context.executable = executable
        context.python_dir = dirname
        context.python_exe = exename

        if sys.platform == 'win32':
            binname = 'Scripts'
            incpath = 'Include'
            libpath = os.path.join(env_dir, 'Lib', 'site-packages')
        else:
            binname = 'bin'
            incpath = 'include'
            libpath = os.path.join(env_dir, 'lib',
                                   'python%d.%d' % sys.version_info[:2],
                                   'site-packages')

        # Truffle change: our executable may not be a plain file (e.g. when
        # running through Java), so we always provide a launcher script for
        # the venv
        exename = context.python_exe = "graalpython"

        import atexit, tempfile
        tempdir = tempfile.mkdtemp()
        script = os.path.join(tempdir, "graalpython")
        if sys.platform == 'win32':
            script += ".bat"

        with open(script, "w") as f:
            if sys.platform != "win32":
                f.write("#!/bin/sh\n")
            f.write(sys.executable)
            f.write(
                " --python.CoreHome='%s' --python.StdLibHome='%s' --python.SysPrefix='%s' --python.SysBasePrefix='%s' --python.Executable='%s'"
                % (
                    sys.graal_python_core_home,
                    sys.graal_python_stdlib_home,
                    context.env_dir,
                    sys.base_prefix,
                    os.path.join(context.env_dir, binname, exename),
                ))
            if sys.platform == "win32":
                f.write(" %*")
            else:
                f.write(" \"$@\"")

        if sys.platform != "win32":
            os.chmod(script, 0o777)

        atexit.register(lambda: shutil.rmtree(tempdir, ignore_errors=True))

        dirname = context.python_dir = sys.graal_python_home
        context.executable = script

        if self.symlinks:
            logger.warning("We're not using symlinks in a Graal Python venv")
        self.symlinks = False
        # End of Truffle change

        context.inc_path = path = os.path.join(env_dir, incpath)
        create_if_needed(path)
        create_if_needed(libpath)
        # Issue 21197: create lib64 as a symlink to lib on 64-bit non-OS X POSIX
        if ((sys.maxsize > 2**32) and (os.name == 'posix')
                and (sys.platform != 'darwin')):
            link_path = os.path.join(env_dir, 'lib64')
            if not os.path.exists(link_path):  # Issue #21643
                os.symlink('lib', link_path)
        context.bin_path = binpath = os.path.join(env_dir, binname)
        context.bin_name = binname
        context.env_exe = os.path.join(binpath, exename)
        create_if_needed(binpath)
        return context
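# A minimal usage sketch against the stdlib API this method overrides, assuming
# it subclasses venv.EnvBuilder (ensure_directories returns the SimpleNamespace
# context shown above):
import tempfile
import venv

builder = venv.EnvBuilder(with_pip=False)
context = builder.ensure_directories(tempfile.mkdtemp())  # throwaway target dir
print(context.env_dir, context.bin_path, context.env_exe)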
Ejemplo n.º 29
0
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

import os
import types

script = types.SimpleNamespace()
script.random = script_cpp.random
script.wait = script_cpp.wait
script.time = script_cpp.time
script.timer = script_cpp.timer
script.system = script_cpp.system
script.writeToFile = script_cpp.writeToFile
script.writeData = script_cpp.writeData
script.readAll = script_cpp.readAll
script.removeFile = script_cpp.removeFile
script.run = script_cpp.run
script.quit = script_cpp.quit
script.reset = script_cpp.reset
script.getPhoto = script_cpp.getPhoto
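# An equivalent, more compact construction (assumes script_cpp exposes exactly
# these callables):
_names = [
    "random", "wait", "time", "timer", "system", "writeToFile", "writeData",
    "readAll", "removeFile", "run", "quit", "reset", "getPhoto",
]
script = types.SimpleNamespace(**{n: getattr(script_cpp, n) for n in _names})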

Ejemplo n.º 30
0
                    "Parameter functions with array arguments: all the arguments must be arrays of the same shape."
                )
            for k in args[1:]:
                if len(k) != len(args[0]):
                    raise ValueError(
                        "Parameter functions with array arguments: all the arguments must be arrays of the same shape."
                    )
            # apply func elementwise, recursively, on the args
            return np.array([wrapper(*k) for k in zip(*args)])
        return func(*args)

    return wrapper


par_funcs = types.SimpleNamespace(
    **{name: wrap_mathfunc(getattr(sf, name)) for name in dir(sf) if name[0] != "_"}
)
"""SimpleNamespace: Namespace of mathematical functions for manipulating Parameters.
Consists of all :mod:`sympy.functions` public members, which we wrap with :func:`wrap_mathfunc`.
"""


class ParameterError(RuntimeError):
    """Exception raised when the Parameter classes encounter an illegal operation.

    E.g., trying to use a measurement result before it is available.
    """


def is_object_array(p):
    """Returns True iff p is an object array.