Esempio n. 1
0
def get_default(nick, session, send, config, source):
    """Return the default weather location for *nick*.

    Order of preference: the user's stored preference, a GeoIP guess
    from the hostmask's IP, and finally the hard-coded TJ zip code.
    """
    stored = session.query(Weather_prefs.location).filter(Weather_prefs.nick == nick).scalar()
    if stored is not None:
        return stored

    try:
        # GeoIP lookup is best-effort: it can fail when the GeoIP DB is
        # missing or the hostmask carries no usable IP address.
        hostmask = source.split('@')[1]
        v4_match = re.search(r"\d{1,3}[.-]\d{1,3}[.-]\d{1,3}[.-]\d{1,3}", hostmask)
        if v4_match:
            hostip = v4_match.group()
        else:
            # Not a dotted/dashed v4 address; maybe the hostmask is a bare v6 one.
            hostip = None
            try:
                socket.inet_pton(socket.AF_INET6, hostmask)
                hostip = hostmask
            except socket.error:
                pass
        if hostip:
            # Cloaked hostmasks often encode the IP with dashes.
            hostip = re.sub('-', '.', hostip)
            with resources.path(static, config['db']['geoip']) as db_file:
                guess = get_zipcode(str(db_file), hostip)
            if guess is not None:
                send("No default location for %s, GeoIP guesses that your zip code is %s." % (nick, guess))
                return guess
    except (FileNotFoundError, geoip2.errors.AddressNotFoundError):
        pass

    # default to TJHSST
    send("No default location for %s and unable to guess a location, defaulting to TJ (22312)." % nick)
    return '22312'
Esempio n. 2
0
def get_test_data_path(name, module=None):
    """Return the on-disk path of the test-data resource *name*.

    When *module* is omitted, the package's own ``data`` module is used.
    """
    if resources is None:
        raise RuntimeError("The importlib_resources package is required to get"
                           " test data on systems with Python < 3.7")

    if module is None:
        # Default to the bundled test-data package.
        from . import data as default_data
        module = default_data

    with resources.path(module, name) as data_path:
        return str(data_path)
Esempio n. 3
0
File: orm.py Progetto: tjcsl/cslbot
def setup_db(session, botconfig, confdir):
    """Sets up the database."""
    Base.metadata.create_all(session.connection())
    # A freshly-created schema has no alembic_version table yet; stamp it
    # at 'head' so future alembic migrations start from a known revision.
    if not session.get_bind().has_table('alembic_version'):
        alembic_cfg = config.Config()
        alembic_cfg.set_main_option('bot_config_path', confdir)
        with resources.path('cslbot', botconfig['alembic']['script_location']) as script_location:
            alembic_cfg.set_main_option('script_location', str(script_location))
            command.stamp(alembic_cfg, 'head')

    # Ensure the configured bot owner holds the 'owner' role.
    owner_nick = botconfig['auth']['owner']
    owner_exists = session.query(Permissions).filter(Permissions.nick == owner_nick).count()
    if not owner_exists:
        session.add(Permissions(nick=owner_nick, role='owner'))
Esempio n. 4
0
    if torch.cuda.is_available():
        device = torch.device("cuda")
    else:
        print("Warning: Running on CPU---sampling might take a while...")
        device = torch.device("cpu")
    midas = Midas().eval().to(device)
    # init transformer
    renderer = Renderer(model=opt.model, device=device)

    if opt.path is None:
        try:
            import importlib.resources as pkg_resources
        except ImportError:
            import importlib_resources as pkg_resources

        with pkg_resources.path("geofree.examples", "artist.jpg") as path:
            example = load_as_example(path)
    else:
        path = opt.path
        if not os.path.isfile(path):
            Tk().withdraw()
            path = askopenfilename(initialdir=sys.argv[1])
        example = load_as_example(path)

    ims = example["src_img"][None, ...]
    K = example["K"]

    # compute depth for preview
    dms = [None]
    for i in range(ims.shape[0]):
        midas_in = torch.tensor(ims[i])[None, ...].permute(0, 3, 1,
Esempio n. 5
0
 def _in_proc_script_path():
     """Return a context manager yielding the path of the bundled _in_process.py helper."""
     return resources.path(__package__, '_in_process.py')
def genBuildGradle(projectType, team):
    """Render the build.gradle mako template for *projectType* and *team*."""
    with resources.path(__name__, "templates") as template_root:
        gradle_template = os.path.join(template_root, projectType, "build.gradle.mako")
        with open(gradle_template, "r") as template:
            return Template(template.read()).render(team=team)
Esempio n. 7
0
# Template parameter name -> key in the user-supplied config dict.
_CONFIG_KEY_FOR_PARAM = {
    "diam": "wheelDiameter",
    "ppr": "encoderPPR",
    "gearing": "gearing",
    "lports": "leftMotorPorts",
    "rports": "rightMotorPorts",
    "linverted": "leftMotorsInverted",
    "rinverted": "rightMotorsInverted",
    "lcontrollers": "leftControllerTypes",
    "rcontrollers": "rightControllerTypes",
    "lencoderports": "leftEncoderPorts",
    "rencoderports": "rightEncoderPorts",
    "lencoderinv": "leftEncoderInverted",
    "rencoderinv": "rightEncoderInverted",
    "gyro": "gyroType",
    "gyroport": "gyroPort",
}

# Template parameters each supported project type's Robot.java.mako expects.
_PARAMS_FOR_PROJECT_TYPE = {
    "Simple": ("diam", "ppr", "lports", "rports", "linverted", "rinverted",
               "lcontrollers", "rcontrollers", "lencoderports",
               "rencoderports", "lencoderinv", "rencoderinv", "gyro",
               "gyroport"),
    "Talon": ("diam", "ppr", "lports", "rports", "linverted", "rinverted",
              "lcontrollers", "rcontrollers", "lencoderinv", "rencoderinv",
              "gyro", "gyroport"),
    "SparkMax": ("diam", "ppr", "gearing", "lports", "rports", "linverted",
                 "rinverted", "lencoderinv", "rencoderinv", "gyro",
                 "gyroport"),
    "Neo": ("diam", "gearing", "lports", "rports", "linverted", "rinverted",
            "gyro", "gyroport"),
}


def genRobotCode(projectType, config):
    """Render the Robot.java source for *projectType* from its mako template.

    The original implementation repeated the render call once per project
    type; the per-type parameter lists are now table-driven, which removes
    the duplication and makes adding a project type a one-line change.

    Parameters:
        projectType: one of "Simple", "Talon", "SparkMax" or "Neo".
        config: dict of user settings (wheelDiameter, encoderPPR, ...).

    Returns the rendered Java source, or None for an unknown project type
    (matching the original if/elif chain's implicit fall-through).
    """
    params = _PARAMS_FOR_PROJECT_TYPE.get(projectType)
    if params is None:
        return None
    render_args = {param: config[_CONFIG_KEY_FOR_PARAM[param]] for param in params}
    with resources.path(__name__, "templates") as path:
        template_file = os.path.join(path, projectType, "Robot.java.mako")
        with open(template_file, "r") as template:
            return Template(template.read()).render(**render_args)
Esempio n. 8
0
    def setup(self):
        """Load the song, build the sample/beat/note indices, and create all visuals."""
        super().setup()

        # Load song and get waveform
        with LogSection(logger, "loading song and waveform"):
            with pkg_resources.path(charm.data.audio, "fourth_wall.wav") as p:
                self._song = arcade.load_sound(p)
                load = librosa.load(p, mono=True)
            self.waveform: ndarray[float] = load[0]
            self.sample_rate: int = load[1]

        # Create an index of samples (one SoundPoint per waveform sample,
        # timestamped from the sample rate)
        with LogSection(logger, "indexing samples"):
            samples: list[SoundPoint] = []
            for n, s in enumerate(self.waveform):
                samples.append(SoundPoint((1 / self.sample_rate) * n, s))
            self.samples = nindex.Index(samples, "time")

        # Create an index of beats; every other detected beat is kept
        with LogSection(logger, "indexing beats"):
            self.bpm, beats = librosa.beat.beat_track(y = self.waveform, sr = self.sample_rate, units = "time")
            self.beats = nindex.Index([Beat(t) for t in beats[::2]], "time")

        self.chart_available = False
        # Create an index of chart notes
        with LogSection(logger, "parsing chart"):
            path = songspath / "fnf" / "fourth-wall"
            self.songdata = FNFSong.parse(path)
        if self.songdata:
            with LogSection(logger, "indexing notes"):
                self.chart_available = True
                self.player_chart = nindex.Index(self.songdata.charts[0].notes, "time")
                enemy_chart = self.songdata.get_chart(2, self.songdata.charts[0].difficulty)
                self.enemy_chart = nindex.Index(enemy_chart.notes, "time")
            with LogSection(logger, "generating highway"):
                self.highway = FNFHighway(self.songdata.charts[0], (((Settings.width // 3) * 2), 0), auto = True)

        # Create background stars; enough to fill the full scroll height
        with LogSection(logger, "creating stars"):
            self.star_camera = arcade.Camera()
            self.stars = arcade.SpriteList()
            self.scroll_speed = 20  # px/s
            stars_per_screen = 100
            star_height = Settings.height + int(self._song.source.duration * self.scroll_speed)
            star_amount = int(stars_per_screen * (star_height / Settings.height))
            logger.info(f"Generating {star_amount} stars...")
            for i in range(star_amount):
                sprite = arcade.SpriteCircle(5, arcade.color.WHITE + (255,), True)
                sprite.center_x = randint(0, Settings.width)
                sprite.center_y = randint(-(star_height - Settings.height), Settings.height)
                self.stars.append(sprite)

        with LogSection(logger, "creating text"):
            self.text = arcade.Text("Fourth Wall by Jacaris", Settings.width / 4, Settings.height * (0.9),
            font_name = "Determination Sans", font_size = 32, align="center", anchor_x="center", anchor_y="center", width = Settings.width)

        with LogSection(logger, "making gradient"):
            # Gradient
            self.gradient = arcade.create_rectangle_filled_with_colors(
                [(-250, Settings.height), (Settings.width + 250, Settings.height), (Settings.width + 250, -250), (-250, -250)],
                [arcade.color.BLACK, arcade.color.BLACK, arcade.color.DARK_PASTEL_PURPLE, arcade.color.DARK_PASTEL_PURPLE]
            )

        with LogSection(logger, "loading sprites"):
            self.scott_atlas = arcade.TextureAtlas((8192, 8192))
            self.sprite_list = arcade.SpriteList(atlas = self.scott_atlas)
            self.sprite = sprite_from_adobe("scott", ("bottom", "left"))
            self.boyfriend = sprite_from_adobe("bfScott", ("bottom", "right"))
            self.sprite_list.append(self.sprite)
            self.sprite_list.append(self.boyfriend)
            self.sprite.cache_textures()
            self.boyfriend.cache_textures()
            self.sprite.bottom = 0
            self.sprite.left = 0
            self.boyfriend.bottom = 0
            self.boyfriend.right = Settings.width - 50
            self.sprite.set_animation("idle")
            self.boyfriend.set_animation("idle")

        # Settings
        with LogSection(logger, "finalizing setup"):
            self.multiplier = 250
            self.y = Settings.height // 2
            self.line_width = 1
            self.x_scale = 2
            self.resolution = 4
            self.beat_time = 0.5
            self.show_text = False

            # RAM
            # BUG FIX: the original wrote [(0, 0) * Settings.width], which is a
            # one-element list holding a single tuple of length 2*width.  The
            # annotation shows one (x, y) pair per column was intended.
            self.pixels: list[tuple[int, int]] = [(0, 0)] * Settings.width
            self.last_beat = -self.beat_time
            self.last_enemy_note: FNFNote = None
            self.last_player_note: FNFNote = None
            self.did_harcode = False
Esempio n. 9
0
"""
Non-code files
"""

import json
from importlib import resources

# Load JSON data for direct access
with resources.path(__name__, "media_types.json") as media_types_path:
    with open(media_types_path, "r") as media_types_fp:
        MIME_TYPES = set(json.load(media_types_fp))

# Top-level registry (the part before the slash) of every known MIME type.
MIME_TYPE_REGISTRIES = {
    mime_type.split("/", maxsplit=1)[0]
    for mime_type in MIME_TYPES
}

with resources.path(__name__, "eml_dump_input.schema.json") as schema_path:
    with open(schema_path) as schema_fp:
        EML_DUMP_INPUT_SCHEMA = json.load(schema_fp)
Esempio n. 10
0
    def test_instruction_line_creation(self):
        """Exercise InstructionLine.factory for several mnemonics and check the emitted bytes.

        Covers a plain load, an implied instruction, a 3-byte instruction with a
        label expression, label arithmetic (+/-), and an instruction with an
        immediate operand.
        """
        with pkg_resources.path(
                config_files, 'test_instruction_list_creation_isa.json') as fp:
            isa_model = AssemblerModel(str(fp), 0)

        label_values = GlobalLabelScope(isa_model.registers)
        label_values.set_label_value('test1', 0xA, 1)
        label_values.set_label_value('high_de', 0xde00, 1)

        ins1 = InstructionLine.factory(22, '  lda test1', 'some comment!',
                                       isa_model)
        ins1.set_start_address(1212)
        self.assertIsInstance(ins1, InstructionLine)
        self.assertEqual(ins1.byte_size, 1, 'has 1 byte')
        ins1.label_scope = label_values
        ins1.generate_bytes()
        self.assertEqual(ins1.get_bytes(), bytearray([0x1a]),
                         'instruction should match')

        ins2 = InstructionLine.factory(22, '  hlt', 'stop it!', isa_model)
        ins2.set_start_address(1212)
        self.assertIsInstance(ins2, InstructionLine)
        self.assertEqual(ins2.byte_size, 1, 'has 1 byte')
        ins2.label_scope = label_values
        ins2.generate_bytes()
        self.assertEqual(ins2.get_bytes(), bytearray([0xF0]),
                         'instruction should match')

        # 0xde00 + 0x00AD = 0xDEAD, emitted little-endian after the opcode.
        ins3 = InstructionLine.factory(22, '  seta (high_de + $00AD)',
                                       'is it alive?', isa_model)
        ins3.set_start_address(1313)
        self.assertIsInstance(ins3, InstructionLine)
        self.assertEqual(ins3.byte_size, 3, 'has 3 bytes')
        ins3.label_scope = label_values
        ins3.generate_bytes()
        self.assertEqual(ins3.get_bytes(), bytearray([0x30, 0xAD, 0xDE]),
                         'instruction should match')

        ins4 = InstructionLine.factory(22, '  lda test1+2', 'load it',
                                       isa_model)
        ins4.set_start_address(1313)
        self.assertIsInstance(ins4, InstructionLine)
        self.assertEqual(ins4.byte_size, 1, 'has 1 byte')
        ins4.label_scope = label_values
        ins4.generate_bytes()
        self.assertEqual(ins4.get_bytes(), bytearray([0x1c]),
                         'instruction should match')

        ins5 = InstructionLine.factory(22, '  plus 8', 'plus it', isa_model)
        ins5.set_start_address(888)
        self.assertIsInstance(ins5, InstructionLine)
        self.assertEqual(ins5.byte_size, 2, 'has 2 bytes')
        ins5.label_scope = label_values
        ins5.generate_bytes()
        self.assertEqual(ins5.get_bytes(), bytearray([0x40, 0x08]),
                         'instruction should match')

        ins6 = InstructionLine.factory(22, '  lda test1-2', 'load it',
                                       isa_model)
        ins6.set_start_address(888)
        self.assertIsInstance(ins6, InstructionLine)
        # FIX: assert message had a typo ('has 1 byte1').
        self.assertEqual(ins6.byte_size, 1, 'has 1 byte')
        ins6.label_scope = label_values
        ins6.generate_bytes()
        self.assertEqual(ins6.get_bytes(), bytearray([0x18]),
                         'instruction should match')
Esempio n. 11
0
import json
from importlib import resources
from typing import Tuple

import license_sh.data as license_data

valid_license_ids = {}
with resources.path(license_data, "licenses.json") as licenses_path:
    with open(licenses_path) as json_file:
        data = json.load(json_file)

        # Attach an upper-cased id to every SPDX entry so a
        # case-insensitive lookup table can be built from it.
        for license in data["licenses"]:
            license["licenseIdUppercase"] = license["licenseId"].upper()

        # Map the upper-cased ids back to the canonical SPDX identifiers.
        valid_license_ids = {
            license["licenseIdUppercase"]: license["licenseId"]
            for license in data["licenses"]
        }

        # Map full human-readable license names to SPDX identifiers.
        licenses_by_names = {
            license["name"]: license["licenseId"]
            for license in data["licenses"]
        }


def is_spdx_compliant(license: str) -> bool:
    """Tell whether *license* is one of the canonical SPDX identifiers."""
    return any(license == spdx_id for spdx_id in valid_license_ids.values())


def normalize(license: str) -> Tuple[str, bool]:
    def handle(self, **options):
        """Compile .po translation files for the selected locales.

        Collects locale directories from the project tree, Django's
        LOCALE_PATHS, and every configured openIMIS module, then runs the
        gettext compiler over each .po file found.

        Raises CommandError when the gettext tooling is missing, when no
        locale directory exists, or when compilation reported errors.
        """
        locale = options['locale']
        exclude = options['exclude']
        ignore_patterns = set(options['ignore_patterns'])
        self.verbosity = options['verbosity']
        if options['fuzzy']:
            # -f makes the gettext program also compile fuzzy translations.
            self.program_options = self.program_options + ['-f']

        # Fail early if the gettext binary is not on PATH.
        if find_command(self.program) is None:
            raise CommandError("Can't find %s. Make sure you have GNU gettext "
                               "tools 0.15 or newer installed." % self.program)

        basedirs = [os.path.join('conf', 'locale'), 'locale']
        if os.environ.get('DJANGO_SETTINGS_MODULE'):
            from django.conf import settings
            basedirs.extend(settings.LOCALE_PATHS)

        # Walk entire tree, looking for locale directories
        apps = []
        for mod in load_openimis_conf()["modules"]:
            mod_name = mod["name"]
            # Locate each module's install directory via its __init__.py.
            with resources.path(mod_name, "__init__.py") as path:
                apps.append(path.parent.parent)  # This might need to be more restrictive

        for topdir in ["."] + apps:
            for dirpath, dirnames, filenames in os.walk(topdir, topdown=True):
                for dirname in dirnames:
                    # NOTE(review): removing from dirnames while iterating it
                    # can skip the following entry; consider iterating a copy.
                    if is_ignored_path(os.path.normpath(os.path.join(dirpath, dirname)), ignore_patterns):
                        dirnames.remove(dirname)
                    elif dirname == 'locale':
                        basedirs.append(os.path.join(dirpath, dirname))

        # Gather existing directories.
        basedirs = set(map(os.path.abspath, filter(os.path.isdir, basedirs)))

        if not basedirs:
            raise CommandError("This script should be run from the Django Git "
                               "checkout or your project or app tree, or with "
                               "the settings module specified.")

        # Build locale list
        all_locales = []
        for basedir in basedirs:
            locale_dirs = filter(os.path.isdir, glob.glob('%s/*' % basedir))
            all_locales.extend(map(os.path.basename, locale_dirs))

        # Account for excluded locales
        locales = locale or all_locales
        locales = set(locales).difference(exclude)

        self.has_errors = False
        for basedir in basedirs:
            if locales:
                dirs = [os.path.join(basedir, l, 'LC_MESSAGES') for l in locales]
            else:
                dirs = [basedir]
            # Collect every (directory, file) pair for the .po files below.
            locations = []
            for ldir in dirs:
                for dirpath, dirnames, filenames in os.walk(ldir):
                    locations.extend((dirpath, f) for f in filenames if f.endswith('.po'))
            if locations:
                self.compile_messages(locations)

        if self.has_errors:
            raise CommandError('compilemessages generated one or more errors.')
Esempio n. 13
0
# -*- coding: utf-8 -*-
"""Climate indices computation package based on Xarray."""
from importlib.resources import path

from xclim.core import units  # noqa
from xclim.core.indicator import build_indicator_module_from_yaml
from xclim.core.options import set_options  # noqa
from xclim.indicators import atmos, land, seaIce  # noqa

__author__ = """Travis Logan"""
__email__ = "*****@*****.**"
__version__ = "0.26.2-beta"

# Virtual modules creation:
# Each YAML file shipped in xclim.data becomes a virtual indicator module.
# cf.yml is built with mode="ignore" because some generic functions are
# missing.
for _yaml_name, _mode in (
    ("icclim.yml", "raise"),
    ("anuclim.yml", "raise"),
    ("cf.yml", "ignore"),
):
    with path("xclim.data", _yaml_name) as f:
        build_indicator_module_from_yaml(f, mode=_mode)
Esempio n. 14
0
import random
import numpy as np

from barl_simpleoptions import BaseEnvironment

from barl_envs.renderers import RoomRenderer

# Import room template files.
try:
    import importlib.resources as pkg_resources
except ImportError:
    import importlib_resources as pkg_resources

from . import data

# Resolve the bundled room-template text files to filesystem paths.
# NOTE(review): resources.path() only guarantees the yielded path while its
# `with` block is open; keeping the value afterwards assumes the package is
# installed unzipped on disk -- confirm.
with pkg_resources.path(data, "two_rooms.txt") as path:
    default_two_room = path

with pkg_resources.path(data, "six_rooms.txt") as path:
    default_six_room = path

with pkg_resources.path(data, "xu_four_rooms.txt") as path:
    xu_four_room = path

with pkg_resources.path(data, "bridge_room.txt") as path:
    bridge_room = path

with pkg_resources.path(data, "cage_room.txt") as path:
    cage_room = path

with pkg_resources.path(data, "empty_room.txt") as path:
Esempio n. 15
0
def np04cli(ctx, obj, traceback, loglevel, elisa_conf, log_path, timeout,
            cfg_dumpdir, dotnanorc, kerberos, cfg_dir, user):
    """Entry point of the np04 nano RC shell.

    Prints the banner, loads credentials and DB socket configuration, builds
    the NanoRC instance, and registers a cleanup hook that terminates the RC
    when the CLI exits.
    """
    if not elisa_conf:
        # NOTE(review): the yielded path is only guaranteed valid inside the
        # `with` block; using it later assumes an unzipped install -- confirm.
        with resources.path(confdata, "elisa_conf.json") as p:
            elisa_conf = p

    obj.print_traceback = traceback
    credentials.change_user(user)
    ctx.command.shell.prompt = f"{credentials.user}@np04rc> "
    grid = Table(title='Shonky Nano04RC', show_header=False, show_edge=False)
    grid.add_column()
    grid.add_row(
        "This is an admittedly shonky nano RC to control DUNE-DAQ applications."
    )
    grid.add_row("  Give it a command and it will do your biddings,")
    grid.add_row("  but trust it and it will betray you!")
    grid.add_row(f"Use it with care, {credentials.user}!")

    obj.console.print(Panel.fit(grid))

    if loglevel:
        updateLogLevel(loglevel)

    try:
        dotnanorc = os.path.expanduser(dotnanorc)
        obj.console.print(f"[blue]Loading {dotnanorc}[/blue]")
        # FIX: the original opened the file without ever closing it; the
        # context manager releases the handle as soon as the JSON is parsed.
        with open(dotnanorc) as f:
            dotnanorc = json.load(f)

        rundb_socket = json.loads(
            resources.read_text(confdata, "run_number.json"))['socket']
        runreg_socket = json.loads(
            resources.read_text(confdata, "run_registry.json"))['socket']

        credentials.add_login("rundb", dotnanorc["rundb"]["user"],
                              dotnanorc["rundb"]["password"])
        credentials.add_login("runregistrydb",
                              dotnanorc["runregistrydb"]["user"],
                              dotnanorc["runregistrydb"]["password"])
        logging.getLogger("cli").info("RunDB socket " + rundb_socket)
        logging.getLogger("cli").info("RunRegistryDB socket " + runreg_socket)

        rc = NanoRC(console=obj.console,
                    top_cfg=cfg_dir,
                    run_num_mgr=DBRunNumberManager(rundb_socket),
                    run_registry=DBConfigSaver(runreg_socket),
                    logbook_type=elisa_conf,
                    timeout=timeout,
                    use_kerb=kerberos)

        rc.log_path = os.path.abspath(log_path)
    except Exception:
        # logger.exception records the full traceback before aborting.
        logging.getLogger("cli").exception("Failed to build NanoRC")
        raise click.Abort()

    def cleanup_rc():
        # Make sure the RC is shut down before the CLI process exits.
        logging.getLogger("cli").warning(
            "NanoRC context cleanup: Terminating RC before exiting")
        rc.terminate()
        if rc.return_code:
            ctx.exit(rc.return_code)

    ctx.call_on_close(cleanup_rc)
    obj.rc = rc
    rc.ls(False)
Esempio n. 16
0
 def setUp(self):
     """Stash importlib-resources handles for the JPEG 2000 test fixtures."""
     # NOTE(review): ir.path() returns a context manager, not a concrete
     # path; the test methods presumably enter these later with
     # `with self.p0_03 as p:` -- confirm against the test bodies.
     self.p0_03 = ir.path(data, 'p0_03.j2k')
     self.p0_06 = ir.path(data, 'p0_06.j2k')
     self.p1_06 = ir.path(data, 'p1_06.j2k')
     self.issue142 = ir.path(data, 'issue142.j2k')
     self.edf_c2_1178956 = ir.path(data, 'edf_c2_1178956.jp2')
Esempio n. 17
0
 def load_icon_map(self):
     """Read the bundled icon-map JSON resource into ``self.icon_map``."""
     with path("dakara_player.resources", ICON_MAP_FILE) as icon_map_path:
         self.icon_map = json.loads(icon_map_path.read_text())
Esempio n. 18
0
    def test_label_line_with_instruction(self):
        """Check LineOjectFactory's handling of labels sharing a line with other content.

        A label followed by a data directive or an instruction must yield two
        line objects; a bare label yields one; a label followed by a constant
        assignment or another label must abort.
        """
        with pkg_resources.path(config_files,
                                'test_instructions_with_variants.yaml') as fp:
            isa_model = AssemblerModel(str(fp), 0)

        label_values = GlobalLabelScope(isa_model.registers)
        label_values.set_label_value('a_const', 40, 1)

        lineid = LineIdentifier(123, 'test_label_line_with_instruction')

        # test data line on label line
        objs1: list[LineObject] = LineOjectFactory.parse_line(
            lineid, 'the_byte: .byte 0x88 ; label and instruction', isa_model)
        # FIX: several assert messages below were copy-paste errors
        # ('two instructions' for non-instruction objects, 'first' for the
        # second object); they now describe what is actually asserted.
        self.assertEqual(len(objs1), 2, 'there should be two line objects')
        self.assertIsInstance(objs1[0], LabelLine,
                              'the first line object should be a label')
        self.assertIsInstance(objs1[1], DataLine,
                              'the second line object should be a data line')
        self.assertEqual(objs1[0].get_label(), 'the_byte',
                         'the label string should match')
        objs1[1].label_scope = label_values
        objs1[1].generate_bytes()
        self.assertEqual(objs1[1].byte_size, 1,
                         'the data value should have 1 byte')
        self.assertEqual(list(objs1[1].get_bytes()), [0x88],
                         'the data value should be [0x88]')

        # test instruction on label line
        objs2: list[LineObject] = LineOjectFactory.parse_line(
            lineid, 'the_instr: mov a, a_const ; label and instruction',
            isa_model)
        self.assertEqual(len(objs2), 2, 'there should be two line objects')
        self.assertIsInstance(objs2[0], LabelLine,
                              'the first line object should be a label')
        self.assertIsInstance(
            objs2[1], InstructionLine,
            'the second line object should be an Instruction line')
        self.assertEqual(objs2[0].get_label(), 'the_instr',
                         'the label string should match')
        objs2[1].label_scope = label_values
        objs2[1].generate_bytes()
        self.assertEqual(objs2[1].byte_size, 2,
                         'the instruction value should have 2 bytes')
        self.assertEqual(list(objs2[1].get_bytes()), [0b01000111, 40],
                         'the instruction bytes should match')

        # labels with no inline instruction should also work
        objs3: list[LineObject] = LineOjectFactory.parse_line(
            lineid, 'the_label: ;just a label', isa_model)
        self.assertEqual(len(objs3), 1, 'there should be one line object')
        self.assertIsInstance(objs3[0], LabelLine,
                              'the first line object should be a label')
        self.assertEqual(objs3[0].get_label(), 'the_label',
                         'the label string should match')

        # labels with constants should not work
        with self.assertRaises(SystemExit, msg='this instruction should fail'):
            LineOjectFactory.parse_line(
                lineid, 'the_label: const = 3 ; label with constant',
                isa_model)
        # labels with other labels should not work
        with self.assertRaises(SystemExit, msg='this instruction should fail'):
            LineOjectFactory.parse_line(
                lineid,
                'the_label: the_second_label: ; label with another label',
                isa_model)
Esempio n. 19
0
    def get_stations_from_configfile(filename: str = None,
                                     codenames: list = None,
                                     name: str = 'network') -> Stations:
        """Creates a Stations object (i.e. a network of stations) by reading the station
        information from an input file. Optionally, it allows to select only a subset of
        all stations in the file.

        Inputs
        - filename : str
            Path to the text file containing the information from all stations.
            If not provided, it reads the default station catalog file located in
                            data/stations_catalog.inp
            Any other file should contain the same format (standard Python input config files),
            with the following fields per station (whose name would be provided as the name of
            section).
            - station - full name of the station.
            - code : codename assigned to the station. It must be unique (typically two letters).
            - network - main network to which it belongs to.
            - possible_networks - all networks the station can participate in (including 'network')
            - country - country where the station is located.
            - diameter - free format string with the diameter of the station
                        (optional more information in case of interferometers).
            - position = x, y, z (in meters). Geocentric position of the station.
            - min_elevation (in degrees) - minimum elevation the station can observe.
            - real_time = yes/no - if the station can participate in real-time observations (e.g. e-EVN).
            - SEFD_**  - SEFD (in Jy units) of the station at the **cm band. If a given band is not present,
                        it is assumed that the station cannot observe it.
                        For example SEFD_21 = 500 means that the SEFD at 21cm is 500 Jy.
            - Any other attribute is accepted, but ignored in this code. That would easily allow future
              extensions of the code.
        - codenames : list
            If you only want to select a subset of all stations available in the input file,
            here you can pass a list with the codenames of the stations that should be imported.
        - name : str
            Name to assign to the network of stations that will be created.

        Returns
        - network : Stations
            Returns a Stations object containing the selected stations.
        """
        config = configparser.ConfigParser()
        if filename is None:
            with resources.path(
                    "data", "stations_catalog.inp") as stations_catalog_path:
                config.read(stations_catalog_path)
        else:
            # FIX: the original called config.read(open(filename, 'r')).
            # ConfigParser.read() expects filename(s); handed an open file
            # object it iterates the file's LINES as filenames, silently
            # leaving the parser empty, and the handle was never closed.
            # read_file() parses the stream, and opening explicitly still
            # raises FileNotFoundError for a missing file (the behavior the
            # original comment asked for).
            with open(filename, 'r') as config_file:
                config.read_file(config_file)

        networks = Stations(name, [])
        for stationname in config.sections():
            if (codenames is None) or (config[stationname]['code']
                                       in codenames):
                # Geocentric position is given as "x, y, z" in meters.
                temp = [
                    float(i.strip())
                    for i in config[stationname]['position'].split(',')
                ]
                a_loc = coord.EarthLocation(temp[0] * u.m, temp[1] * u.m,
                                            temp[2] * u.m)
                # Getting the SEFD values for the bands
                min_elev = float(config[stationname]['min_elevation']) * u.deg
                does_real_time = config[stationname]['real_time'] == 'yes'
                sefds = {}
                for akey in config[stationname].keys():
                    if 'SEFD_' in akey.upper():
                        sefds[f"{akey.upper().replace('SEFD_', '').strip()}cm"] = \
                                            float(config[stationname][akey])

                new_station = SelectedStation(
                    stationname, config[stationname]['code'],
                    config[stationname]['network'], a_loc, sefds, min_elev,
                    config[stationname]['station'],
                    config[stationname]['possible_networks'],
                    config[stationname]['country'],
                    config[stationname]['diameter'], does_real_time)
                networks.add(new_station)

        return networks
Esempio n. 20
0
 def load_ui_file(self) -> None:
     """Load the tab-editor .ui definition from package data into this widget."""
     ui_resource = resources.path("speedwagon.ui", "tab_editor.ui")
     with ui_resource as ui_path:
         uic.loadUi(ui_path, self)
Esempio n. 21
0
    def generate(self) -> None:
        """Generate a Visual Studio Code extension for this language.

        Builds ``<export_dir>/extensions/<language_name>`` containing a
        rendered package.json, a TextMate grammar customised with this
        language's instructions/registers/labels/directives, and the static
        snippet, configuration and theme resources copied from the package
        templates.
        """
        extension_name = self.language_name
        extension_dir_path = os.path.join(self.export_dir, 'extensions', extension_name)

        if self.verbose >= 1:
            print(f'Generating Visual Studio Code extension for language "{self.language_id}" at: {extension_dir_path}')

        # create the extensions directory if it doesn't exist
        Path(extension_dir_path).mkdir(parents=True, exist_ok=True)
        Path(os.path.join(extension_dir_path, 'syntaxes')).mkdir(parents=True, exist_ok=True)
        # generate package.json from the packaged template
        with pkg_resources.path(resources, 'package.json') as fp:
            with open(fp, 'r') as json_file:
                package_json = json.load(json_file)

        scope_name = 'source.' + self.language_id
        theme_filename = self.language_id + '.tmTheme'
        package_json['name'] = self.language_name
        package_json['displayName'] = self.model.description
        package_json['version'] = self.language_version
        package_json['contributes']['languages'][0]['id'] = self.language_id
        package_json['contributes']['languages'][0]['extensions'] = ['.'+self.code_extension]
        package_json['contributes']['grammars'][0]['language'] = self.language_id
        package_json['contributes']['grammars'][0]['scopeName'] = scope_name
        package_json['contributes']['snippets'][0]['language'] = self.language_id
        package_json['contributes']['themes'][0]['path'] = './' + theme_filename

        package_fp = os.path.join(extension_dir_path, 'package.json')
        with open(package_fp, 'w', encoding='utf-8') as f:
            json.dump(package_json, f, ensure_ascii=False, indent=4)
            if self.verbose > 1:
                print('  generated package.json')

        # generate tmGrammar.json
        with pkg_resources.path(resources, 'tmGrammar.json') as fp:
            with open(fp, 'r') as json_file:
                grammar_json = json.load(json_file)

        grammar_json['scopeName'] = scope_name
        # handle instructions
        grammar_json['repository']['instructions']['begin'] = self._replace_token_with_regex_list(
            grammar_json['repository']['instructions']['begin'],
            '##INSTRUCTIONS##',
            self.model.instruction_mnemonics
        )

        # handle registers
        if len(self.model.registers) > 0:
            # update the registers syntax
            grammar_json['repository']['registers']['match'] = self._replace_token_with_regex_list(
                grammar_json['repository']['registers']['match'],
                '##REGISTERS##',
                self.model.registers
            )
        else:
            # remove the registers syntax
            del grammar_json['repository']['registers']

        # handle predefined labels
        predefined_labels = self.model.predefined_labels
        if len(predefined_labels) > 0:
            # update the compiler-labels syntax.
            # BUG FIX: the template string must come from 'compiler_labels',
            # not 'registers' — the old code substituted into the wrong rule
            # and raised KeyError whenever 'registers' had been deleted above.
            grammar_json['repository']['compiler_labels']['match'] = self._replace_token_with_regex_list(
                grammar_json['repository']['compiler_labels']['match'],
                '##COMPILERCONSTANTS##',
                predefined_labels
            )
        else:
            # remove the compiler-labels syntax
            del grammar_json['repository']['compiler_labels']

        # handle bespokeasm directives
        for item in grammar_json['repository']['directives']['patterns']:
            if 'keyword.other.directive' == item['name']:
                directives_regex = '|'.join(['\\.'+d for d in COMPILER_DIRECTIVES_SET])
                directives_str = item['match']
                item['match'] = directives_str.replace('##DIRECTIVES##', directives_regex)
            elif 'storage.type' == item['name']:
                datatypes_regex = '|'.join(['\\.'+d for d in BYTECODE_DIRECTIVES_SET])
                datatypes_str = item['match']
                item['match'] = datatypes_str.replace('##DATATYPES##', datatypes_regex)
            elif 'meta.preprocessor' == item['name']:
                for pattern in item['patterns']:
                    if 'name' in pattern and 'keyword.control.preprocessor' == pattern['name']:
                        preprocessor_regex = '|'.join(PREPROCESSOR_DIRECTIVES_SET)
                        preprocessor_str = pattern['match']
                        pattern['match'] = preprocessor_str.replace('##PREPROCESSOR##', preprocessor_regex)

        tmGrammar_fp = os.path.join(extension_dir_path, 'syntaxes', 'tmGrammar.json')
        with open(tmGrammar_fp, 'w', encoding='utf-8') as f:
            json.dump(grammar_json, f, ensure_ascii=False, indent=4)
            if self.verbose > 1:
                print(f'  generated {os.path.basename(tmGrammar_fp)}')

        # copy snippets.json and language-configuration.json, nothing to modify
        with pkg_resources.path(resources, 'snippets.json') as fp:
            shutil.copy(str(fp), extension_dir_path)
            if self.verbose > 1:
                print(f'  generated {os.path.basename(str(fp))}')

        with pkg_resources.path(resources, 'language-configuration.json') as fp:
            shutil.copy(str(fp), extension_dir_path)
            if self.verbose > 1:
                print(f'  generated {os.path.basename(str(fp))}')

        with pkg_resources.path(resources, 'tmTheme.xml') as fp:
            shutil.copy(str(fp), os.path.join(extension_dir_path, theme_filename))
            if self.verbose > 1:
                print(f'  generated {theme_filename}')
Esempio n. 22
0
    def load_theme(self, file_path: Union[str, PathLike, io.StringIO,
                                          PackageResource]):
        """
        Loads a theme file, and currently, all associated data like fonts and images required
        by the theme.

        Accepts a plain path, an already-open StringIO buffer, or a
        PackageResource pointing at a theme bundled inside a package. Records
        the theme file's mtime (so later external edits can be detected),
        parses the JSON theme data, and — only if parsing succeeded — loads
        the referenced fonts, images and pre-computed shadow edges.

        :param file_path: The path to the theme we want to load.
        """
        if isinstance(file_path, PackageResource):
            if USE_IMPORT_LIB_RESOURCE:
                # Read the packaged theme into an in-memory buffer; the
                # resources.path() context is only entered to stat the
                # (possibly temporarily extracted) file for its mtime.
                used_file_path = io.StringIO(
                    read_text(file_path.package, file_path.resource))
                self._theme_file_path = file_path
                with path(file_path.package,
                          file_path.resource) as package_file_path:
                    self._theme_file_last_modified = os.stat(
                        package_file_path).st_mtime
            elif USE_FILE_PATH:
                used_file_path = file_path.to_path()
            # NOTE(review): if neither USE_IMPORT_LIB_RESOURCE nor
            # USE_FILE_PATH is set, used_file_path stays unbound and the
            # 'with' below raises NameError — confirm one flag is always set.

        elif not isinstance(file_path, io.StringIO):
            self._theme_file_path = create_resource_path(file_path)
            try:
                self._theme_file_last_modified = os.stat(
                    self._theme_file_path).st_mtime
            except (pygame.error, FileNotFoundError, OSError):
                # Missing/inaccessible file: fall back to epoch so any later
                # successful stat always looks "newer".
                self._theme_file_last_modified = 0
            used_file_path = self._theme_file_path
        else:
            # Already an in-memory buffer; use it directly.
            used_file_path = file_path

        with self._opened_w_error(used_file_path, 'r') as (theme_file, error):
            if error:
                warnings.warn("Failed to open theme file at path:" +
                              str(file_path))
                load_success = False
            else:
                try:
                    theme_dict = json.load(theme_file,
                                           object_pairs_hook=OrderedDict)
                except json.decoder.JSONDecodeError:
                    warnings.warn(
                        "Failed to load current theme file, check syntax",
                        UserWarning)
                    load_success = False
                else:
                    load_success = True

                if load_success:

                    for element_name in theme_dict.keys():
                        if element_name == 'defaults':
                            self._load_colour_defaults_from_theme(theme_dict)
                        else:
                            self._load_prototype(element_name, theme_dict)
                            # NOTE(review): 'font' uses a bare 'if' while the
                            # rest form an if/elif chain. Behaviour is the
                            # same here because data_type matches at most one
                            # branch, but 'elif' would be clearer.
                            for data_type in theme_dict[element_name]:
                                if data_type == 'font':
                                    self._load_element_font_data_from_theme(
                                        data_type, element_name, theme_dict)

                                if data_type == 'colours':
                                    self._load_element_colour_data_from_theme(
                                        data_type, element_name, theme_dict)

                                elif data_type == 'images':
                                    self._load_element_image_data_from_theme(
                                        data_type, element_name, theme_dict)

                                elif data_type == 'misc':
                                    self._load_element_misc_data_from_theme(
                                        data_type, element_name, theme_dict)

        if load_success:
            self._load_fonts(
            )  # save to trigger load with the same data as it won't do anything
            self._load_images()
            self._preload_shadow_edges()
Esempio n. 23
0
 def _fields_doc(cls) -> ContextManager[Path_T]:
     """Return a context manager yielding a path to the packaged 'doc' resource."""
     doc_resource = res.path(naaccr_layout, 'doc')
     return doc_resource
Esempio n. 24
0
    def compute(self, inputs, outputs):
        """Run one XFOIL analysis and store the 2D maximum lift coefficient.

        Populates a temporary working directory (XFOIL executable, wing
        profile file, generated stdin script), delegates execution to the
        parent class ``compute``, then reads the polar output into
        ``outputs["xfoil:CL_max_2D"]`` and optionally archives the I/O files
        to the configured result folder.
        """

        # Create result folder first (if it must fail, let it fail as soon as possible)
        result_folder_path = self.options[OPTION_RESULT_FOLDER_PATH]
        if result_folder_path != "":
            os.makedirs(result_folder_path, exist_ok=True)

        # Get inputs
        reynolds = inputs["xfoil:reynolds"]
        mach = inputs["xfoil:mach"]
        thickness_ratio = inputs["data:geometry:wing:thickness_ratio"]

        # Pre-processing (populating temp directory) -----------------------------------------------
        # XFoil exe
        tmp_directory = self._create_tmp_directory()
        if self.options[OPTION_XFOIL_EXE_PATH]:
            # if a path for Xfoil has been provided, simply use it
            self.options["command"] = [self.options[OPTION_XFOIL_EXE_PATH]]
        else:
            # otherwise, copy the embedded resource in tmp dir
            copy_resource(xfoil699, XFOIL_EXE_NAME, tmp_directory.name)
            self.options["command"] = [
                pth.join(tmp_directory.name, XFOIL_EXE_NAME)
            ]

        # I/O files
        self.stdin = pth.join(tmp_directory.name, _INPUT_FILE_NAME)
        self.stdout = pth.join(tmp_directory.name, _STDOUT_FILE_NAME)
        self.stderr = pth.join(tmp_directory.name, _STDERR_FILE_NAME)

        # profile file
        tmp_profile_file_path = pth.join(tmp_directory.name,
                                         _TMP_PROFILE_FILE_NAME)
        profile = get_profile(file_name=self.options[OPTION_PROFILE_NAME],
                              thickness_ratio=thickness_ratio).get_sides()
        # XFOIL expects a plain-text coordinate file with a header line.
        np.savetxt(
            tmp_profile_file_path,
            profile.to_numpy(),
            fmt="%.15f",
            delimiter=" ",
            header="Wing",
            comments="",
        )

        # standard input file
        tmp_result_file_path = pth.join(tmp_directory.name,
                                        _TMP_RESULT_FILE_NAME)
        parser = InputFileGenerator()
        # The stdin script is rendered from a template bundled with the package.
        with path(resources, _INPUT_FILE_NAME) as input_template_path:
            parser.set_template_file(input_template_path)
            parser.set_generated_file(self.stdin)

            # Fills numeric values
            parser.mark_anchor("RE")
            parser.transfer_var(float(reynolds), 1, 1)
            parser.mark_anchor("M")
            parser.transfer_var(float(mach), 1, 1)
            parser.mark_anchor("ITER")
            parser.transfer_var(self.options[OPTION_ITER_LIMIT], 1, 1)
            parser.mark_anchor("ASEQ")
            parser.transfer_var(self.options[OPTION_ALPHA_START], 1, 1)
            parser.transfer_var(self.options[OPTION_ALPHA_END], 2, 1)

            # Fills string values
            # If a provide path contains the string that is used as next anchor, the process
            # will fail. Doing these replacements at the end prevent this to happen.
            parser.reset_anchor()
            parser.mark_anchor("LOAD")
            parser.transfer_var(tmp_profile_file_path, 1, 1)
            parser.mark_anchor("PACC", -2)
            parser.transfer_var(tmp_result_file_path, 1, 1)

            parser.generate()

        # Run XFOIL --------------------------------------------------------------------------------
        self.options["external_input_files"] = [
            self.stdin, tmp_profile_file_path
        ]
        self.options["external_output_files"] = [tmp_result_file_path]
        super().compute(inputs, outputs)

        # Post-processing --------------------------------------------------------------------------
        result_array = self._read_polar(tmp_result_file_path)
        outputs["xfoil:CL_max_2D"] = self._get_max_cl(result_array["alpha"],
                                                      result_array["CL"])

        # Getting output files if needed
        if self.options[OPTION_RESULT_FOLDER_PATH]:
            if pth.exists(tmp_result_file_path):
                polar_file_path = pth.join(
                    result_folder_path,
                    self.options[OPTION_RESULT_POLAR_FILENAME])
                shutil.move(tmp_result_file_path, polar_file_path)

            if pth.exists(self.stdin):
                stdin_file_path = pth.join(result_folder_path,
                                           _INPUT_FILE_NAME)
                shutil.move(self.stdin, stdin_file_path)

            if pth.exists(self.stdout):
                stdout_file_path = pth.join(result_folder_path,
                                            _STDOUT_FILE_NAME)
                shutil.move(self.stdout, stdout_file_path)

            if pth.exists(self.stderr):
                stderr_file_path = pth.join(result_folder_path,
                                            _STDERR_FILE_NAME)
                shutil.move(self.stderr, stderr_file_path)

        tmp_directory.cleanup()
Esempio n. 25
0
 def _code_labels(cls) -> ContextManager[Path_T]:
     """Return a context manager yielding a path to the packaged 'code-labels' resource."""
     labels_resource = res.path(naaccr_r_raw, 'code-labels')
     return labels_resource
Esempio n. 26
0
def cert_path():
    """Return a context manager that yields a pathlib.Path object for the
    bundled sspanel/data/survivalservers-com-chain.pem certificate file."""
    pem_resource = path('sspanel.data', 'survivalservers-com-chain.pem')
    return pem_resource
Esempio n. 27
0
class NAACCR_I2B2(object):
    """Static i2b2 ontology resources and SQL scripts for NAACCR cancer cases."""

    # Raw-string trick: a raw literal cannot end with a backslash, so a
    # throw-away 'x' is appended and sliced off to get a trailing '\'.
    top_folder = r'\i2b2\naaccr\x'[:-1]
    c_name = 'Cancer Cases (NAACCR Hierarchy)'
    sourcesystem_cd = '*****@*****.**'

    # Per-item metadata tables loaded from CSV resources packaged with heron_load.
    tumor_item_type = _with_path(res.path(heron_load, 'tumor_item_type.csv'),
                                 tab.read_csv)

    seer_recode_terms = _with_path(res.path(heron_load, 'seer_recode_terms.csv'),
                                   tab.read_csv)

    cs_terms = _with_path(res.path(heron_load, 'cs-terms.csv'),
                          tab.read_csv).drop(['update_date', 'sourcesystem_cd'])

    tx_script = SqlScript(
        'naaccr_txform.sql',
        res.read_text(heron_load, 'naaccr_txform.sql'),
        [])

    per_item_view = 'tumor_item_type'

    per_section = _with_path(res.path(heron_load, 'section.csv'),
                             tab.read_csv)

    # Ordered view definitions: each entry is (view name, names of input views).
    ont_script = SqlScript(
        'naaccr_concepts_load.sql',
        res.read_text(heron_load, 'naaccr_concepts_load.sql'),
        [
            ('i2b2_path_concept', []),
            ('naaccr_top_concept', ['naaccr_top', 'current_task']),
            ('section_concepts', ['section', 'naaccr_top']),
            ('item_concepts', [per_item_view]),
            ('code_concepts', [per_item_view, 'loinc_naaccr_answer', 'code_labels']),
            ('primary_site_concepts', ['icd_o_topo']),
            # TODO: morphology
            ('seer_recode_concepts', ['seer_site_terms', 'naaccr_top']),
            ('site_schema_concepts', ['cs_terms']),
            ('naaccr_ontology', []),
        ])

    @classmethod
    def ont_view_in(cls, spark: SparkSession_T, task_hash: str, update_date: dt.date,
                    who_cache: Opt[Path_T] = None) -> DataFrame:
        """Build and return the final NAACCR ontology view.

        :param spark: session used to create the intermediate views.
        :param task_hash: identifier recorded in the 'current_task' view.
        :param update_date: stamped onto the top-level folder record.
        :param who_cache: optional path to cached WHO Oncology metadata; when
            absent, a single placeholder topology record is used instead.
        """
        if who_cache:
            who_topo = OncologyMeta.read_table(who_cache, *OncologyMeta.topo_info)
            icd_o_topo = OncologyMeta.icd_o_topo(who_topo)
        else:
            # FIX: Logger.warn() is a deprecated alias; use warning().
            log.warning('skipping WHO Topology terms')
            icd_o_topo = tab.DataFrame.from_records([dict(
                lvl=3, concept_cd='C00', c_visualattributes='FA',
                path='abc', concept_path='LIP', concept_name='x')])

        top = tab.DataFrame.from_records([dict(
            c_hlevel=1,
            c_fullname=cls.top_folder,
            c_name=cls.c_name,
            update_date=update_date,
            sourcesystem_cd=cls.sourcesystem_cd)])
        current_task = tab.DataFrame.from_records([dict(task_hash=task_hash)])
        views = create_objects(spark, cls.ont_script,
                               current_task=current_task,
                               naaccr_top=top,
                               section=cls.per_section,
                               tumor_item_type=cls.tumor_item_type,
                               loinc_naaccr_answer=LOINC_NAACCR.answer,
                               code_labels=NAACCR_R.code_labels(),
                               icd_o_topo=icd_o_topo,
                               cs_terms=cls.cs_terms,
                               seer_site_terms=cls.seer_recode_terms)

        # The last object defined by the script is the complete ontology view.
        name, _, _ = cls.ont_script.objects[-1]
        return views[name]
Esempio n. 28
0
import rpy2
from rpy2.robjects import numpy2ri
from rpy2.robjects import pandas2ri
# Enable automatic numpy/pandas <-> R data conversion for all rpy2 calls below.
numpy2ri.activate()
pandas2ri.activate()

import importlib.resources as pkg_resources

# This loads in the stored principal components and CCA objects needed to conduct analysis
# puff_pca: the PC space to project puffs into
# ccafd: the CCA object used to calculate new CCA scores
# puffmeans: mean values of puffs used to fit the above two objects
# Each resource is materialised to a real file path (resources.path) because
# the R interpreter needs an on-disk filename, not a Python file object.
with pkg_resources.path('lib', 'puff_pca.rds') as filepath:
    rpy2.robjects.r("puff_pca <- readRDS('" + str(filepath) + "')")

with pkg_resources.path('lib', 'ccafd.rds') as filepath:
    rpy2.robjects.r("ccafd <- readRDS('" + str(filepath) + "')")

with pkg_resources.path('lib', 'puffmeans.Rdata') as filepath:
    rpy2.robjects.r["load"](str(filepath))

# Source the analysis helpers, then bind the R functions called from Python.
with pkg_resources.path('lib', 'analysis.R') as filepath:
    rpy2.robjects.r["source"](str(filepath))

get_pc_scores_r = rpy2.robjects.globalenv['get_pc_scores']
get_features_r = rpy2.robjects.globalenv['get_features']

def get_features(events, intensities,
                 dims = ["residuals", "snr"],
                 stats = ['max','min','mean','median','std']):
    intens_features = get_features_r(intensities)
Esempio n. 29
0
 def test_natural_path(self):
     # Internal implementation detail: resources that already live on the
     # file system are served in place rather than copied into a tempdir.
     with resources.path(self.data, 'utf-8.file') as extracted:
         assert 'data' in str(extracted)
Esempio n. 30
0
def read_config(cfg_file: str) -> Configuration:
    """Load a single Configuration from a file packaged with this module."""
    with resources.path(__package__, cfg_file) as cfg_path:
        cfg_name = cfg_path.stem
        return Configuration.from_file(cfg_path, name=cfg_name)
Esempio n. 31
0
 def execute(self, package, path):
     # Enter and immediately exit the resource's path context manager;
     # exercises extraction/cleanup without using the file itself.
     resource_cm = resources.path(package, path)
     with resource_cm:
         pass
Esempio n. 32
0
 def test_remove_in_context_manager(self):
     # Deleting the temporarily materialised file inside the `with` body
     # must not raise when the context manager exits.
     with resources.path(self.data, 'utf-8.file') as extracted:
         extracted.unlink()
Esempio n. 33
0
#

import sys
import importlib.resources as pkg_resources
from pint import UnitRegistry

from PySide6.QtWidgets import QApplication

from ocvl.FeederGUI import PygmyFeeder


class PygmyMetricks():
    """Placeholder application class; currently holds no state or behaviour."""

    def __init__(self):
        super().__init__()


# Press the green button in the gutter to run the script.
if __name__ == '__main__':

    # Build a unit registry extended with the pint definitions shipped in the
    # 'ocvl' package. FIX: open the definitions file in a `with` block so the
    # handle is closed after loading (it previously leaked).
    ureg = UnitRegistry()
    with pkg_resources.path("ocvl", "ocvl-pint-defs.txt") as pint_path:
        with open(pint_path, "r") as deffile:
            ureg.load_definitions(deffile)

    # Launch the Qt application with the feeder GUI as the main widget.
    app = QApplication([])
    widget = PygmyFeeder()
    widget.resize(800, 600)
    widget.show()

    sys.exit(app.exec())