Example #1
def dump_flight_to_csv(flight, track_filename_local, thermals_filename_local):
    """Dumps flight data to CSV files.

    Args:
        flight: an igc_lib.Flight, the flight to be written
        track_filename_local: a string, the name of the output CSV with track data
        thermals_filename_local: a string, the name of the output CSV with thermal data
    """
    track_filename = Path(track_filename_local).expanduser().absolute()
    with track_filename.open('wt') as csv:
        csv.write(u"timestamp,lat,lon,bearing,bearing_change_rate,"
                  u"gsp,alt,flying,circling\n")
        for fix in flight.fixes:
            csv.write(u"%f,%f,%f,%f,%f,%f,%f,%s,%s\n" %
                      (fix.timestamp, fix.lat, fix.lon, fix.bearing,
                       fix.bearing_change_rate, fix.gsp, fix.alt,
                       str(fix.flying), str(fix.circling)))

    thermals_filename = Path(thermals_filename_local).expanduser().absolute()
    with thermals_filename.open('wt') as csv:
        csv.write(
            u"timestamp_enter,timestamp_exit,time_change,alt_change,alt_gain,alt_loss,vertical_velocity\n"
        )
        for thermal in flight.thermals:
            csv.write(u"%f,%f,%f,%f,%f,%f,%f\n" %
                      (thermal.enter_fix.timestamp, thermal.exit_fix.timestamp,
                       thermal.time_change(), thermal.alt_change(),
                       thermal.alt_gain(), thermal.alt_loss(),
                       thermal.vertical_velocity()))
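A minimal usage sketch for the function above, assuming igc_lib is importable and example.igc is a valid IGC trace (Flight.create_from_file appears in Example #37):

import igc_lib

flight = igc_lib.Flight.create_from_file('example.igc')
dump_flight_to_csv(flight, '~/track.csv', '~/thermals.csv')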
Example #2
def _check_and_get_file_obj(fpath):
    p = Path(fpath)
    p.parent.mkdir(parents=True, exist_ok=True)
    if p.is_file():
        return p.open('ab')
    return p.open('wb')
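A short usage sketch: the helper creates missing parent directories and appends when the file already exists (the path is hypothetical):

f = _check_and_get_file_obj('output/run.bin')
try:
    f.write(b'payload')
finally:
    f.close()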
Example #3
def dump_flight_to_csv(flight, track_filename_local, thermals_filename_local):
    """Dumps flight data to CSV files.

    Args:
        flight: an igc_lib.Flight, the flight to be written
        track_filename_local: a string, the name of the output CSV with track data
        thermals_filename_local: a string, the name of the output CSV with thermal data
    """
    track_filename = Path(track_filename_local).expanduser().absolute()
    with track_filename.open('wt') as csv:
        csv.write(u"time (UTC),lat,lon,alt,bearing,bearing_change_rate,"
                  u"gsp,gsp avg,gsp stdev,ld,flying,circling\n")
        for fix in flight.fixes:
            time = fix.rawtime
            hour = time // 3600
            time %= 3600
            minutes = time // 60
            time %= 60
            seconds = time
            csv.write(
                u"%d:%d:%d,%f,%f,%f,%f,%f,%f,%f,%f,%f,%s,%s\n" %
                (hour, minutes, seconds, fix.lat, fix.lon, fix.gnss_alt,
                 fix.bearing, fix.bearing_change_rate, fix.gsp, fix.gsp_avg,
                 fix.gsp_std, fix.ld, str(fix.flying), str(fix.circling)))

    thermals_filename = Path(thermals_filename_local).expanduser().absolute()
    with thermals_filename.open('wt') as csv:
        csv.write(u"timestamp_enter,timestamp_exit\n")
        for thermal in flight.thermals:
            csv.write(
                u"%f,%f\n" %
                (thermal.enter_fix.timestamp, thermal.exit_fix.timestamp))
Example #4
def test_make_msi_copy(conf_dir):
    src_file = Path(conf_dir, "temp.in")
    with src_file.open('w') as s:
        s.write("+++".decode("utf8"))
    dst_file = Path(conf_dir, "temp.out")
    assert msi_update.copy_file_safe(src_file, dst_file)
    assert dst_file.exists()
    with dst_file.open('r') as d:
        content = d.read()
    assert content == "+++"
Example #5
def main(args):
    input_path = Path(args.input)
    with input_path.open('rb') as f:
        model = torch.load(f)

    model = model.cpu()

    if args.output is not None:
        output_path = Path(args.output)
    else:
        output_path = input_path.parent / (input_path.stem + '_cpu' +
                                           input_path.suffix)

    with output_path.open('wb') as f:
        torch.save(model, f)
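A minimal argparse wrapper consistent with the attributes main() reads (args.input and args.output); the flag names and help texts are assumptions:

import argparse

if __name__ == '__main__':
    parser = argparse.ArgumentParser(
        description='Re-save a pickled torch model with CPU tensors')
    parser.add_argument('input', help='path to the input checkpoint')
    parser.add_argument('--output', default=None,
                        help="output path (defaults to '<stem>_cpu<suffix>')")
    main(parser.parse_args())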
Example #6
def _read_agent_output(hostname):
    # type: (str) -> Optional[Text]
    try:
        import cmk.base.cee.real_time_checks as real_time_checks
    except ImportError:
        real_time_checks = None  # type: ignore

    if real_time_checks and real_time_checks.is_real_time_check_helper():
        rtc_package = real_time_checks.get_rtc_package()
        if rtc_package is not None:
            return cmk.utils.encoding.convert_to_unicode(rtc_package)
        return None

    cache_path = Path(cmk.utils.paths.tcp_cache_dir, hostname)
    try:
        # Use similar decoding logic as cmk.base/data_sources/abstract.py does. In case this is not
        # working as intended, we may have to keep working with bytes here.
        with cache_path.open() as f:
            output = u""
            for l in f:
                output += cmk.utils.encoding.convert_to_unicode(l)
            return output
    except IOError:
        pass
    return None
Example #7
def archive_resource(resource, destination):
    """
    Write an archive of a resource
    """
    archive_path = Path(destination, resource.basename)
    if resource.is_repo:
        temp_dir = tempfile.mkdtemp(prefix='clone-')
        try:
            repo = clone(resource.url, temp_dir, resource.commitish)
            logging.debug("Archiving %s@%s to %s", resource.url,
                          resource.commitish, archive_path)
            with archive_path.open("wb") as output:
                repo.archive(output, treeish=str(resource.commitish),
                             prefix=resource.prefix)
        finally:
            shutil.rmtree(temp_dir, ignore_errors=True)
    else:
        url = urlparse(resource.url)
        if url.scheme in SUPPORTED_URL_SCHEMES:
            logging.debug("Fetching %s to %s", resource.url, archive_path)
            fetch_url(url, str(archive_path), 5)
        elif url.scheme in ['', 'file'] and url.netloc == '':
            logging.debug("Copying %s to %s", url.path, archive_path)
            shutil.copyfile(url.path, str(archive_path))
    # else: UnsupportedScheme

    return archive_path
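fetch_url is not shown here; a plausible sketch, under the assumption that it simply retries the download a fixed number of times:

import urllib.request

def fetch_url(url, filename, retries):
    # url is a urllib.parse.ParseResult, as produced by urlparse() above.
    for attempt in range(retries):
        try:
            urllib.request.urlretrieve(url.geturl(), filename)
            return
        except OSError:
            if attempt == retries - 1:
                raise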
Example #8
def apply_patchqueue(base_repo, pq_repo, prefix):
    """
    Link and then apply a patchqueue repository to a source repository
    """
    status_path = Path(pq_repo.working_dir, prefix, 'status')
    patches_link = Path(base_repo.git_dir, 'patches',
                        base_repo.active_branch.name)

    # make the directory tree for the patches within the base repo
    # pylint: disable=no-member
    patches_link.parent.mkdir(parents=True)

    # link the patchqueue directory for the base repo branch
    rel_path = relpath(str(status_path.parent), str(patches_link.parent))
    patches_link.symlink_to(rel_path)

    # create an empty status file
    with status_path.open('w'):
        pass

    patches = subprocess.check_output(['guilt', 'series'],
                                      cwd=base_repo.working_dir)
    if patches:
        subprocess.check_call(['guilt', 'push', '--all'],
                              cwd=base_repo.working_dir)
Example #9
def _iter_markdown(input_path: pl.Path) -> typ.Iterable[lptyp.RawMarkdown]:
    prose_lines: lptyp.Lines = []

    # TODO: encoding from config
    with input_path.open(mode="r", encoding="utf-8") as fh:
        input_lines = enumerate(fh)
        for i, line_val in input_lines:
            if is_fence(line_val):
                # TODO: The separation here isn't very nice.
                #   Mentally keeping track of the state of the
                #   line iterator (has it gone past the fence?) is
                #   more complicated than it probably has to be.
                if prose_lines:
                    yield lptyp.RawElement(input_path, prose_lines)
                    prose_lines = []

                fence_str         = line_val[:3]
                info_string       = line_val[3:].strip()
                inner_block_lines = list(_iter_fenced_block_lines(fence_str, input_lines))
                lineno            = i + 1
                first_line        = lptyp.Line(lineno, line_val)
                last_line         = lptyp.Line(lineno + len(inner_block_lines) + 1, fence_str + "\n")
                block_lines       = [first_line] + inner_block_lines + [last_line]

                yield lptyp.RawFencedBlock(input_path, block_lines, info_string)
            else:
                line_no = i + 1
                line    = lptyp.Line(line_no, line_val)
                prose_lines.append(line)

    if prose_lines:
        yield lptyp.RawElement(input_path, prose_lines)
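is_fence and _iter_fenced_block_lines are defined elsewhere in the package; hypothetical sketches consistent with how they are called above:

def is_fence(line_val: str) -> bool:
    # CommonMark fences open with at least three backticks or tildes.
    return line_val.startswith("```") or line_val.startswith("~~~")

def _iter_fenced_block_lines(fence_str, input_lines):
    # Consume enumerated lines up to and including the closing fence,
    # yielding only the inner lines.
    for i, line_val in input_lines:
        if line_val.rstrip() == fence_str:
            return
        yield lptyp.Line(i + 1, line_val)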
Example #10
    def __init__(self, config):
        super(AggregationRawdataGenerator, self).__init__()
        self._config = config

        self._credentials = config["credentials"]
        if self._credentials == "automation":
            self._username = self._credentials

            secret_file_path = Path(
                cmk.utils.paths.var_dir
            ) / "web" / self._username / "automation.secret"

            with secret_file_path.open(encoding="utf-8") as f:
                self._secret = f.read()
        else:
            self._username, self._secret = self._credentials[1]

        site_config = config["site"]

        if site_config == "local":
            self._site_url = "http://localhost:%d/%s" % (
                cmk.utils.site.get_apache_port(), cmk_version.omd_site())
        else:
            self._site_url = site_config[1]

        self._errors = []
Example #11
def archive_resource(resource, destination):
    """
    Write an archive of a resource
    """
    archive_path = Path(destination, resource.basename)
    if resource.is_repo:
        temp_dir = tempfile.mkdtemp(prefix='clone-')
        try:
            repo = clone(resource.url, temp_dir, resource.commitish)
            logging.debug("Archiving %s@%s to %s", resource.url,
                          resource.commitish, archive_path)
            with archive_path.open("wb") as output:
                repo.archive(output, treeish=str(resource.commitish),
                             prefix=resource.prefix)
        finally:
            shutil.rmtree(temp_dir, ignore_errors=True)
    else:
        url = urlparse(resource.url)
        if url.scheme in SUPPORTED_URL_SCHEMES:
            logging.debug("Fetching %s to %s", resource.url, archive_path)
            fetch_url(url, str(archive_path), 5)
        elif url.scheme in ['', 'file'] and url.netloc == '':
            logging.debug("Copying %s to %s", url.path, archive_path)
            shutil.copyfile(url.path, str(archive_path))
    # else: UnsupportedScheme

    return archive_path
Example #12
def load_legacy(filename):
    m = Path(filename)
    name = m.stem
    d = {}
    c = count()
    r = True

    def num(s):
        try:
            return int(s)
        except ValueError:
            return float(s)

    with m.open() as f:
        while r:
            next(c)
            r = re.search(r"([^\d\W]+)\s+(-*\d+\.*\d*)", f.readline())
            if r:
                d[r.groups()[0]] = num(r.groups()[1])
    l = next(c) - 1
    data = np.loadtxt(str(m.resolve()), skiprows=l)
    dataset = NpDataset(data, resolution=d["cellsize"])
    if "UTMzone" in d:
        gp = GeoPoint(UTM("UTMzone"), d["xllcorner"], d["yllcorner"])
    else:
        gp = GeoPoint(UTM(1), d["xllcorner"], d["yllcorner"])
    return GridMesh(gp, dataset)
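The header this parser expects is an ESRI-ASCII-grid-style block of key/value pairs, read until the first line that fails the regex; the numeric rows that follow are loaded with np.loadtxt. A hypothetical input:

ncols         4
nrows         3
cellsize      30.0
xllcorner     440720.0
yllcorner     3750120.0
UTMzone       33
1 2 3 4
5 6 7 8
9 10 11 12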
Example #13
def segment(path, model, word2vec, output_folder, wiki=False):

    for filename in glob.glob(path + '*.txt'):
        with open(filename, "r+") as f:
            paragraph = f.read()
            sentences = [clean_paragraph(paragraph)]

            cutoffs = evaluate.predict_cutoffs(sentences, model, word2vec)
            total = []
            segment = []
            for i, (sentence, cutoff) in enumerate(zip(sentences, cutoffs)):
                segment.append(sentence)
                if cutoff:
                    full_segment = '.'.join(segment) + '.'
                    full_segment = full_segment + '\n' + section_delimiter + '\n'
                    total.append(full_segment)
                    segment = []

        file_id = Path(filename).name

        # Model does not return prediction for last sentence
        segment.append(sentences[-1])
        total.append('.'.join(segment))

        output_file_content = "".join(total)
        output_file_full_path = Path(output_folder).joinpath(Path(file_id))
        with output_file_full_path.open('w') as f:
            f.write(output_file_content)
Example #14
def dump_thermals_to_cup_file(flight, cup_filename_local):
    """Dump flight's thermals to a .cup file (SeeYou).

    Args:
        flight: an igc_lib.Flight, the flight to be written
        cup_filename_local: a string, the name of the file to be written.
    """
    cup_filename = Path(cup_filename_local).expanduser().absolute()
    with cup_filename.open('wt') as wpt:
        wpt.write(u'name,code,country,lat,')
        wpt.write(u'lon,elev,style,rwdir,rwlen,freq,desc,userdata,pics\n')

        def write_fix(name, fix):
            lat = _degrees_float_to_degrees_minutes_seconds(fix.lat, 'lat')
            lon = _degrees_float_to_degrees_minutes_seconds(fix.lon, 'lon')
            wpt.write(u'"%s",,,%02d%02d.%03d%s,' % (
                name, lat.degrees, lat.minutes,
                int(round(lat.seconds/60.0*1000.0)), lat.hemisphere))
            wpt.write(u'%03d%02d.%03d%s,%fm,,,,,,,' % (
                lon.degrees, lon.minutes,
                int(round(lon.seconds/60.0*1000.0)), lon.hemisphere,
                fix.gnss_alt))
            wpt.write(u'\n')

        for i, thermal in enumerate(flight.thermals):
            write_fix(u'%02d' % i, thermal.enter_fix)
            write_fix(u'%02d_END' % i, thermal.exit_fix)
Example #15
def test_manager_get_autochecks_of(test_config, autochecks_content,
                                   expected_result):
    autochecks_file = Path(cmk.utils.paths.autochecks_dir, "host.mk")
    with autochecks_file.open("w", encoding="utf-8") as f:  # pylint: disable=no-member
        f.write(autochecks_content)

    manager = test_config._autochecks_manager

    if expected_result is MKGeneralException:
        with pytest.raises(MKGeneralException):
            manager.get_autochecks_of("host", config.compute_check_parameters,
                                      config.service_description,
                                      config.get_check_variables)
        return

    result = manager.get_autochecks_of("host", config.compute_check_parameters,
                                       config.service_description,
                                       config.get_check_variables)
    assert result == expected_result

    # Check that the ConfigCache method also returns the correct data
    assert test_config.get_autochecks_of("host") == result

    # Check that there are no str items (None, int, ...)
    assert all(not isinstance(s.item, str) for s in result)
    # All descriptions need to be unicode
    assert all(isinstance(s.description, six.text_type) for s in result)
Example #16
def dump_thermals_to_cup_file(flight, cup_filename_local):
    """Dump flight's thermals to a .cup file (SeeYou).

    Args:
        flight: an igc_lib.Flight, the flight to be written
        cup_filename_local: a string, the name of the file to be written.
    """
    cup_filename = Path(cup_filename_local).expanduser().absolute()
    with cup_filename.open('wt') as wpt:
        wpt.write(u'name,code,country,lat,')
        wpt.write(u'lon,elev,style,rwdir,rwlen,freq,desc,userdata,pics\n')

        def write_fix(name, fix):
            lat = _degrees_float_to_degrees_minutes_seconds(fix.lat, 'lat')
            lon = _degrees_float_to_degrees_minutes_seconds(fix.lon, 'lon')
            wpt.write(
                u'"%s",,,%02d%02d.%03d%s,' %
                (name, lat.degrees, lat.minutes,
                 int(round(lat.seconds / 60.0 * 1000.0)), lat.hemisphere))
            wpt.write(u'%03d%02d.%03d%s,%fm,,,,,,,' %
                      (lon.degrees, lon.minutes,
                       int(round(lon.seconds / 60.0 * 1000.0)), lon.hemisphere,
                       fix.gnss_alt))
            wpt.write(u'\n')

        for i, thermal in enumerate(flight.thermals):
            write_fix(u'%02d' % i, thermal.enter_fix)
            write_fix(u'%02d_END' % i, thermal.exit_fix)
Example #17
    def __init__(self, retrain=False):
        self.hats = list(range(4))
        self.classifier = None
        self.helmet_probabilities = np.ones(4)

        # Get base-sky
        print('********************************')
        print(os.getcwd())
        print('********************************')
        for val in Paths.helmet_data.glob('*.p'):
            print('********************************')
            print('********************************')
            print('********************************')
            print(val)
            print('********************************')
            print('********************************')
            print('********************************')
        print('********************************')
        path = Path(Paths.helmet_data, "base_sky.p")
        with path.open("rb") as f:
            (_, _, _), _, _, frame = pickle.load(f)
        self.base_frame = self.to_matrix(frame)
        self.base_sky = self.get_sky(frame)

        # If set to train
        if retrain:
            self.train_from_path()
Example #18
def apply_patchqueue(base_repo, pq_repo, prefix):
    """
    Link and then apply a patchqueue repository to a source repository
    """
    status_path = Path(pq_repo.working_dir, prefix, 'status')
    patches_link = Path(base_repo.git_dir, 'patches',
                        base_repo.active_branch.name)

    # make the directory tree for the patches within the base repo
    # pylint: disable=no-member
    patches_link.parent.mkdir(parents=True)

    # link the patchqueue directory for the base repo branch
    rel_path = relpath(str(status_path.parent), str(patches_link.parent))
    patches_link.symlink_to(rel_path)

    # create an empty status file
    with status_path.open('w'):
        pass

    patches = subprocess.check_output(['guilt', 'series'],
                                      cwd=base_repo.working_dir)
    if patches:
        subprocess.check_call(['guilt', 'push', '--all'],
                              cwd=base_repo.working_dir)
Example #19
def test_check_crash_report_read_snmp_info(monkeypatch):
    Scenario().apply(monkeypatch)
    config.load_checks(
        check_api.get_check_api_context,
        ["%s/uptime" % cmk.utils.paths.checks_dir,
         "%s/snmp_uptime" % cmk.utils.paths.checks_dir])

    cache_path = Path(cmk.utils.paths.data_source_cache_dir, "snmp", "testhost")
    cache_path.parent.mkdir(parents=True, exist_ok=True)  # pylint: disable=no-member
    with cache_path.open("w", encoding="utf-8") as f:
        f.write(u"[]\n")

    try:
        raise Exception("DING")
    except Exception:
        crash = crash_reporting.CheckCrashReport.from_exception_and_context(
            hostname="testhost",
            check_plugin_name="snmp_uptime",
            item=None,
            is_manual_check=False,
            params=None,
            description=u"Uptime",
            info="X",
            text=u"Output",
        )

    assert crash.agent_output is None
    assert crash.snmp_info == u"[]\n"
Example #20
def test_cfg(monkeypatch):
    test_hosts = [
        "ds-test-host1", "ds-test-host2", "ds-test-node1", "ds-test-node2"
    ]

    ts = Scenario()

    for h in test_hosts:
        ts.add_host(h)

    ts.set_option("ipaddresses", dict((h, "127.0.0.1") for h in test_hosts))
    ts.add_cluster("ds-test-cluster1",
                   nodes=["ds-test-node1", "ds-test-node2"])

    ts.set_ruleset("datasource_programs", [
        ('cat %s/<HOST>' % cmk.utils.paths.tcp_cache_dir, [], test_hosts, {}),
    ])

    with open("%s/tests/integration/cmk/base/test-files/linux-agent-output" %
              repo_path()) as f:
        linux_agent_output = f.read().decode("utf-8")

    for h in test_hosts:
        cache_path = Path(cmk.utils.paths.tcp_cache_dir, h)
        cache_path.parent.mkdir(parents=True, exist_ok=True)  # pylint: disable=no-member

        with cache_path.open("w", encoding="utf-8") as f:
            f.write(linux_agent_output)

    return ts.apply(monkeypatch)
Example #21
    def convert_to_csv(self,
                       dat_path: str,
                       csv_path: Optional[str] = '') -> None:
        """Convert a .dat file to csv

        Check whether the .dat file exists
        Then read it
        Finally store its .csv equivalent

        :see: https://tools.ietf.org/html/rfc4180

        :param dat_path: path to the .dat file
        :param csv_path: name and location of the generated .csv file
        :return: None
        """
        # checking source file integrity
        source = Path(dat_path)
        if not source.exists():
            raise FileNotFoundError

        if source.suffix != Dat.ext:
            raise BadFileFormatException(
                f'source file should contain the extension: '
                f'{Dat.ext}')

        # checking output file integrity
        if not csv_path:
            csv_path = f'{self.DEFAULT_OUTPUT_FOLDER}' \
                f'{source.name.replace(Dat.ext, Csv.ext)}'
        else:
            if not csv_path.endswith(Csv.ext):
                raise BadFileFormatException(
                    f'output should contain the extension: '
                    f'{Csv.ext}')
        output = Path(csv_path)

        if not output.exists():
            output.touch()

        # formatting content
        with source.open(mode='r', encoding=Dat.encoding) as src:
            content = src.readlines()

        # writing the formatted content
        with output.open(mode='w', encoding=Csv.encoding) as dest:
            for row in self._format_dirty_content(content):
                dest.write(row)
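A hypothetical call, assuming converter is an instance of the enclosing class and that Dat.ext/Csv.ext are '.dat'/'.csv':

converter.convert_to_csv('measures.dat', 'out/measures.csv')
# or let it derive the name under DEFAULT_OUTPUT_FOLDER:
converter.convert_to_csv('measures.dat')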
Example #22
 def store_classifier(self, path=None):
     """
     Stores the currently trained classifier.
     :param Path path: destination file; defaults to
         "helmet_classifier.p" in Paths.helmet_data
     """
     if path is None:
         path = Path(Paths.helmet_data, "helmet_classifier.p")
     with path.open("wb") as f:
         pickle.dump(self.classifier, f)
Example #23
 def load_classifier(self, path=None):
     """
     Loads a classifier from file.
     :param Path path: source file; defaults to
         "helmet_classifier.p" in Paths.helmet_data
     """
     if path is None:
         path = Path(Paths.helmet_data, "helmet_classifier.p")
     with path.open("rb") as f:
         self.classifier = pickle.load(f)
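A round-trip sketch for the two methods above, where detector is a hypothetical instance of the enclosing class:

detector.store_classifier()  # writes Paths.helmet_data / "helmet_classifier.p"
detector.load_classifier()   # restores detector.classifier from the same file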
Example #24
def test_save_autochecks_file(items, expected_content):
    autochecks.save_autochecks_file("host", items)

    autochecks_file = Path(cmk.utils.paths.autochecks_dir, "host.mk")
    with autochecks_file.open("r", encoding="utf-8") as f:  # pylint: disable=no-member
        content = f.read()

    assert expected_content == content
Example #25
    def test_config_backup_creation_no_tags(self):
        path_config_backup = Path(settings.CONFIGURATION_LAST_GOOD_FILEPATH)
        fs_cleanup(filename=force_text(path_config_backup))

        Setting.save_last_known_good()
        self.assertTrue(path_config_backup.exists())

        with path_config_backup.open(mode='r') as file_object:
            self.assertFalse('!!python/' in file_object.read())
Example #26
def _read_snmp_info(hostname):
    # type: (str) -> Optional[Text]
    cache_path = Path(cmk.utils.paths.data_source_cache_dir, "snmp", hostname)
    try:
        with cache_path.open(encoding="utf-8") as f:
            return f.read()
    except IOError:
        pass
    return None
Example #27
def test_lock_with_pid_file(tmpdir):
    pid_file = Path(tmpdir) / "test.pid"

    daemon.lock_with_pid_file(pid_file)

    assert store.have_lock("%s" % pid_file)

    with pid_file.open() as f:
        assert int(f.read()) == os.getpid()
Example #28
    def send_result_message(self, info):
        """The result message is written to stdout for log output clarity, and
        also to a distinct file to keep it separate from the rest of the
        context information."""
        message = "%s\n" % info
        sys.stdout.write(message)

        result_message_path = Path(
            self.get_work_dir()) / BackgroundJobDefines.result_message_filename
        with result_message_path.open("ab") as f:  # pylint: disable=no-member
            f.write(message.encode("utf-8"))
Example #29
    def get_file_content(self, file_name: Path) -> list:
        """

        :param file_name:
        :return:
        """
        try:
            with file_name.open('r') as file:
                content = file.readlines()
            return content
        except FileNotFoundError as message1:
            self.logger.error(message1)
            return []
Example #30
def cmd_run(path):
    """
    Runs an application.
    """
    os.chdir(path)
    package = Path("./package.json")
    if not package.is_file():
        raise Exception("Invalid package: no package.json file")

    package = json.load(package.open())

    if "engines" not in package or package["engines"] == {}:
        raise Exception("Invalid package: no engines specified")

    r = requests.get("%s/index.json" % Particle.REPO)
    r.raise_for_status()
    remote_particles = r.json()["particles"]

    variables = {}
    for name, range_ in package["engines"].items():
        p = Particle.get_local(name, range_)
        if not p:
            # if auto_fetch:
            if name in remote_particles:
                v = semver.max_satisfying(remote_particles[name], range_,
                                          False)
                if v:
                    print("Downloading %s %s..." % (name, v))
                    p = Particle.fetch(name, v)
                else:
                    print("Cannot satisfy %s (%s), aborting." % (name, range_))
                    sys.exit(1)
            else:
                print("No particle named %s exists, aborting." % name)
                sys.exit(1)
        variables["$" + name.upper().replace("-", "_")] = str(p.main)

    pattern = re.compile('|'.join(map(re.escape, variables.keys())))

    if "lepton" not in package:
        raise Exception("Invalid package: no lepton key in particle.json")
    elif "run" not in package["lepton"]:
        raise Exception("Invalid package: no lepton.run key in particle.json")

    args = package["lepton"]["run"]
    args = pattern.sub(lambda x: variables[x.group()], args)
    args = shlex.split(args)
    print("Resulting command line: %r" % args)
    print("Current dir: %s" % os.getcwd())
    os.execvp(args[0], args)
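A minimal package.json this runner accepts; the engine name and entry point are hypothetical. "$DEMO_ENGINE" in lepton.run is substituted with the particle's main path (engine name upper-cased, dashes replaced with underscores):

{
    "engines": { "demo-engine": "^1.0.0" },
    "lepton": { "run": "$DEMO_ENGINE main.le" }
}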
Example #31
 def __init__(self, num: int):
     data_path = Path(f"data/{num}.txt")
     if not data_path.is_file():
         self.id = -1
     else:
         with data_path.open() as file:
             info = file.readline().split()
             id, name = info[0], " ".join(info[1:])
             assert (num == int(id))
             self.id = num
             self.name = name
             self.type_l = file.readline().split()
             self.sprite = f"sprites/{num}.png"
Example #32
def check_df_sources_include_flag():
    """Verify that df.include files are can return fs_used metric name"""
    checks_dirs = (cmk.utils.paths.local_checks_dir, cmk.utils.paths.checks_dir)
    logger.info("Looking for df.include files...")
    for path_dir in checks_dirs:
        df_file = Path(path_dir, 'df.include')
        if df_file.exists():
            logger.info("Inspecting %s", df_file)
            with df_file.open('r') as fid:
                r = fid.read()
                mat = re.search('^df_use_fs_used_as_metric_name *= *(True|False)', r, re.M)
                if not mat:
                    raise RuntimeError('df.include sources not yet ready for the new setup')
            logger.info("  Include file implements new fs_used as perfvalue")
Example #33
def cmd_run(path):
    """
    Runs an application.
    """
    os.chdir(path)
    package = Path("./package.json")
    if not package.is_file():
        raise Exception("Invalid package: no package.json file")

    package = json.load(package.open())

    if "engines" not in package or package["engines"] == {}:
        raise Exception("Invalid package: no engines specified")

    r = requests.get("%s/index.json" % Particle.REPO)
    r.raise_for_status()
    remote_particles = r.json()["particles"]

    variables = {}
    for name, range_ in package["engines"].items():
        p = Particle.get_local(name, range_)
        if not p:
            # if auto_fetch:
            if name in remote_particles:
                v = semver.max_satisfying(remote_particles[name], range_, False)
                if v:
                    print("Downloading %s %s..." % (name, v))
                    p = Particle.fetch(name, v)
                else:
                    print("Cannot satisfy %s (%s), aborting." % (name, range_))
                    sys.exit(1)
            else:
                print("No particle named %s exists, aborting." % name)
                sys.exit(1)
        variables["$" + name.upper().replace("-", "_")] = str(p.main)

    pattern = re.compile('|'.join(map(re.escape, variables.keys())))

    if "lepton" not in package:
        raise Exception("Invalid package: no lepton key in particle.json")
    elif "run" not in package["lepton"]:
        raise Exception("Invalid package: no lepton.run key in particle.json")

    args = package["lepton"]["run"]
    args = pattern.sub(lambda x: variables[x.group()], args)
    args = shlex.split(args)
    print("Resulting command line: %r" % args)
    print("Current dir: %s" % os.getcwd())
    os.execvp(args[0], args)
Example #34
 def write_file(self, rel_path, content):
     if not self._is_running_as_site_user():
         p = self.execute(["tee", self.path(rel_path)],
                          stdin=subprocess.PIPE,
                          stdout=open(os.devnull, "w"))
         p.communicate(six.ensure_text(content))
         p.stdin.close()
         if p.wait() != 0:
             raise Exception("Failed to write file %s. Exit-Code: %d" %
                             (rel_path, p.wait()))
     else:
         file_path = Path(self.path(rel_path))
         file_path.parent.mkdir(parents=True, exist_ok=True)
         with file_path.open("w", encoding="utf-8") as f:
             f.write(content)
Example #35
def dump_flight_to_csv(flight, track_filename_local, thermals_filename_local):
    """Dumps flight data to CSV files.

    Args:
        flight: an igc_lib.Flight, the flight to be written
        track_filename_local: a string, the name of the output CSV with track data
        thermals_filename_local: a string, the name of the output CSV with thermal data
    """
    track_filename = Path(track_filename_local).expanduser().absolute()
    with track_filename.open('wt') as csv:
        csv.write(u"timestamp,lat,lon,bearing,bearing_change_rate,"
                  u"gsp,flying,circling\n")
        for fix in flight.fixes:
            csv.write(u"%f,%f,%f,%f,%f,%f,%s,%s\n" % (
                fix.timestamp, fix.lat, fix.lon,
                fix.bearing, fix.bearing_change_rate,
                fix.gsp, str(fix.flying), str(fix.circling)))

    thermals_filename = Path(thermals_filename_local).expanduser().absolute()
    with thermals_filename.open('wt') as csv:
        csv.write(u"timestamp_enter,timestamp_exit\n")
        for thermal in flight.thermals:
            csv.write(u"%f,%f\n" % (
                thermal.enter_fix.timestamp, thermal.exit_fix.timestamp))
Example #36
    def test_locking_support(self):
        watch_folder = self._create_test_watchfolder()

        shutil.copy(TEST_SMALL_DOCUMENT_PATH, self.temporary_directory)

        path_test_file = Path(self.temporary_directory,
                              TEST_SMALL_DOCUMENT_FILENAME)

        with path_test_file.open(mode='rb+') as file_object:
            fcntl.lockf(file_object, fcntl.LOCK_EX | fcntl.LOCK_NB)
            process = Process(target=watch_folder.check_source)
            process.start()
            process.join()

            self.assertEqual(Document.objects.count(), 0)
Example #37
    def create_from_file(filename, config_class=FlightParsingConfig):
        """Creates an instance of Flight from a given file.

        Args:
            filename: a string, the name of the input IGC file
            config_class: a class that implements FlightParsingConfig

        Returns:
            An instance of Flight built from the supplied IGC file.
        """
        config = config_class()
        fixes = []
        a_records = []
        i_records = []
        h_records = []
        abs_filename = Path(filename).expanduser().absolute()
        with abs_filename.open('r', encoding="ISO-8859-1") as flight_file:
            for line in flight_file:
                line = line.replace('\n', '').replace('\r', '')
                if not line:
                    continue
                if line[0] == 'A':
                    a_records.append(line)
                elif line[0] == 'B':
                    fix = GNSSFix.build_from_B_record(line, index=len(fixes))
                    if fix is not None:
                        if fixes and math.fabs(fix.rawtime - fixes[-1].rawtime) < 1e-5:
                            # The time did not change since the previous fix.
                            # Ignore this fix.
                            pass
                        else:
                            fixes.append(fix)
                elif line[0] == 'I':
                    i_records.append(line)
                elif line[0] == 'H':
                    h_records.append(line)
                else:
                    # Do not parse any other types of IGC records
                    pass
        flight = Flight(fixes, a_records, h_records, i_records, config)
        return flight
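An end-to-end sketch tying the IGC examples in this collection together (the file names are hypothetical):

flight = Flight.create_from_file('~/flights/example.igc')
dump_flight_to_csv(flight, '~/track.csv', '~/thermals.csv')
dump_thermals_to_wpt_file(flight, '~/thermals.wpt', endpoints=True)
dump_thermals_to_cup_file(flight, '~/thermals.cup')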
Example #38
def dump_thermals_to_wpt_file(flight, wptfilename_local, endpoints=False):
    """Dump flight's thermals to a .wpt file in Geo format.

    Args:
        flight: an igc_lib.Flight, the flight to be written
        wptfilename_local: File to be written. If it exists it will be overwritten.
        endpoints: optional argument. If True, thermal endpoints as well
        as start points will be written, with the suffix END in the waypoint label.
    """
    wptfilename = Path(wptfilename_local).expanduser().absolute()
    with wptfilename.open('w') as wpt:
        wpt.write(u"$FormatGEO\n")

        for x, thermal in enumerate(flight.thermals):
            lat = _degrees_float_to_degrees_minutes_seconds(
                flight.thermals[x].enter_fix.lat, 'lat')
            lon = _degrees_float_to_degrees_minutes_seconds(
                flight.thermals[x].enter_fix.lon, 'lon')
            wpt.write(u"%02d        " % x)
            wpt.write(u"%s %02d %02d %05.2f    " % (
                lat.hemisphere, lat.degrees, lat.minutes, lat.seconds))
            wpt.write(u"%s %03d %02d %05.2f     " % (
                lon.hemisphere, lon.degrees, lon.minutes, lon.seconds))
            wpt.write(u"          %d\n" % flight.thermals[x].enter_fix.gnss_alt)

            if endpoints:
                lat = _degrees_float_to_degrees_minutes_seconds(
                    flight.thermals[x].exit_fix.lat, 'lat')
                lon = _degrees_float_to_degrees_minutes_seconds(
                    flight.thermals[x].exit_fix.lon, 'lon')
                wpt.write(u"%02dEND     " % x)
                wpt.write(u"%s %02d %02d %05.2f    " % (
                    lat.hemisphere, lat.degrees, lat.minutes, lat.seconds))
                wpt.write(u"%s %03d %02d %05.2f     " % (
                    lon.hemisphere, lon.degrees, lon.minutes, lon.seconds))
                wpt.write(u"          %d\n" % (
                    flight.thermals[x].exit_fix.gnss_alt))
Example #39
def cmd_run(path):
    """
    Runs an application.
    """
    os.chdir(path)
    package = Path("./package.json")
    if not package.is_file():
        raise Exception("Invalid package: no package.json file")

    package = json.load(package.open())

    if "engines" not in package or package["engines"] == {}:
        raise Exception("Invalid package: no engines specified")

    variables = {}
    for name, version in package["engines"].items():
        p = Particle.get_local(name, version)
        if not p:
            # if auto_fetch:
            print("Downloading %s..." % name)
            p = Particle.fetch(name, version)
        variables["$" + name.upper().replace("-", "_")] = str(p.main)

    pattern = re.compile('|'.join(map(re.escape, variables.keys())))

    if "lepton" not in package:
        raise Exception("Invalid package: no lepton key in particle.json")
    elif "run" not in package["lepton"]:
        raise Exception("Invalid package: no lepton.run key in particle.json")

    args = package["lepton"]["run"]
    args = pattern.sub(lambda x: variables[x.group()], args)
    args = shlex.split(args)
    print("Resulting command line: %r" % args)
    print("Current dir: %s" % os.getcwd())
    os.execvp(args[0], args)