Example #1
    def _write_language_file(self, out_file):
        coords = Coordinates()
        langs = defaultdict(lambda: {'count': 0, 'group': None})
        for year, entry_list in self.groups:
            if START_YEAR <= year <= END_YEAR:
                entry_list = list(entry_list)
                for entry in entry_list:
                    langs[entry.language]['count'] += 1
                    langs[entry.language]['group'] = entry.language_group_initial()

        for language in langs:
            # Number of possible points (between 4 and 30, depending on
            #  the frequency of the language)
            num_points = int(langs[language]['count'] / 5)
            num_points = max(4, min(num_points, 30))
            # Select a bunch of random points within the language's geo region
            langs[language]['coords'] = [coords.randomize(
                language, decimalPlaces=2) for _ in range(num_points)]

        langs2 = []
        for language, vals in langs.items():
            langs2.append({'l': language,
                           'g': vals['group'],
                           'c': vals['coords']})
        with open(out_file, 'w') as filehandle:
            json.dump(langs2, filehandle)

        language_index = {row['l']: i for i, row in enumerate(langs2)}
        return language_index
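
For illustration, the file written above is a JSON list of per-language records, and the returned language_index maps each language name to its position in that list. A minimal sketch of reading the file back (the path here is hypothetical; _write_language_file is given the real one):

    import json

    # Hypothetical path; use whatever out_file the method actually wrote.
    with open('languages.json') as filehandle:
        rows = json.load(filehandle)

    # Each record carries the keys written above:
    #   'l' - language name, 'g' - language group initial, 'c' - list of randomized points
    for row in rows:
        print(row['l'], row['g'], len(row['c']))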
Example #2
    def get(self, query):
        parsed_coordinates = Coordinates.parse(query)
        if parsed_coordinates is not None:
            target = {
                'ra': parsed_coordinates.ra,
                'dec': parsed_coordinates.dec
            }
            get_app_state().target = parsed_coordinates
            return jsonify(target)

        catalog_result = get_catalog().get_entry(query.upper())
        if catalog_result is not None:
            parsed_coordinates = Coordinates.parse_csvformat(
                catalog_result['RA'], catalog_result['Dec'])
            target = {
                'name': catalog_result['Name'],
                'ra': parsed_coordinates.ra,
                'dec': parsed_coordinates.dec,
                'type': catalog_result.get('Type'),
                'const': catalog_result.get('Const'),
                'minAx': catalog_result.get('MinAx'),
                'majAx': catalog_result.get('MajAx'),
                'posAng': catalog_result.get('PosAng'),
            }
            get_app_state().target = parsed_coordinates
            return jsonify(target)

        return '', 404
Example #3
    def store_values(self):
        print('Loading coordinates...')
        coords = Coordinates()
        print('Checking language overrides...')
        overrides = LanguageOverrides().list_language_overrides()
        print('Loading OED vital statistics...')
        vitalstats = VitalStatisticsCache()

        entries = []
        iterator = FrequencyIterator(message='Listing entries')
        for entry in iterator.iterate():
            if (entry.has_frequency_table() and
                    ' ' not in entry.lemma and
                    '-' not in entry.lemma):
                language_breadcrumb = vitalstats.find(entry.id, field='language')
                year = vitalstats.find(entry.id, field='first_date') or 0

                languages = []
                if language_breadcrumb is not None:
                    languages = [l for l in language_breadcrumb.split('/')
                                 if coords.is_listed(l)
                                 or l == 'English']
                else:
                    languages = ['unspecified', ]
                if entry.id in overrides:
                    languages = [overrides[entry.id], ]

                if languages:
                    # pick the most granular level (e.g. 'Icelandic' in
                    #  preference to 'Germanic')
                    language = languages[-1]
                    # Find frequency for this word
                    freq_table = entry.frequency_table()
                    frequency = freq_table.frequency(period='modern')
                    band = freq_table.band(period='modern')
                    row = (entry.lemma,
                           entry.label,
                           entry.id,
                           year,
                           frequency,
                           band,
                           language)
                    entries.append(row)

        # Sort rows by entry ID (the third element of each tuple)
        entries = sorted(entries, key=lambda row: row[2])

        # Columns: lemma, label, id, year, frequency, band, language
        with open(self.out_file, 'w', newline='') as csvfile:
            writer = csv.writer(csvfile)
            writer.writerows(entries)
Example #4
        def analyze_fun():
            app_state.calibrating = True
            calibration_data = Solver().analyze_image(
                filepath,
                timeout,
                run_callback=lambda: app_state.calibrating,
            )
            app_state.calibrating = False

            timestamp = int(datetime.datetime.now().timestamp())

            if calibration_data is None:
                log_event('Calibration failed')
            else:
                rotation_angle = calibration_data.rotation_angle
                position = calibration_data.center_deg

                log_event(
                    f'Image center: {position} Rotation: {rotation_angle}')

                if app_state.target is not None:
                    target_distance = Coordinates(
                        app_state.target.ra - position.ra,
                        app_state.target.dec - position.dec)
                    log_event(f'Distance to target: {target_distance}')

                app_state.last_known_position = {
                    'timestamp': timestamp,
                    'position': position,
                }
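
The calibrating flag and the run_callback argument suggest that analyze_fun is meant to run off the main flow, with the flag doubling as a cancellation signal for the solver. A minimal sketch of launching it on a background thread (the launcher itself is an assumption, not shown in the example):

    import threading

    # Assumed usage: run the analysis without blocking the caller; clearing
    # app_state.calibrating elsewhere would make run_callback return False.
    worker = threading.Thread(target=analyze_fun, daemon=True)
    worker.start()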
Example #5
    def analyze_image(self,
                      filepath,
                      timeout=30,
                      run_callback=None) -> Union[CalibrationData, None]:
        command = self.get_analyze_command([filepath], timeout)
        output = run_command_or_die_trying(command, timeout, run_callback)
        if output is None:
            return None

        # Output example:
        #
        # [...] pixel scale 0.907073 arcsec/pix.
        # [...]
        # Field: toughstuff/s10212.tif
        # Field center: (RA,Dec) = (114.133515, 65.594210) deg.
        # Field center: (RA H:M:S, Dec D:M:S) = (07:36:32.044, +65:35:39.156).
        # Field size: 28.9649 x 16.3092 arcminutes
        # Field rotation angle: up is 1.76056 degrees E of N
        # Field parity: pos
        # [...]

        output_regexes = {
            'pixel_scale':
            r'^.*pixel scale (?P<scale>[\d\.]*) (?P<unit>[\w\/]*)\..*$',
            'center_deg':
            r'^.*Field center: \(RA,Dec\) = \((?P<ra>[\d\.]*), (?P<dec>[\-\d\.]*)\) deg\..*$',
            'center':
            r'^.*Field center: \(RA H:M:S, Dec D:M:S\) = \((?P<ra>[\d\.\:]*), (?P<dec>[\d\.\:\+\-]*)\)\..*$',
            'size':
            r'^.*Field size: (?P<width>[\d\.]*) x (?P<height>[\d\.]*) (?P<unit>\w*).*$',
            'rotation':
            r'^.*Field rotation angle: up is (?P<angle>[\-\d\.]*) degrees (?P<direction>[WE]) of N.*$',
            'parity': r'^.*Field parity: (?P<parity>pos|neg).*$',
        }

        parsed_data = {}
        for output_key, output_regex in output_regexes.items():
            rx = re.compile(output_regex, re.DOTALL)
            match = rx.match(output)
            if not match:
                print(
                    f'WARN: No match found for "{output_key}" in output of solve-field of file {filepath}.'
                )
                print(
                    f'Field may not have been solved or the output of the solver could not be parsed. Full output:\n{output}'
                )
                return None
            parsed_data[output_key] = match.groupdict()

        return CalibrationData(
            pixel_scale=float(parsed_data['pixel_scale']['scale']),
            pixel_scale_unit=str(parsed_data['pixel_scale']['unit']),
            center_deg=Coordinates(float(parsed_data['center_deg']['ra']),
                                   float(parsed_data['center_deg']['dec'])),
            rotation_angle=float(parsed_data['rotation']['angle']),
            rotation_direction=str(parsed_data['rotation']['direction']),
            parity=str(parsed_data['parity']['parity']),
        )
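
CalibrationData itself is not shown in these examples; a minimal sketch of a dataclass with the fields the constructor call above assumes (the real definition may differ):

    from dataclasses import dataclass

    @dataclass
    class CalibrationData:
        # Field names inferred from the keyword arguments used above.
        pixel_scale: float
        pixel_scale_unit: str
        center_deg: 'Coordinates'   # RA/Dec pair, as used in the other examples
        rotation_angle: float
        rotation_direction: str     # 'E' or 'W' of North
        parity: str                 # 'pos' or 'neg'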