Example No. 1
    def __init__(self, rtd, **kwargs):
        self.data = cached_table_fetch(
            'per_platform',
            table_generator=lambda: self.generate_data(rtd),
            push=True,
            **kwargs)

        self.data = self.group_uncommon(self.data)
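
All of these examples revolve around cached_table_fetch, whose implementation is not part of this page. Judging only from the call sites (a table name, an optional table_generator, a push flag, and keyword arguments such as index_col), a helper of this kind could behave roughly like the sketch below; the cache location, storage format, and signature details are assumptions, not the project's actual code.

    import pandas as pd

    def cached_table_fetch(table_name, table_generator=None, push=False,
                           index_col=None, **kwargs):
        """Hypothetical reconstruction: return the named table as a DataFrame,
        falling back to table_generator() and optionally persisting the result."""
        cache_path = f'cache/{table_name}.parquet'  # assumed cache location and format
        try:
            df = pd.read_parquet(cache_path)
        except FileNotFoundError:
            if table_generator is None:
                raise
            df = table_generator()
            if push:
                df.to_parquet(cache_path)  # assumed persistence step
        if index_col is not None and index_col in df.columns:
            df = df.set_index(index_col)
        return df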
Example No. 2
    def hafas_obstacles_to_sql_table(self):
        obstacles = cached_table_fetch('obstacle', **self.kwargs)
        simpler_obstacles = {
            'from_time': [],
            'to_time': [],
            'from_id': [],
            'to_id': [],
            'from_station': [],
            'to_station': [],
            'dir': [],
            'type': [],
            'summary': [],
            'text': [],
            'category': [],
            'modified': [],
            'priority': [],
            'priority_text': [],
            'icon_title': [],
            'icon_type': [],
        }

        # Flatten the raw HAFAS obstacles: one output row per (edge, event) pair.
        for _, obstacle in obstacles.iterrows():
            obstacle = obstacle['data']
            if 'edges' not in obstacle:
                print('no edges')
                continue

            for edge in obstacle['edges']:
                for event in obstacle['events']:
                    simpler_obstacles['from_time'].append(event['start'])
                    simpler_obstacles['to_time'].append(event['end'])

                    simpler_obstacles['from_station'].append(
                        edge['fromLoc']['name'])
                    simpler_obstacles['to_station'].append(
                        edge['toLoc']['name'])
                    simpler_obstacles['from_id'].append(edge['fromLoc']['id'])
                    simpler_obstacles['to_id'].append(edge['toLoc']['id'])
                    simpler_obstacles['dir'].append(edge['dir'])

                    simpler_obstacles['icon_title'].append(
                        edge['icon']['title'])
                    simpler_obstacles['icon_type'].append(edge['icon']['type'])

                    simpler_obstacles['type'].append(obstacle['type'])
                    simpler_obstacles['summary'].append(obstacle['summary'])
                    simpler_obstacles['text'].append(obstacle['text'])
                    simpler_obstacles['category'].append(obstacle['category'])
                    simpler_obstacles['modified'].append(obstacle['modified'])
                    simpler_obstacles['priority'].append(obstacle['priority'])
                    # priorities_text maps the priority code to a human-readable
                    # label; it is defined outside this snippet.
                    simpler_obstacles['priority_text'].append(
                        priorities_text[obstacle['priority']])

        self.simpler_obstacles = pd.DataFrame(simpler_obstacles)
        self.simpler_obstacles['from_time'] = pd.to_datetime(
            self.simpler_obstacles['from_time'])
        self.simpler_obstacles['to_time'] = pd.to_datetime(
            self.simpler_obstacles['to_time'])
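
Despite its name, the method above only builds self.simpler_obstacles; the actual SQL write is not part of this snippet. If the frame is pushed with pandas, the persistence step might look roughly like the following sketch, where the helper name and the connection string are placeholders rather than project code.

    from sqlalchemy import create_engine

    def push_simpler_obstacles(simpler_obstacles,
                               dsn='postgresql://user:password@localhost/db'):
        """Hypothetical persistence step for the DataFrame built above."""
        engine = create_engine(dsn)  # placeholder DSN
        simpler_obstacles.to_sql('simpler_obstacles', con=engine,
                                 if_exists='replace', index=False)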
Example No. 3
    def __init__(self, rtd, **kwargs):
        self.data = cached_table_fetch(
            'per_train_type',
            index_col='c',
            table_generator=lambda: self.generate_data(rtd),
            push=True,
            **kwargs)

        self.data = self.group_uncommon(self.data)
Example No. 4
    def __init__(self, **kwargs):
        super().__init__(**kwargs)
        self.kwargs = kwargs

        self.ds100_regex = r".*\(([A-Z\s]+)\)"
        self.betriebsstellen = BetriebsstellenBill(**kwargs)

        self.obstacles = cached_table_fetch('parsed_obstacles',
                                            **kwargs).set_index('edge_id')
        # Map each edge_id to its positional row so obstacle attributes can be looked
        # up in the NumPy array below without pandas indexing overhead.
        self.dict_index = {
            edge_id: index
            for index, edge_id in enumerate(self.obstacles.index)
        }
        self.np_obstacles = self.obstacles[[
            'priority', 'length', 'from_time', 'to_time'
        ]].to_numpy()
        self.simpler_obstacles = cached_table_fetch('simpler_obstacles',
                                                    **kwargs)
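
The dict_index / np_obstacles pair built above trades pandas indexing for plain NumPy row access: each edge_id maps to a row position, and each row holds priority, length, from_time and to_time in that order. An illustrative lookup, where the class name and the edge_id value are made up for the example:

    # Illustrative only: the class name and edge_id value are hypothetical.
    handler = ObstacleHandler()
    edge_id = 'example-edge-id'
    priority, length, from_time, to_time = handler.np_obstacles[
        handler.dict_index[edge_id]]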
Example No. 5
    def __init__(self, rtd, **kwargs):
        super().__init__(**kwargs)

        # Cached plots from an older version of this class (built on potentially older
        # data) must not be reused. Therefore a per-instance version string (the object
        # id in hex) is attached to the filenames in the plot cache.
        self.version = f'{id(self):x}'

        self.data = cached_table_fetch(
            'per_station_over_time',
            table_generator=lambda: self.generate_data(rtd),
            push=True,
            **kwargs
        )

        # Setup Plot https://stackoverflow.com/questions/9401658/how-to-animate-a-scatter-plot
        self.fig, self.ax = dark_fig_ax_germany(crs=self.MAP_CRS)

        self.cmap = matplotlib.colors.LinearSegmentedColormap.from_list(
            "", ["green", "yellow", "red"]
        )

        self.sc = self.ax.scatter(
            np.zeros(1),
            np.zeros(1),
            c=np.zeros(1),
            s=np.zeros(1),
            cmap=self.cmap,
            vmin=0,
            vmax=7,
            alpha=0.5,
            zorder=10,
            transform=ccrs.PlateCarree()
        )

        self.colorbar = self.fig.colorbar(self.sc)
        self.colorbar.solids.set_edgecolor("face")
        self.colorbar.outline.set_linewidth(0)

        self.colorbar.ax.get_yaxis().labelpad = 15
        self.colorbar.ax.set_ylabel("Ø Verspätung in Minuten", rotation=270)

        # Pre-render the default plots and cache them as webp images, skipping any
        # plot that has already been rendered for this version.
        for plot_name in self.DEFAULT_PLOTS:
            if not os.path.isfile(f"{CACHE_PATH}/plot_cache/{self.version}_{plot_name}.webp"):
                if plot_name == 'default':
                    self.ax.set_title('', fontsize=16)
                else:
                    self.ax.set_title(plot_name, fontsize=16)
                memory_buffer = io.BytesIO()
                self.fig.savefig(memory_buffer, dpi=300, transparent=True)
                image_to_webp(memory_buffer, f"{CACHE_PATH}/plot_cache/{self.version}_{plot_name}.webp")
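
The loop above pre-renders the default plots into {CACHE_PATH}/plot_cache/ as webp files keyed by version and plot name. A hypothetical companion method to read such a pre-rendered plot back, matching the path pattern used above, could look like this; the method name is an assumption, not part of the snippet.

    def load_cached_plot(self, plot_name):
        """Hypothetical helper: return the pre-rendered webp bytes for plot_name."""
        path = f"{CACHE_PATH}/plot_cache/{self.version}_{plot_name}.webp"
        with open(path, 'rb') as f:
            return f.read()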
Example No. 6
    def __init__(self, **kwargs):
        if 'generate' in kwargs:
            kwargs['generate'] = False
            print('StationPhillip does not support generate')

        self.name_index_stations = cached_table_fetch('stations',
                                                      index_col='name',
                                                      **kwargs)
        self.name_index_stations['eva'] = self.name_index_stations[
            'eva'].astype(int)

        self.eva_index_stations = self.name_index_stations.reset_index(
        ).set_index('eva')
        self.ds100_index_stations = self.name_index_stations.reset_index(
        ).set_index('ds100')
        self.sta_list = self.name_index_stations.sort_values(
            by='number_of_events', ascending=False).index.to_list()

    def __init__(self, **kwargs):
        if 'generate' in kwargs:
            kwargs['generate'] = False
            print('StreckennetzSteffi does not support generate')

        super().__init__(**kwargs)

        streckennetz_df = cached_table_fetch('minimal_streckennetz', **kwargs)

        tuples = [
            tuple(x) for x in streckennetz_df[['u', 'v', 'length']].values
        ]
        self.streckennetz_igraph = igraph.Graph.TupleList(
            tuples, directed=False, edge_attrs=['length'])

        self.get_length = lambda edge: self.streckennetz_igraph.es[edge][
            'length']

    def __init__(self, **kwargs):
        self.name_index_betriebsstellen = cached_table_fetch('betriebstellen', index_col='name', **kwargs)

        self.ds100_index_betriebsstellen = self.name_index_betriebsstellen.reset_index().set_index('ds100')
        self.betriebsstellen_list = self.name_index_betriebsstellen.dropna(subset=['lat', 'lon']).index.to_list()
        self.NoLocationError = NoLocationError
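
The first constructor above exposes the station table under three indices (name, eva, ds100) plus a list sorted by activity. An illustrative lookup; the class name comes from the snippet, while the station name and ds100 code are placeholders:

    # Illustrative only: the station name and ds100 code are placeholders.
    stations = StationPhillip()                    # assumes DB/cache access is configured
    eva = stations.name_index_stations.at['Some Station', 'eva']
    name = stations.eva_index_stations.at[eva, 'name']
    busiest = stations.sta_list[:10]               # names sorted by number_of_events, descending
    ds100_row = stations.ds100_index_stations.loc['XYZ']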