def get(self, id):
    """Fetch the Region with the given primary key, aborting with 404 if absent.

    Args:
        id: primary-key value of the Region to look up (name kept for
            caller compatibility even though it shadows the builtin).

    Returns:
        The matching Region model instance.
    """
    try:
        region = Region.get(Region.id == id)
    except Region.DoesNotExist:
        # Only a missing row maps to "Not found"; other errors (connection
        # failures, programming errors) now propagate instead of being
        # silently printed and converted into a 404.
        abort(404, message="Not found")
    else:
        return region
def organize_assets_by_location(character, asset_list):
    """Group a flat asset list into a region -> system -> location tree.

    Each ``asset_list`` entry is a dict carrying at least ``item_id`` and
    ``location_id``.  An asset's ``location_id`` may itself be another
    asset's ``item_id`` (containers), so "real" locations are the location
    ids that are NOT also asset ids.

    Returns:
        dict keyed by region id; each region holds name/id/redlisted plus
        its systems, each system its locations, each location its items.
    """
    asset_ids = set(entry['item_id'] for entry in asset_list)
    asset_dict = {entry['item_id']: entry for entry in asset_list}
    location_set = set(entry['location_id'] for entry in asset_list)
    # One bucket per distinct location id (note: ``id`` shadows the builtin).
    location_data_dict = {id: {'items': {}} for id in location_set}
    for entry in asset_list:
        location_data_dict[entry['location_id']]['items'][
            entry['item_id']] = entry
    # Assets that contain other assets get an 'items' sub-dict wired in.
    for item_id, entry in asset_dict.items():
        if item_id in location_data_dict:
            entry['items'] = location_data_dict[item_id]['items']
    # Resolve only true locations (stations/structures/systems), not items.
    location_id_list = list(
        set(location_data_dict.keys()).difference(asset_ids))
    location_model_dict = get_location_multi(character, location_id_list)
    systems_dict = {}
    for location_id in location_model_dict:
        location = location_model_dict[location_id]
        location_data_dict[location_id]['name'] = location.name
        if isinstance(location, System):
            system = location
        elif location.system_id is not None:
            system = System.get(location.system_id)
        else:
            # NOTE(review): DummySystem is used as-is, not instantiated —
            # presumably a module-level placeholder object; confirm.
            system = DummySystem
        # Map system id -> (system model, [location ids in that system]).
        systems_dict[system.id] = systems_dict.get(system.id, (system, []))
        systems_dict[system.id][1].append(location_id)
    return_dict = {}
    for system, location_list in systems_dict.values():
        if system.region_id is not None:
            region = Region.get(system.region_id)
        else:
            region = DummyRegion
        if region.id not in return_dict:
            return_dict[region.id] = {
                'redlisted': [],
                'name': region.name,
                'items': {},
                'id': region.id,
            }
        if region.is_redlisted:
            return_dict[region.id]['redlisted'].append('name')
        return_dict[region.id]['items'][system.id] = {
            'redlisted': [],
            'name': system.name,
            'id': system.id,
            # ``systems_dict[system.id][1]`` is the same list unpacked
            # above as ``location_list``.
            'items': {id: location_data_dict[id]
                      for id in systems_dict[system.id][1]},
        }
        if system.is_redlisted:
            return_dict[region.id]['items'][system.id]['redlisted'].append(
                'name')
    return return_dict
def cache_today_stats():
    """Recompute today's trend metrics for every region and upsert them
    into the LatestTrend cache table.

    Iterates ``all_region`` (module-level id list), computes the current
    trend via ``get_today_trend`` and either updates the existing cache
    row or inserts a new one.
    """
    for region_id in tqdm(all_region):
        region = Region.get(Region.region_id == region_id)
        today_stats = get_today_trend(region)
        # Single query instead of the previous exists() + get() pair.
        trend = LatestTrend.get_or_none(LatestTrend.region_id == region_id)
        if trend is not None:
            trend.metrics = today_stats
            with postgres_database.atomic():
                trend.save()
        else:
            # create() already persists the row; the old trailing .save()
            # issued a redundant second write.
            LatestTrend.create(region_id=region_id, metrics=today_stats)
def helper_test_assets_success(self, result):
    """Walk a region -> system -> structure -> item asset tree and assert
    its shape at every level.

    ``result`` is the dict produced by the asset-organizing code: region
    ids map to region data, whose 'items' map system ids to system data,
    and so on down to individual asset entries checked by
    ``helper_process_asset_item``.

    Non-positive region/system ids (placeholder/dummy entries) are
    skipped entirely — only real DB-backed rows are name-checked.
    """
    for region_id, region_data in result.items():
        if region_id > 0:
            region = Region.get(region_id)
            self.assertEqual(region_data['name'], region.name)
            self.assertIsInstance(region_data['items'], dict)
            for system_id, system_data in region_data['items'].items():
                if system_id > 0:
                    system = System.get(system_id)
                    self.assertIsInstance(system_data, dict)
                    self.assertEqual(system_data['name'], system.name)
                    self.assertIsInstance(system_data['items'], dict)
                    # Structures are not looked up in the DB; only their
                    # shape is validated here.
                    for structure_id, structure_data in system_data['items'].items(
                    ):
                        self.assertIsInstance(structure_data, dict)
                        self.assertIsInstance(structure_data['name'], str)
                        self.assertIsInstance(structure_data['items'], dict)
                        for identifier, data in structure_data['items'].items():
                            self.assertIsInstance(identifier, int)
                            self.helper_process_asset_item(data)
def list_channel(search: str = None, country: str = None, offset=0):
    """List channels, optionally filtered by full-text search and country.

    Args:
        search: optional full-text term matched against channel title or
            description via the ``Match`` expression.
        country: optional region id; restricts channels to that country.
        offset: number of leading rows to skip (simple pagination).

    Returns:
        dict with ``count`` and a ``channels`` list of serialized rows
        (at most 12 per call).
    """
    channel_query = Channel.select(Channel.channel_id, Channel.title,
                                   Channel.description, Channel.country)
    if search is not None:
        search = str(search)
        channel_query = channel_query.where(
            Match(Channel.title, search) | Match(Channel.description, search))
    if country is not None:
        region = Region.get(Region.region_id == country)
        channel_query = channel_query.where(Channel.country == region)
    channels = []
    for idx, c in enumerate(channel_query[offset:]):
        channels.append(model_to_dict(c))
        # Cap the page at 12 rows (idx 0..11), matching the original
        # behaviour of breaking once idx exceeds 10.
        if idx > 10:
            break
    return {'count': len(channels), 'channels': channels}
def test_add_new_region_to_admin_list(self):
    """A region added via add_admin_list_item shows up in the admin list
    and its Region row becomes redlisted."""
    add_admin_list_item('region', 10000050, current_user=self.admin)
    listing = get_admin_list('region', current_user=self.admin)
    self.assertEqual(len(listing['info']), 1)
    self.assertTrue(Region.get(10000050).redlisted)
def trending_topic(region_id, unit: str, search: str = None,
                   start: datetime = None, end: datetime = None,
                   sum: bool = False, topic_limit=100, lw: float = 1,
                   vw: float = 1, cw: float = 1, rw: float = 1,
                   dw: float = 1):
    """Build a ranked list of trending topics for one region.

    Combines persisted DailyTrend rows within [start, end] with today's
    cached LatestTrend metrics (when the window includes today), averages
    per-tag stats, and scores each tag with a weighted blend of rank and
    view-normalised engagement.

    Args:
        region_id: Region primary key to report on.
        unit: window-size key into ``unit_value`` ('day', 'week', ...).
        search: optional tag filter (pushed into Postgres and re-applied
            as a substring match on the combined frame).
        start, end: explicit window bounds; default to the last
            ``unit_value[unit] + 2`` days ending today.
        sum: unused; kept for signature compatibility (shadows builtin).
        topic_limit: maximum number of topics returned.
        lw, vw, cw, rw, dw: like/view/comment/rank/dislike weights.

    Returns:
        dict with the region's id/name/geo and a ``topic`` list sorted by
        descending weight.
    """
    today = datetime.now()
    today = datetime(year=today.year, month=today.month, day=today.day)
    if end is None:
        end = today
    if start is None:
        start = end - relativedelta(days=unit_value[unit] + 2)
    region = Region.get(Region.region_id == region_id)
    result = {
        'id': region.region_id,
        'name': region.name,
        'topic': [],
        'geo': {
            'lat': region.lat,
            'lon': region.lon
        }
    }
    daily_trends = DailyTrend.select().where((DailyTrend.time >= start)
                                             & (DailyTrend.time <= end)
                                             & (DailyTrend.region == region))
    if search is not None and len(search) > 0:
        # Push the text filter into Postgres via the jsonb -> tsvector helper.
        exp = NodeList([
            SQL("jsonb_message_to_tsvector("), DailyTrend.metrics,
            SQL(") @@ '{}'".format(search))
        ], glue='')
        daily_trends = daily_trends.where(exp)
    daily_metrics = []
    for trend in daily_trends:
        stats = []
        for metric in trend.metrics:
            m_ = metric['stats']
            m_['tag'] = metric['tag'].replace('#', '')
            m_['date'] = trend.time
            # Older rows may predate category tagging; -1 marks "unknown".
            m_['category'] = metric.get('category', [-1])
            stats.append(m_)
        df = pd.DataFrame(stats)
        if len(df) > 0:
            daily_metrics.append(df)
    if end >= today:
        # Today's metrics are not yet persisted as a DailyTrend row; pull
        # them from the LatestTrend cache instead.
        from cache import LatestTrend
        try:
            trend = LatestTrend.get(LatestTrend.region_id == region_id)
            today_stats = trend.metrics
        except Exception:
            # Was a bare except; no cache row simply means no extra data.
            today_stats = []
        stats = []
        for metric in today_stats:
            m_ = metric['stats']
            m_['tag'] = metric['tag'].replace('#', '')
            m_['date'] = today
            m_['category'] = metric.get('category', [-1])
            stats.append(m_)
        if len(stats):
            df = pd.DataFrame(stats)
            if len(df) > 0:
                daily_metrics.append(df)
    if len(daily_metrics) > 0:
        df = pd.concat(daily_metrics, axis=0)
        if search is not None and len(search) > 0:
            df = df.loc[df['tag'].str.contains(search, regex=False)]
        # (A stray no-op ``df.set_index('tag')`` whose result was discarded
        # has been removed.)
        df = df.drop(columns=["date"])
        if 'category' in df.columns:
            # Per tag: flatten the category lists, average numeric stats,
            # then join both aggregations back together.
            flatten = lambda lists: [item for sub in lists for item in sub]
            df1 = df.groupby('tag', as_index=False).agg({'category': flatten})
            df2 = df[['tag', 'rank', 'view', 'comment', 'like',
                      'dislike']].groupby(['tag'], as_index=False).mean()
            df = pd.concat([df1.set_index('tag'),
                            df2.set_index('tag')],
                           axis=1,
                           join='inner').reset_index()
        else:
            df = df.groupby(['tag'], as_index=False).mean()
        # Rank contributes (101 - rank) * rw; engagement stats are
        # normalised by views.
        df['weight'] = (101 - df['rank']) * rw + (
            (df['view']) * vw + (df['comment']) * cw + (df['like']) * lw -
            (df['dislike'] * dw)) / df['view']
        topics = df.to_dict(orient='records')
        topics.sort(key=lambda x: x['weight'], reverse=True)
        result['topic'] = []
        for t in topics[:topic_limit]:
            e = {
                'tag': t['tag'],
                'weight': t['weight'],
                'rank': t['rank'],
                'view': t['view'],
                'like': t['like'],
                # BUG FIX: 'dislike' previously copied t['like'].
                'dislike': t['dislike'],
                'comment': t['comment']
            }
            if 'category' in t:
                e['category'] = list(set(t['category']))
            result['topic'].append(e)
    return result
def topic_filter(region_id: str, unit: str, search: str = None,
                 start: datetime = None, end: datetime = None,
                 topic_limit=100, sum: bool = False, lw: float = 0,
                 vw: float = 0, cw: float = 0, rw: float = 1,
                 dw: float = 0):
    """Return per-(tag, date, category) trend rows for a region.

    Unlike ``trending_topic`` this keeps one row per tag per day (no
    cross-day ranking or truncation) and formats dates as ISO strings.

    Args:
        region_id: Region primary key to report on.
        unit: one of 'week'/'day'/'month'/'year'; sizes the default window.
        search: optional tag filter.
        start, end: explicit window bounds (dates are truncated to
            midnight); default window is the last
            ``unit_value[unit] + 2`` days ending today.
        topic_limit, sum: unused; kept for signature compatibility.
        lw, vw, cw, rw, dw: like/view/comment/rank/dislike weights.

    Raises:
        ValueError: if ``unit`` is not a recognised window key.

    Returns:
        dict with region id/name/geo and a ``topic`` list of row dicts.
    """
    if unit not in ['week', 'day', 'month', 'year']:
        raise ValueError("Invalid unit value")
    today = datetime.now()
    today = datetime(year=today.year, month=today.month, day=today.day)
    if end is None:
        end = today
    else:
        end = datetime(year=end.year, month=end.month, day=end.day)
    if start is None:
        start = end - relativedelta(days=unit_value[unit] + 2)
    region = Region.get(Region.region_id == region_id)
    result = {
        'id': region.region_id,
        'name': region.name,
        'topic': [],
        'geo': {
            'lat': region.lat,
            'lon': region.lon
        }
    }
    daily_trends = DailyTrend.select().where((DailyTrend.time >= start)
                                             & (DailyTrend.time <= end)
                                             & (DailyTrend.region == region))
    if search is not None and len(search) > 0:
        # Push the text filter into Postgres via the jsonb -> tsvector helper.
        exp = NodeList([
            SQL("jsonb_message_to_tsvector("), DailyTrend.metrics,
            SQL(") @@ '{}'".format(search))
        ], glue='')
        daily_trends = daily_trends.where(exp)
    daily_metrics = []
    for trend in daily_trends:
        stats = []
        for metric in trend.metrics:
            m_ = metric['stats']
            m_['tag'] = metric['tag'].replace('#', '')
            m_['date'] = trend.time
            # Consistent with trending_topic: rows that predate category
            # tagging previously raised KeyError here; default to [-1].
            m_['category'] = metric.get('category', [-1])
            stats.append(m_)
        df = pd.DataFrame(stats)
        daily_metrics.append(df)
    if end >= today:
        # Today's metrics live only in the LatestTrend cache.
        from cache import LatestTrend
        try:
            trend = LatestTrend.get(LatestTrend.region_id == region_id)
            today_stats = trend.metrics
        except Exception:
            # Was a bare except; missing cache row means no extra data.
            today_stats = []
        stats = []
        for metric in today_stats:
            m_ = metric['stats']
            m_['date'] = today
            m_['tag'] = metric['tag'].replace('#', '')
            m_['category'] = metric.get('category', [-1])
            stats.append(m_)
        if len(stats):
            df = pd.DataFrame(stats)
            daily_metrics.append(df)
    if len(daily_metrics) > 0:
        df = pd.concat(daily_metrics, axis=0)
        if search is not None and len(search) > 0:
            df = df.loc[df['tag'].str.contains(search, regex=False)]
        # (A stray no-op ``df.set_index('tag')`` whose result was discarded
        # has been removed.)
        has_col = False
        if 'category' in df.columns:
            # Category lists are not hashable; join to a string so the
            # groupby key works, then split back out below.
            df['category'] = [','.join(map(str, l)) for l in df['category']]
            has_col = True
            df = df.groupby(['tag', 'date', 'category']).mean()
        else:
            df = df.groupby(['tag', 'date']).mean()
        df['weight'] = (101 - df['rank']) * rw + (
            (df['view']) * vw + (df['comment']) * cw + (df['like']) * lw -
            (df['dislike'] * dw)) / df['view']
        df['tag'] = [r[0] for r in df.index]
        # BUG FIX: format was "%Y-%m-%dT%HH:%MM:%SS", which emitted literal
        # 'H'/'M'/'S' characters; this is the intended ISO-8601 form.
        df['date'] = [r[1].strftime("%Y-%m-%dT%H:%M:%S") for r in df.index]
        if has_col:
            df['category'] = [[int(float(l)) for l in r[2].split(',')]
                              for r in df.index]
        topics = df.to_dict(orient='records')
        result['topic'] = topics
    return result
def topic_interest(region_id, unit: str, search: str = None,
                   start: datetime = None, end: datetime = None,
                   sum: bool = False, topic_limit=100, lw: float = 0,
                   vw: float = 0, cw: float = 0, rw: float = 1,
                   dw: float = 0):
    """Score per-tag interest for a region from raw per-video stats.

    Pulls Stats rows for videos published within [start, end], clusters
    them per tag/date via ``cluster_stats_date``, and scores each tag
    with the weighted rank/engagement formula.

    Args:
        region_id: Region primary key.
        unit: one of 'week'/'day'/'month'/'year'; sizes the default window.
        search, sum: unused; kept for signature compatibility.
        start, end: explicit window bounds; default window is the last
            ``unit_value[unit] + 2`` days ending now.
        topic_limit: maximum number of topics returned.
        lw, vw, cw, rw, dw: like/view/comment/rank/dislike weights.

    Raises:
        ValueError: if ``unit`` is not a recognised window key.

    Returns:
        dict with region id/name/geo and a ``topic`` list sorted by
        descending weight.
    """
    if unit not in ['week', 'day', 'month', 'year']:
        raise ValueError("Invalid unit value")
    region = Region.get(Region.region_id == region_id)
    result = {
        'id': region.region_id,
        'name': region.name,
        'topic': [],
        'geo': {
            'lat': region.lat,
            'lon': region.lon
        }
    }
    if end is None:
        end = datetime.now()
    if start is None:
        start = end - relativedelta(days=unit_value[unit] + 2)
    videos = Video.select().where((Video.published >= start)
                                  & (Video.published <= end))
    statistic = Stats.select().where((Stats.trending_region == region)
                                     & Stats.video.in_(videos))
    stats = []
    for s in statistic:
        v = s.video
        if 'data' not in s.stats:
            continue
        t = pd.DataFrame(s.stats['data'])
        v.tags = extract_video_unique_keyword(v)
        t['video'] = v
        stats.append(t)
    if len(stats) == 0:
        return result
    df = pd.concat(stats, axis=0)
    df['date'] = pd.to_datetime(df['date'])
    df = df[(df['date'] > start) & (df['date'] < end)]
    tag_data = cluster_stats_date(df, unit)
    for key, data in tag_data.items():
        # Skip degenerate tags (too short / too long).
        if 3 < len(key) < 30:
            tag_df = pd.DataFrame(data)
            tag_df['weight'] = (101 - tag_df['rank']) * rw + (
                (tag_df['comment'] * cw) + (tag_df['view'] * vw) +
                (tag_df['like'] * lw) -
                (tag_df['dislike'] * dw)) / tag_df['view']
            result['topic'].append({
                'tag': key,
                # BUG FIX: previously this stored the module-wide ``stats``
                # list of DataFrames (not serialisable); store this tag's
                # own clustered data instead.
                'stats': data,
                # Expose the mean weight so the sort below has a key.
                'weight': tag_df['weight'].mean(),
                'category': list(set(tag_df['category'].tolist())),
            })
    # BUG FIX: the old code truncated before sorting and then crashed with
    # key=lambda x: x[1] (entries are dicts, not tuples). Sort by weight
    # first, then truncate.
    result['topic'].sort(key=lambda t: t['weight'], reverse=True)
    result['topic'] = result['topic'][:topic_limit]
    return result