def gallery():
    """Return every photo as JSON, ordered by site id, with its thumbnail name.

    Each dumped photo gets an extra 'sm' key holding the small-thumbnail
    file name produced by utils.getThumbnail.
    """
    all_photos = models.TPhoto.query.order_by('id_site').all()
    photos = photo_schema.dump(all_photos)
    for entry in photos:
        entry['sm'] = utils.getThumbnail(entry).get('output_name')
    return jsonify(photos), 200
def returnAllSites():
    """API: list every site (ordered by ref_site), each with a main-photo thumbnail.

    For sites without an explicit main photo, falls back to the site's first
    photo. Returns (json, 200) on success or (json error, 400) on failure.
    """
    try:
        get_all_sites = models.TSite.query.order_by('ref_site').all()
        sites = site_schema.dump(get_all_sites).data
        for site in sites:
            # Fall back to the first attached photo when no main photo is set.
            if site.get('main_photo') is None:
                set_main_photo = models.TPhoto.query.filter_by(
                    id_photo=site.get('t_photos')[0])
            else:
                set_main_photo = models.TPhoto.query.filter_by(
                    id_photo=site.get('main_photo'))
            main_photo = photo_schema.dump(set_main_photo).data
            site['main_photo'] = utils.getThumbnail(
                main_photo[0]).get('output_name')
    except Exception as exception:
        # BUG FIX: an Exception instance is not JSON-serializable — jsonify
        # would itself raise. Send the exception's message instead.
        return jsonify(error=str(exception)), 400
    return jsonify(sites), 200
def returnSiteById(id_site):
    """API: one site plus its photos (ordered by filter_date).

    Enriches the site dump with deduplicated theme/subtheme id lists and each
    photo with its 'sm' thumbnail name. Returns (json, 200).
    """
    get_site_by_id = models.TSite.query.filter_by(id_site=id_site)
    site = site_schema.dump(get_site_by_id)
    get_photos_by_site = models.TPhoto.query.order_by('filter_date').filter_by(
        id_site=id_site).all()
    dump_photos = photo_schema.dump(get_photos_by_site)
    cor_sthemes_themes = site[0].get('cor_site_stheme_themes')
    cor_list = []
    themes_list = []
    subthemes_list = []
    for cor in cor_sthemes_themes:
        cor_list.append(cor.get('id_stheme_theme'))
    query = models.CorSthemeTheme.query.filter(
        models.CorSthemeTheme.id_stheme_theme.in_(cor_list))
    themes_sthemes = themes_sthemes_schema.dump(query)
    for item in themes_sthemes:
        # Deduplicate while preserving first-seen order.
        if item.get('dico_theme').get('id_theme') not in themes_list:
            themes_list.append(item.get('dico_theme').get('id_theme'))
        if item.get('dico_stheme').get('id_stheme') not in subthemes_list:
            subthemes_list.append(item.get('dico_stheme').get('id_stheme'))
    site[0]['themes'] = themes_list
    site[0]['subthemes'] = subthemes_list
    for photo in dump_photos:
        # BUG FIX: a trailing comma made 'sm' a 1-tuple instead of a string
        # (the other gallery() variant in this project assigns the bare value).
        photo['sm'] = utils.getThumbnail(photo).get('output_name')
    photos = dump_photos
    return jsonify(site=site, photos=photos), 200
def gallery():
    """API: every photo ordered by site id, each with its 'sm' thumbnail name.

    Returns (json, 200) on success or (json error, 400) on failure.
    """
    try:
        get_photos = models.TPhoto.query.order_by('id_site').all()
        dump_photos = photo_schema.dump(get_photos).data
        for photo in dump_photos:
            # BUG FIX: a trailing comma made 'sm' a 1-tuple instead of a string.
            photo['sm'] = utils.getThumbnail(photo).get('output_name')
    except Exception as exception:
        # BUG FIX: an Exception instance is not JSON-serializable; send its text.
        return jsonify(error=str(exception)), 400
    return jsonify(dump_photos), 200
def main():
    """One-shot refresher: re-check every stored anime thumbnail and update stale rows.

    For each row of t_animes (guid, title, thumbnail): fetch the current
    thumbnail via utils.getThumbnail(guid); if it differs and is non-empty,
    write it back; if the new value is empty, probe the stored URL to decide
    whether the old thumbnail was deleted. Relies on module globals
    `logger`, `conn`, `utils`, `startTime`.
    """
    logger.info("Starting the refresher task...")
    count = 0
    cursor = conn.cursor(buffered=True)
    cursor.execute("SELECT guid, title, thumbnail FROM t_animes")
    datas = cursor.fetchall()
    logger.info(str(len(datas)) + " medias are going to be checked.")
    for data in datas:
        try:
            image = utils.getThumbnail(data[0])
            if image == data[2]:
                if image != "":
                    logger.debug("Thumbnail for " + str(data[1]) +
                                 " already up to date.")
                else:
                    logger.info("Thumbnail for " + str(data[1]) +
                                " still empty.")
            else:
                if image != "":
                    cursor.execute(
                        "UPDATE t_animes SET thumbnail = %s WHERE guid = %s",
                        [image, data[0]])
                    conn.commit()
                    logger.info(
                        "Updated thumbnail found for \"" + str(data[1]) +
                        "\": %s", image)
                    count += 1
                else:
                    # New thumbnail is empty: probe the stored URL to tell
                    # "temporarily unavailable" from "actually deleted".
                    try:
                        urllib.request.urlopen(data[2])
                        logger.info("Thumbnail for \"" + str(data[1]) +
                                    "\" is now empty, avoiding change.")
                    # BUG FIX: was a bare `except:` which also swallowed
                    # KeyboardInterrupt/SystemExit; narrowed to Exception.
                    except Exception:
                        logger.info("Thumbnail for \"" + str(data[1]) +
                                    "\" has been deleted!")
        except Exception as e:
            logger.warning("Error while updating thumbnail for '" +
                           str(data[1]) + "': " + str(e))
        # Throttle between rows to stay polite with the remote host.
        # NOTE(review): sleep placement (per-row) inferred from the flattened
        # source — confirm against upstream history.
        time.sleep(3)
    logger.info("All thumbnails checked!")
    cursor.close()
    logger.info(
        str(count) + " new thumbnails, time taken: %ss" %
        round((time.time() - startTime), 2))
def gallery():
    """Render the public gallery page: published sites with a photo and town name.

    Collects each site's main photo (falling back to its most recent photo via
    raw SQL when none is declared) and its commune, then renders gallery.html.
    """
    get_sites = models.TSite.query.filter_by(
        publish_site=True).order_by(DEFAULT_SORT_SITES)
    dump_sites = site_schema.dump(get_sites)
    #TODO get photos and cities by join on sites query
    photo_ids = []
    sites_without_photo = []
    ville_codes = []
    for site in dump_sites:
        photo_id = site.get('main_photo')
        if photo_id:
            photo_ids.append(photo_id)
        else:
            sites_without_photo.append(str(site.get('id_site')))
        ville_codes.append(site.get('code_city_site'))
    query_photos = models.TPhoto.query.filter(
        models.TPhoto.id_photo.in_(photo_ids))
    dump_photos = photo_schema.dump(query_photos)
    if len(sites_without_photo):
        # Fallback: latest photo per site for sites without a declared main
        # photo. NOTE: the IN(...) list is built from DB-sourced integer ids
        # (stringified above), not user input, so it is not directly
        # injectable — still worth migrating to bound parameters.
        sql_missing_photos_str = "select distinct on (id_site) * from geopaysages.t_photo where id_site IN (" + ",".join(
            sites_without_photo) + ") order by id_site, filter_date desc"
        sql_missing_photos = text(sql_missing_photos_str)
        missing_photos_result = db.engine.execute(
            sql_missing_photos).fetchall()
        missing_photos = [dict(row) for row in missing_photos_result]
        for missing_photo in missing_photos:
            # Align the raw-SQL rows with the schema dumps ('t_site' key).
            missing_photo['t_site'] = missing_photo.get('id_site')
            dump_photos.append(missing_photo)
    query_villes = models.Communes.query.filter(
        models.Communes.code_commune.in_(ville_codes))
    dump_villes = communes_schema.dump(query_villes)
    for site in dump_sites:
        # BUG FIX: removed stray debug `print('PHOTO')` left in the loop.
        id_site = site.get('id_site')
        photo = None
        try:
            photo = next(photo for photo in dump_photos
                         if (photo.get('t_site') == id_site))
        except StopIteration:
            pass
        if photo:
            site['photo'] = utils.getThumbnail(photo).get('output_url')
        site['ville'] = next(
            ville for ville in dump_villes
            if (ville.get('code_commune') == site.get('code_city_site')))
    return render_template('gallery.html', sites=dump_sites)
def getPhoto(photo):
    """Build the display payload for one photo dump.

    NOTE(review): reads `site`, `COMPARATOR_VERSION`, `utils` and
    `format_datetime` from an enclosing scope — presumably a closure inside a
    view; confirm against the caller. Returns a dict whose shape depends on
    COMPARATOR_VERSION.
    """
    approx_date = photo.get('date_photo')
    exact_date = photo.get('filter_date')
    if approx_date:
        # Approximate (free-text) date: shown as-is at every size.
        date_diplay = {'md': approx_date, 'sm': approx_date}
    else:
        parsed = datetime.strptime(exact_date, '%Y-%m-%d')
        date_diplay = {
            'md': format_datetime(parsed, 'yyyy (dd MMMM)'),
            'sm': parsed.strftime('%Y')
        }
    captions = []
    licence_photo = photo.get('dico_licence_photo')
    if licence_photo:
        captions.append(licence_photo.get('name_licence_photo'))
    caption = ' | '.join(captions)
    dl_caption = "%s | %s | réf. : %s | %s" % (
        site.get('name_site'), site.get('ville').get('nom_commune'),
        site.get('ref_site'), date_diplay.get('md'))
    if caption:
        dl_caption = '%s | %s' % (dl_caption, caption)
    if COMPARATOR_VERSION == 1:
        # Legacy comparator: pre-rendered URLs at every size.
        return {
            'id': photo.get('id_photo'),
            'sm': utils.getThumbnail(photo).get('output_url'),
            'md': utils.getMedium(photo).get('output_url'),
            'lg': utils.getLarge(photo, caption).get('output_url'),
            'dl': utils.getDownload(photo, dl_caption).get('output_url'),
            'date': photo.get('filter_date'),
            'date_diplay': date_diplay
        }
    # Current comparator: raw file name plus caption text.
    return {
        'id': photo.get('id_photo'),
        'filename': photo.get('path_file_photo'),
        'shot_on': photo.get('filter_date'),
        'date_diplay': date_diplay,
        'caption': caption
    }
async def update_thumbnail_catalog(asyncioloop):
    """Background task: every 12h, re-check all stored thumbnails.

    A row needs a refresh when its stored thumbnail is empty, or when the
    stored URL answers with an HTTP error. Relies on module globals
    `logger`, `client`, `conn`, `utils`.
    """
    logger.info("Starting up update_thumbnail_catalog")
    while not client.is_closed():
        await asyncio.sleep(43200)  # 12 hours between passes
        logger.info("Automatic check of the thumbnail database on going...")
        cursor = conn.cursor(buffered=True)
        cursor.execute("SELECT guid, title, thumbnail FROM t_animes")
        data = cursor.fetchone()
        while data is not None:
            # BUG FIX: `reload` was initialized once per pass, so after the
            # first stale row every subsequent row was re-downloaded too.
            # It must be reset for each row.
            reload = 0
            try:
                if data[2] != "":
                    # Probe the stored thumbnail URL; an HTTP error below
                    # marks the row for refresh.
                    urllib.request.urlopen(data[2])
                else:
                    reload = 1
            except urllib.error.HTTPError as e:
                logger.warning(
                    "HTTP Error while getting the current thumbnail of '" +
                    str(data[1]) + "': " + str(e))
                reload = 1
            except Exception as e:
                # Non-HTTP failures (DNS, timeout…) are logged but do not
                # force a refresh — deliberate best-effort.
                logger.debug(
                    "Error while getting the current thumbnail of '" +
                    str(data[1]) + "': " + str(e))
            if reload == 1:
                try:
                    image = utils.getThumbnail(data[0])
                    cursor.execute(
                        "UPDATE t_animes SET thumbnail = %s WHERE guid = %s",
                        [image, data[0]])
                    conn.commit()
                    logger.info("Updated thumbnail found for \"" +
                                str(data[1]) + "\": %s", image)
                except Exception as e:
                    logger.warning(
                        "Error while downloading updated thumbnail for '" +
                        str(data[1]) + "': " + str(e))
                # Throttle after each download attempt.
                await asyncio.sleep(3)
            data = cursor.fetchone()
        cursor.close()
        logger.info("Thumbnail database checked.")
def returnAllSites():
    """API: list all sites (ordered by ref_site) with a main-photo thumbnail name.

    Sites without any photo (or whose photo row is missing) get
    main_photo == "no_photo". Returns a JSON response.
    """
    get_all_sites = models.TSite.query.order_by('ref_site').all()
    sites = site_schema.dump(get_all_sites)
    # Hoisted out of the loop (was re-instantiated per site) and renamed so it
    # no longer shadows a module-level `photo_schema`.
    tphoto_schema = models.TPhotoSchema()
    for site in sites:
        if len(site.get("t_photos")) > 0:
            if site.get('main_photo') is None:
                # No declared main photo: fall back to the first attached one.
                first_photo = site.get("t_photos")
                main_photo = models.TPhoto.query.filter_by(
                    id_photo=first_photo[0]).one_or_none()
            else:
                main_photo = models.TPhoto.query.filter_by(
                    id_photo=site.get('main_photo')).one_or_none()
            if main_photo:
                dumped_photo = tphoto_schema.dump(main_photo)
                site['main_photo'] = utils.getThumbnail(dumped_photo).get(
                    'output_name')
            else:
                site["main_photo"] = "no_photo"
        else:
            site["main_photo"] = "no_photo"
    return jsonify(sites)
def map():
    """Render the interactive map page: published sites plus filter widgets.

    Builds, for every published site: theme/subtheme id lists, photo dumps
    (with year and thumbnail URL), link, latlon and township; then builds the
    four map filters (themes, subthemes, township, years) from the collected
    values. NOTE: shadows the builtin `map` — kept, since routes reference it.
    """
    sites=site_schema.dump(models.TSite.query.filter_by(publish_site = True).order_by('name_site')).data
    for site in sites:
        cor_sthemes_themes = site.get('cor_site_stheme_themes')
        cor_list = []
        themes_list = []
        subthemes_list = []
        for cor in cor_sthemes_themes:
            cor_list.append(cor.get('id_stheme_theme'))
        query = models.CorSthemeTheme.query.filter(
            models.CorSthemeTheme.id_stheme_theme.in_(cor_list))
        themes_sthemes = themes_sthemes_schema.dump(query).data
        # Deduplicate theme/subtheme ids while preserving first-seen order.
        for item in themes_sthemes:
            if item.get('dico_theme').get('id_theme') not in themes_list:
                themes_list.append(item.get('dico_theme').get('id_theme'))
            if item.get('dico_stheme').get('id_stheme') not in subthemes_list:
                subthemes_list.append(item.get('dico_stheme').get('id_stheme'))
        get_photos_by_site = models.TPhoto.query.filter_by(
            id_site=site.get('id_site'))
        photos = photo_schema.dump(get_photos_by_site).data
        site['link'] = url_for('main.comparator', id_site=site.get('id_site'), _external=True)
        site['latlon'] = site.get('geom')
        site['themes'] = themes_list
        site['subthemes'] = subthemes_list
        site['township'] = site.get('code_city_site')
        site['years'] = set()
        for photo in photos:
            # filter_date is 'YYYY-MM-DD'; keep only the year part.
            year = str(photo.get('filter_date')).split('-')[0]
            site['years'].add(year)
            photo['year'] = year
            photo['url'] = url_for(
                'static',
                filename=DATA_IMAGES_PATH + utils.getThumbnail(photo).get('output_name'))
        site['years'] = list(site['years'])
        site['photos'] = photos
    # Attach the parent theme ids to every subtheme record.
    subthemes = dicostheme_schema.dump(models.DicoStheme.query.all()).data
    for sub in subthemes:
        themes_of_subthemes = []
        for item in sub.get('cor_stheme_themes'):
            themes_of_subthemes.append(item.get('id_theme'))
        sub['themes'] = themes_of_subthemes
    # Filter widgets: items are accumulated as sets, then normalized below.
    filters = [{
        'name': 'themes',
        'label': gettext(u'map.filter.themes'),
        'items': set()
    }, {
        'name': 'subthemes',
        'label': gettext(u'map.filter.subthemes'),
        'items': set()
    }, {
        'name': 'township',
        'label': gettext(u'map.filter.township'),
        'items': set()
    }, {
        'name': 'years',
        'label': gettext(u'map.filter.years'),
        'items': set()
    }]
    for site in sites:
        # Compute the prop years
        site['years'] = set()
        for photo in site.get('photos'):
            site['years'].add(photo.get('year'))
        site['years'] = list(site['years'])
        # Collect each site's values into the matching filter's item set.
        for filter in filters:
            val = site.get(filter.get('name'))
            if isinstance(val, (list, set)):
                filter.get('items').update(val)
            else:
                filter.get('items').add(val)
    themes = dicotheme_schema.dump(models.DicoTheme.query.all()).data
    themes = [{
        'id': item['id_theme'],
        'label': item['name_theme']
    } for item in themes]
    subthemes = [{
        'id': item['id_stheme'],
        'label': item['name_stheme'],
        'themes': item['themes']
    } for item in subthemes]
    filter_township = [
        filter for filter in filters if filter.get('name') == 'township'][0]
    # NOTE(review): IN(...) list is built from DB-sourced commune codes, not
    # user input; still a candidate for bound parameters.
    str_map_in = ["'" + township + "'" for township in filter_township.get('items')]
    sql_map_str = "SELECT code_commune AS id, nom_commune AS label FROM geopaysages.communes WHERE code_commune IN (" + ",".join(
        str_map_in) + ")"
    sql_map = text(sql_map_str)
    townships_result = db.engine.execute(sql_map).fetchall()
    townships = [dict(row) for row in townships_result]
    for site in sites:
        site['ville'] = next(township for township in townships if township.get('id') == site.get('township'))
    dbs = {
        'themes': themes,
        'subthemes': subthemes,
        'township': townships
    }
    def getItem(name, id):
        # Resolve a filter item id to its full record in the matching table.
        return next(item for item in dbs.get(name) if item.get('id') == id)
    # Normalize filter items: years become {label, id} sorted newest-first;
    # the rest resolve ids to records and sort by label.
    for filter in filters:
        if (filter.get('name') == 'years'):
            filter['items'] = [{
                'label': str(year),
                'id': year
            } for year in filter.get('items')]
            filter['items'] = sorted(filter['items'], key=lambda k: k['label'], reverse=True)
        else:
            filter['items'] = [getItem(filter.get('name'), item_id) for item_id in filter.get('items')]
            filter['items'] = sorted(filter['items'], key=lambda k: k['label'])
    return render_template('map.html', filters=filters, sites=sites, ign_Key=IGN_KEY)
async def background_check_feed(asyncioloop):
    """Background task: poll each user's MyAnimeList RSS feeds and announce new items.

    For every user in t_users, fetch the manga feed (type=rm) then the anime
    feed (type=rw); unseen recent entries are recorded in t_feeds and posted
    to each configured server channel. Relies on module globals `logger`,
    `client`, `conn`, `httpclient`, `timezone`, `secondMax`, `VERSION`,
    `utils`, `build_embed`, `send_embed_wrapper`, `feedparser`.

    NOTE(review): the original source of the inner request loop was corrupted
    by secret-redaction ("…checking user: "******"GET"…); the `while`/`try`/
    manga-request lines below are reconstructed from the surviving `else`
    branch and the feed_type/stop_boucle toggling — confirm against upstream.
    """
    logger.info("Starting up background_check_feed")
    # HTTP header sent with every RSS request.
    http_headers = {
        "User-Agent": "MyAnimeBot Discord Bot v" + VERSION,
    }
    await client.wait_until_ready()
    logger.debug("Discord client connected, unlocking background_check_feed...")
    while not client.is_closed():
        try:
            db_user = conn.cursor(buffered=True)
            db_user.execute("SELECT mal_user, servers FROM t_users")
            data_user = db_user.fetchone()
        except Exception as e:
            logger.critical("Database unavailable! (" + str(e) + ")")
            quit()
        while data_user is not None:
            user = data_user[0]
            stop_boucle = 0
            # feed_type 1 = manga (rm), 0 = anime (rw); both are checked in turn.
            feed_type = 1
            logger.debug("checking user: " + user)
            while stop_boucle == 0:
                try:
                    try:
                        if feed_type == 1:
                            http_response = await httpclient.request(
                                "GET",
                                "https://myanimelist.net/rss.php?type=rm&u=" + user,
                                headers=http_headers)
                            media = "manga"
                        else:
                            http_response = await httpclient.request(
                                "GET",
                                "https://myanimelist.net/rss.php?type=rw&u=" + user,
                                headers=http_headers)
                            media = "anime"
                    except Exception as e:
                        logger.error("Error while loading RSS (" +
                                     str(feed_type) + ") of '" + user +
                                     "': " + str(e))
                        break
                    http_data = await http_response.read()
                    feed_data = feedparser.parse(http_data)
                    for item in feed_data.entries:
                        pubDateRaw = datetime.strptime(
                            item.published,
                            '%a, %d %b %Y %H:%M:%S %z').astimezone(timezone)
                        DateTimezone = pubDateRaw.strftime("%z")[:3] + ':' + pubDateRaw.strftime("%z")[3:]
                        pubDate = pubDateRaw.strftime("%Y-%m-%d %H:%M:%S")
                        cursor = conn.cursor(buffered=True)
                        cursor.execute(
                            "SELECT published, title, url FROM t_feeds WHERE published=%s AND title=%s AND user=%s",
                            [pubDate, item.title, user])
                        data = cursor.fetchone()
                        if data is None:
                            # Unseen entry: only process it if recent enough.
                            var = datetime.now(timezone) - pubDateRaw
                            logger.debug(" - " + item.title + ": " +
                                         str(var.total_seconds()))
                            if var.total_seconds() < secondMax:
                                logger.info(user + ": Item '" + item.title +
                                            "' not seen, processing...")
                                # MAL prefixes re-consumed items with '-'.
                                if item.description.startswith('-'):
                                    if feed_type == 1:
                                        item.description = "Re-Reading " + item.description
                                    else:
                                        item.description = "Re-Watching " + item.description
                                cursor.execute(
                                    "SELECT thumbnail FROM t_animes WHERE guid=%s LIMIT 1",
                                    [item.guid])
                                data_img = cursor.fetchone()
                                if data_img is None:
                                    # First sighting of this media: cache a thumbnail.
                                    try:
                                        image = utils.getThumbnail(item.link)
                                        logger.info("First time seeing this " +
                                                    media +
                                                    ", adding thumbnail into database: " +
                                                    image)
                                    except Exception as e:
                                        logger.warning(
                                            "Error while getting the thumbnail: " + str(e))
                                        image = ""
                                    cursor.execute(
                                        "INSERT INTO t_animes (guid, title, thumbnail, found, discoverer, media) VALUES (%s, %s, %s, NOW(), %s, %s)",
                                        [item.guid, item.title, image, user, media])
                                    conn.commit()
                                else:
                                    image = data_img[0]
                                # Renamed from `type` (shadowed the builtin).
                                entry_type = item.description.partition(" - ")[0]
                                cursor.execute(
                                    "INSERT INTO t_feeds (published, title, url, user, found, type) VALUES (%s, %s, %s, %s, NOW(), %s)",
                                    (pubDate, item.title, item.guid, user, entry_type))
                                conn.commit()
                                # Broadcast to every channel of every server
                                # this user is registered on.
                                for server in data_user[1].split(","):
                                    db_srv = conn.cursor(buffered=True)
                                    db_srv.execute(
                                        "SELECT channel FROM t_servers WHERE server = %s",
                                        [server])
                                    data_channel = db_srv.fetchone()
                                    while data_channel is not None:
                                        for channel in data_channel:
                                            await send_embed_wrapper(
                                                asyncioloop, channel, client,
                                                build_embed(user, item, channel,
                                                            pubDateRaw, image))
                                        data_channel = db_srv.fetchone()
                    if feed_type == 1:
                        # Manga feed done: switch to the anime feed next turn.
                        feed_type = 0
                        await asyncio.sleep(1)
                    else:
                        stop_boucle = 1
                except Exception as e:
                    logger.error("Error when parsing RSS for '" + user +
                                 "': " + str(e))
            await asyncio.sleep(1)
            data_user = db_user.fetchone()