def get_timestamp(self):
    """ Return the newest `udate` in this table, or datetime.min when empty.

    [Return]
        timestamp : datetime. Max udate, or datetime.datetime.min if the
            table has no rows (max() yields NULL).
    """
    client = PGClient(self.db, timeout=False)
    cursor = client.execute('select max(udate) from %s' % (self.tablename, ))
    latest = cursor.fetchone()[0]
    # An empty table gives NULL; normalize to the minimal datetime.
    return datetime.datetime.min if latest is None else latest
def get_karaokes_content(karaokes_row, db_inst=None):
    """ Get karaokes content by karaoke id from given row data.

    [Arguments]
        karaokes_row : list. Karaoke rows data.
        db_inst : DB client instance. Set for reuse client.

    [Return]
        content: dict
            Karaoke content row data with key is karaoke id.
    """
    if not db_inst:
        db_inst = PGClient()
    # Collect karaoke ids; skip rows without a keymd5.
    ids = [row['keymd5'] for row in karaokes_row if row.get('keymd5')]
    if not ids:
        return {}
    # Parameterized IN clause instead of string-formatted SQL so row values
    # can never inject SQL.
    # FIXME: If movie has other source.
    condition = ["source='youtube' AND keymd5 IN %(ids)s", {'ids': tuple(ids)}]
    content, rowcount = db_inst.query(table='song_content',
                                      condition=condition,
                                      ret_type='all')
    ret = defaultdict(list)
    for row in content:
        ret[row['keymd5']].append(row)  # FIXME: It may need sorting.
    return ret
def get_movies_content(movies_row, db_inst=None):
    """ Get movies content by imdb id from given row data.

    [Arguments]
        movies_row : list. Movie rows data.
        db_inst : DB client instance. Set for reuse client.

    [Return]
        content: dict
            Movie content row data with key is imdb id.
    """
    if not db_inst:
        db_inst = PGClient()
    # Collect imdb ids; skip rows without one.
    ids = [row['imdbid'] for row in movies_row if row.get('imdbid')]
    if not ids:
        return {}
    # Parameterized IN clause instead of string-formatted SQL so row values
    # can never inject SQL.
    # FIXME: If movie has other source.
    condition = ["source='tudou' AND imdbid IN %(ids)s", {'ids': tuple(ids)}]
    content, rowcount = db_inst.query(table='movie_content',
                                      condition=condition,
                                      ret_type='all')
    ret = defaultdict(list)
    for row in content:
        ret[row['imdbid']].append(row)  # FIXME: It may need sorting.
    return ret
def add_count(self, url, params, request, response):
    """ Add user action count of choosen AV.

    [Arguments]
        url[0] : string. AV ID.
        type   : string. Action type.
    """
    # API args
    av_id = url[0] if len(url) else None
    act = params.get_string('type', None)
    if not act or act not in (USER_ACTION_VIEW, USER_ACTION_DOWNLOAD, USER_ACTION_PLAY):
        raise RequestError(ERR_REQUEST_ARG, str_data=('type error.'))
    client = PGClient(db='av')
    # The AV must exist before we count anything.
    _, found = client.query(table='video', condition=['id=%(id)s', {'id': av_id}])
    if not found:
        raise RequestError(ERR_DB_NOT_FOUND)
    count, total = self._add_user_action(action_type=act, av_id=av_id, db_inst=client)
    client.commit()
    return Response(content={'count': count, 'total': total})
def get_movies_content(movies_row, db_inst=None):
    """ Get movies content by imdb id from given row data.

    [Arguments]
        movies_row : list. Movie rows data.
        db_inst : DB client instance. Set for reuse client.

    [Return]
        content: dict
            Movie content row data with key is imdb id.
    """
    if not db_inst:
        db_inst = PGClient()
    # Collect imdb ids; skip rows without one.
    ids = [row['imdbid'] for row in movies_row if row.get('imdbid')]
    if not ids:
        return {}
    # Parameterized IN clause instead of string-formatted SQL so row values
    # can never inject SQL.
    # FIXME: If movie has other source.
    condition = ["source='tudou' AND imdbid IN %(ids)s", {'ids': tuple(ids)}]
    content, rowcount = db_inst.query(table='movie_content',
                                      condition=condition,
                                      ret_type='all')
    ret = defaultdict(list)
    for row in content:
        ret[row['imdbid']].append(row)  # FIXME: It may need sorting.
    return ret
def get_addon_version(self, url, params, request, response):
    """ Get addon version.

    [Arguments]
        url[0] : string. Optional addon version; latest is used if absent.
    """
    version = url[0] if len(url) else None
    client = PGClient(db='file')
    if version:
        rows, n = client.query(table='addons',
                               fields=['version', 'md5sum'],
                               condition=[u'version=%(version)s', {'version': version}],
                               ret_type='all')
    else:
        # No version requested: take the newest one.
        rows, n = client.query(table='addons',
                               fields=['version', 'md5sum'],
                               ret_type='all',
                               else_sql='ORDER BY version DESC LIMIT 1')
    if not n:
        raise RequestError(ERR_DB_NOT_FOUND)
    return Response({'version': rows[0]['version'], 'md5sum': rows[0]['md5sum']})
def get_karaokes_content(karaokes_row, db_inst=None):
    """ Get karaokes content by karaoke id from given row data.

    [Arguments]
        karaokes_row : list. Karaoke rows data.
        db_inst : DB client instance. Set for reuse client.

    [Return]
        content: dict
            Karaoke content row data with key is karaoke id.
    """
    if not db_inst:
        db_inst = PGClient()
    # Collect karaoke ids; skip rows without a keymd5.
    ids = [row['keymd5'] for row in karaokes_row if row.get('keymd5')]
    if not ids:
        return {}
    # Parameterized IN clause instead of string-formatted SQL so row values
    # can never inject SQL.
    # FIXME: If movie has other source.
    condition = ["source='youtube' AND keymd5 IN %(ids)s", {'ids': tuple(ids)}]
    content, rowcount = db_inst.query(table='song_content',
                                      condition=condition,
                                      ret_type='all')
    ret = defaultdict(list)
    for row in content:
        ret[row['keymd5']].append(row)  # FIXME: It may need sorting.
    return ret
def add_count(self, url, params, request, response):
    """ Add user action count of choosen dramas.

    [Arguments]
        url[0] : string. Drama ID.
        type   : string. Action type.
    """
    # API args
    drama_id = url[0] if len(url) else None
    act = params.get_string('type', None)
    if not act or act not in (USER_ACTION_VIEW, USER_ACTION_DOWNLOAD, USER_ACTION_PLAY):
        raise RequestError(ERR_REQUEST_ARG, str_data=('type error.'))
    client = PGClient()
    # The drama must exist before we count anything.
    _, found = client.query(table='drama', condition=['id=%(id)s', {'id': drama_id}])
    if not found:
        raise RequestError(ERR_DB_NOT_FOUND)
    count, total = self._add_user_action(action_type=act, drama_id=drama_id, db_inst=client)
    client.commit()
    return Response(content={'count': count, 'total': total})
def suggest_keyword(media_id, lang, db_inst=None):
    """ Suggest search keywords (titles) for the given movie id.

    Titles of the same movie from other sources (matched via imdbid) are
    included, then filtered down to the sources allowed for `lang`.
    """
    if not db_inst:
        db_inst = PGClient()
    # Get target movies info
    rows, rowcount = db_inst.query(table='movies',
                                   fields=['source, title, imdbid'],
                                   condition=['id=%(id)s', {'id': media_id}],
                                   ret_type='all')
    if not rowcount:
        return []
    imdbid = rows[0]['imdbid']
    if imdbid:
        # With an imdbid we can pull the same movie from other sources.
        extra, rowcount = db_inst.query(
            table='movies',
            fields=['source, title'],
            condition=['imdbid=%(imdbid)s AND id!=%(id)s',
                       {'imdbid': imdbid, 'id': media_id}],
            ret_type='all')
        rows.extend(extra)
    # Filter data by lang rules.
    allowed = {
        LANG_ENG: [DB_SRC_IMDB],
        LANG_SCH: [DB_SRC_IMDB, DB_SRC_DOUBAN],
        LANG_TCH: [DB_SRC_IMDB, DB_SRC_DOUBAN, DB_SRC_ATMOVIES]
    }[lang]
    return [r['title'] for r in rows if r['source'] in allowed]
def get_movie_titles(self, url, params, request, response):
    """ Get movie titles of each source.

    [Arguments]
        url[0] : string. IMDB ID.
    """
    # API args
    if not url:
        raise RequestError(ERR_REQUEST_ARG, str_data=('format error.'))
    imdbid = url[0]
    client = PGClient()
    rows, _ = client.query(table='movies',
                           fields=['source, title'],
                           condition=['imdbid = %(imdbid)s', {'imdbid': imdbid}],
                           ret_type='all')
    titles = {}
    # Map each source to its display language; skip sources with no mapping.
    for row in rows:
        lang = source2lang(row['source'])
        if lang is not None:
            titles[lang] = row['title']
    return Response(content={'titles': titles})
def suggest_keyword(media_id, lang, db_inst=None):
    """ Suggest search keywords for the given AV video.

    Returns the matched row (title, code), or an empty list when the id
    is unknown.
    """
    db_inst = db_inst or PGClient(db='av')
    rows, rowcount = db_inst.query(table='video',
                                   fields=['title, code'],
                                   condition=['id=%(id)s', {'id': media_id}],
                                   ret_type='all')
    if not rowcount:
        return []
    return rows.pop()
def get_metadata(self, url, params, request, response): """ Use TV id of any source to get all metadata. [Arguments] ids : list string. IDs of TVs to get metadata. """ # API args ids = params.get_list('ids', None) if not ids: raise RequestError(ERR_REQUEST_ARG, str_data=('ids.')) resp = Response(content={'tvs': {}}) db_inst = PGClient() # Get items with ids condition = ['id IN %(id)s', {'id': tuple(ids)}] tvs, rowcount = db_inst.query(table='drama', condition=condition, ret_type='all') for tv in tvs: resp['tvs'][tv['id']] = {'dbid': tv['dbid'], 'metadata': {}} # Get metadata of each source. if tv['dbid']: condition = ['dbid = %(id)s', {'id': tv['dbid']}] rows, rowcount = db_inst.query(table='drama', condition=condition, ret_type='all') else: # only one record rows = [tv] for row in rows: resp['tvs'][tv['id']]['metadata'][row['source']] = { 'id': row['id'], 'source': row['source'], 'title': row['title'], 'akas': row['akas'], 'kind': row['kind'], 'genres': row['genres'], 'posterurl': tv['posterurl'], 'stars': row['stars'], 'year': row['year'], 'region': row['region'], 'description': row['description'], 'url': row['url'], 'update_eps': row['update_eps'], 'total_eps': row['total_eps'], 'completed': row['completed'], 'md5sum': row['md5sum'] } # patch up not found data. for missing_id in set(ids).difference(resp['tvs'].keys()): resp['tvs'][missing_id] = {} return resp
def get_metadata(self, url, params, request, response): """ Use movie id of any source to get all metadata. [Arguments] ids : list string. IDs of movies to get metadata. """ # API args ids = params.get_list('ids', None) if not ids: raise RequestError(ERR_REQUEST_ARG, str_data=('ids.')) resp = Response(content={'movies': {}}) db_inst = PGClient() # Get items with ids condition = ['id IN %(id)s', {'id': tuple(ids)}] movies, rowcount = db_inst.query(table='movies', condition=condition, ret_type='all') for movie in movies: resp['movies'][movie['id']] = {'imdbid': movie['imdbid'], 'metadata': {}} # Get metadata of each source. if movie['imdbid']: condition = ['imdbid = %(imdbid)s', {'imdbid': movie['imdbid']}] rows, rowcount = db_inst.query(table='movies', condition=condition, ret_type='all') else: rows = [movie] for row in rows: resp['movies'][movie['id']]['metadata'][row['source']] = { 'id': row['id'], 'source': row['source'], 'title': row['title'], 'akas': row['akas'], 'genres': row['genres'], 'rating': row['rating'], 'posterurl': row['posterurl'], 'directors': row['directors'], 'stars': row['stars'], 'releasedate': row['releasedate'], 'countries': row['countries'], 'description': row['description'], 'url': row['url'], 'md5sum': row['md5sum'], 'thumbnailurl': row['thumbnailurl'] } # patch up not found data. for missing_id in set(ids).difference(resp['movies'].keys()): resp['movies'][missing_id] = {} return resp
def get_play_list(self, url, params, request, response): """ Get play list. [Arguments] url[0] : string. Drama ID. source : Video source. since : int string. Response rows start from. limit : int string. Number of response rows to display. """ # API args drama_id = url[0] if len(url) else None source = params.get_string('source', None) since = params.get_int('since', 0) limit = params.get_limit('limit', 30) if not drama_id: raise RequestError(http_status=404) condition = ['id=%(id)s', {'id': drama_id}] if source in self.allow_display_play_source(): fields = [ ''' ( ARRAY(SELECT json_array_elements(play_urls->'%s')) )[%s:%s] AS play_list ''' % ( source, since + 1 if since > 0 else 1, limit + since if limit > 0 else "json_array_length(play_urls->'%s')" % source # limit it if need. ) ] else: raise RequestError(ERR_REQUEST_ARG, str_data=('source.')) db_inst = PGClient() dramas, rowcount = db_inst.query(table='drama', fields=fields, condition=condition, ret_type='all') if not rowcount: raise RequestError(http_status=404) return Response(model_data={'data': dramas[0]})
def search_tvs(keyword, file_filter, keyword_filter, ordering=0, since=None, limit=10, lang=None):
    """ Search dramas information.

    [Arguments]
        keyword : list. Search keyword.
        file_filter : int. (Not implement) File filter.
        keyword_filter : list. Keyword filter.
        ordering : int. Ordering rule.
        since : int. Response data start from.
        limit : int. Number of response rows to display.
        lang : string. To choose display content.
    """
    db_inst = PGClient()
    # Compose SQL: pick the ORDER BY for the requested ordering rule
    # (default: newest release date first).
    if ordering == 1:
        ordering_sql = u"ORDER BY rdate ASC NULLS LAST"
    elif ordering == 2:
        ordering_sql = u"ORDER BY total_count DESC NULLS LAST, rdate DESC NULLS LAST"
    elif ordering == 3:
        ordering_sql = u"ORDER BY total_count ASC NULLS LAST, rdate ASC"
    else:
        ordering_sql = u"ORDER BY rdate DESC NULLS LAST"
    # Limit is capped at MAX_RESPONSE_ROWS (values <= 0 also fall back to it).
    sql = u'''
        {0}
        {1}
        {2}
        {3}
    '''.format(
        get_search_sql_without_order(lang, keyword, keyword_filter),
        ordering_sql if ordering_sql else '',
        'OFFSET %s' % since if since else '',
        'LIMIT %s' % limit if MAX_RESPONSE_ROWS > limit > 0 else 'LIMIT %s' % MAX_RESPONSE_ROWS)
    dramas = db_inst.execute(cmd=gen_fetch_dramas_sql(
        dramas_sql=sql)).fetchall()
    return {'metadata': dramas}
def get_metadata(self, url, params, request, response): """ Use AV id of any source to get all metadata. [Arguments] ids : list string. IDs of AVs to get metadata. """ # API args ids = params.get_list('ids', None) if not ids: raise RequestError(ERR_REQUEST_ARG, str_data=('ids.')) resp = Response(content={'avs': {}}) db_inst = PGClient(db='av') condition = ['id IN %(id)s', {'id': tuple(ids)}] avs, rowcount = db_inst.query(table='video', condition=condition, ret_type='all') for av in avs: resp['avs'][av['id']] = {'metadata': {}} resp['avs'][av['id']]['metadata'][DB_SRC_DMM] = { 'id': av['id'], 'source': DB_SRC_DMM, 'title': av['title'], 'posterurl': av['posterurl'], 'duration': av['duration'], 'stars': av['performer'], 'genres': av['category'], 'rating': av['rating'], 'maker': av['maker'], 'series': av['series'], 'date': av['date'] and av['date'].strftime("%Y-%m-%d %H:%M:%S"), 'description': av['description'], 'samples': av['samples'], 'url': av['url'], 'date2': av['date2'] and av['date2'].strftime("%Y-%m-%d %H:%M:%S"), 'code': av['code'], 'md5sum': av['md5sum'] } # patch up not found data. for missing_id in set(ids).difference(resp['avs'].keys()): resp['avs'][missing_id] = {} return resp
def update_latest_movies(self, url, params, request, response):
    """ Update latest movies.

    [Arguments]
        imdbid_list: list
            The (imdbid, rdate) pairs to update latest table.

    [Return]
        rowcount: int
            The row count of 'movie_latest'.
    """
    imdbid_list = params.get_list('imdbid_list', None)
    # check argument 'imdbid_list'
    if not imdbid_list:
        raise RequestError(ERR_REQUEST_ARG,
                           str_data=("argument 'imdbid_list' is need.", ))
    # Validate ids and build a parameterized VALUES list so no request data
    # is ever concatenated into the SQL string (SQL injection fix: the
    # second tuple element was previously inserted unescaped).
    placeholders = []
    sql_data = {}
    for i, data in enumerate(imdbid_list):
        # check imdbid pattern (prefix match kept for backward compatibility)
        result = re.match(r'tt\d{7}', data[0])
        if result is None:
            raise RequestError(
                ERR_REQUEST_ARG,
                str_data=('imdbid_list(%s) is not imdbid format.' % data[0], ))
        sql_data['imdbid%d' % i] = data[0]
        sql_data['rdate%d' % i] = data[1]
        placeholders.append('(%%(imdbid%d)s, %%(rdate%d)s)' % (i, i))
    # insert db
    db_inst = PGClient()
    db_inst.execute('truncate table movie_latest')
    db_inst.execute('insert into movie_latest values %s' % ', '.join(placeholders),
                    data=sql_data)
    db_inst.commit()
    cur = db_inst.execute('select count(*) from movie_latest')
    rowcount = cur.fetchone()[0]
    return Response(model_data={'rowcount': rowcount})
def get_play_list(self, url, params, request, response): """ Get play list. [Arguments] url[0] : string. Drama ID. source : Video source. since : int string. Response rows start from. limit : int string. Number of response rows to display. """ # API args drama_id = url[0] if len(url) else None source = params.get_string('source', None) since = params.get_int('since', 0) limit = params.get_limit('limit', 30) if not drama_id: raise RequestError(http_status=404) condition = ['id=%(id)s', {'id': drama_id}] if source in self.allow_display_play_source(): fields = [''' ( ARRAY(SELECT json_array_elements(play_urls->'%s')) )[%s:%s] AS play_list ''' % ( source, since+1 if since > 0 else 1, limit+since if limit > 0 else "json_array_length(play_urls->'%s')" % source # limit it if need. ) ] else: raise RequestError(ERR_REQUEST_ARG, str_data=('source.')) db_inst = PGClient() dramas, rowcount = db_inst.query(table='drama', fields=fields, condition=condition, ret_type='all') if not rowcount: raise RequestError(http_status=404) return Response(model_data={ 'data': dramas[0] })
def get_metadata(self, url, params, request, response):
    """ Use karaokes id of any source to get all metadata.

    [Arguments]
        ids : list string. IDs of karaokes to get metadata.
    """
    # API args
    ids = params.get_list('ids', None)
    if not ids:
        raise RequestError(ERR_REQUEST_ARG, str_data=('ids.'))
    resp = Response(content={'karaokes': {}})
    db_inst = PGClient(db='karaoke')
    condition = ['keymd5 IN %(id)s', {'id': tuple(ids)}]
    karaokes, rowcount = db_inst.query(table='songs', condition=condition, ret_type='all')
    for karaoke in karaokes:
        resp['karaokes'][karaoke['keymd5']] = {'metadata': {}}
        # Get content data.
        condition = ['keymd5 = %(id)s', {'id': karaoke['keymd5']}]
        rows, rowcount = db_inst.query(table='song_content',
                                       condition=condition, ret_type='all')
        content = rows[0] if rows else {}
        # Key name use lang instead of source name and we only have TCH data,
        # because TCH data is composed of cashbox, holiday and youtube.
        # Bug fix: a song without a song_content row used to raise KeyError
        # here (content == {}); .get() yields None for those fields instead.
        resp['karaokes'][karaoke['keymd5']]['metadata'][LANG_TCH] = {
            'id': karaoke['keymd5'],
            'source': karaoke['source'],
            'title': karaoke['title'],
            'artist': karaoke['artist'],
            'song_lang': karaoke['lang'],
            # use content
            'posterurl': content.get('poster_url'),
            'description': content.get('description'),
            'play_url': content.get('play_url'),
            'md5sum': content.get('md5sum')
        }
    # patch up not found data.
    for missing_id in set(ids).difference(resp['karaokes'].keys()):
        resp['karaokes'][missing_id] = {}
    return resp
def search_tvs(keyword, file_filter, keyword_filter, ordering=0, since=None, limit=10, lang=None):
    """ Search dramas information.

    [Arguments]
        keyword : list. Search keyword.
        file_filter : int. (Not implement) File filter.
        keyword_filter : list. Keyword filter.
        ordering : int. Ordering rule.
        since : int. Response data start from.
        limit : int. Number of response rows to display.
        lang : string. To choose display content.
    """
    db_inst = PGClient()
    # Compose SQL: pick the ORDER BY for the requested ordering rule
    # (default: newest release date first).
    if ordering == 1:
        ordering_sql = u"ORDER BY rdate ASC NULLS LAST"
    elif ordering == 2:
        ordering_sql = u"ORDER BY total_count DESC NULLS LAST, rdate DESC NULLS LAST"
    elif ordering == 3:
        ordering_sql = u"ORDER BY total_count ASC NULLS LAST, rdate ASC"
    else:
        ordering_sql = u"ORDER BY rdate DESC NULLS LAST"
    # Limit is capped at MAX_RESPONSE_ROWS (values <= 0 also fall back to it).
    sql = u'''
        {0}
        {1}
        {2}
        {3}
    '''.format(
        get_search_sql_without_order(lang, keyword, keyword_filter),
        ordering_sql if ordering_sql else '',
        'OFFSET %s' % since if since else '',
        'LIMIT %s' % limit if MAX_RESPONSE_ROWS > limit > 0 else 'LIMIT %s' % MAX_RESPONSE_ROWS
    )
    dramas = db_inst.execute(cmd=gen_fetch_dramas_sql(dramas_sql=sql)).fetchall()
    return {
        'metadata': dramas
    }
def delete_addon(self, url, params, request, response):
    """ delete addon file.

    [Arguments]
        url[0] : string. Addon version of file to remove.
    """
    # API args
    if not len(url):
        raise RequestError(ERR_REQUEST_ARG, str_data=('version error.'))
    version = url[0]
    client = PGClient(db='file', timeout=False)
    cond = [u'version=%(version)s', {'version': version}]
    # check data exist
    _, found = client.query(table='addons', fields=['version'], condition=cond)
    if not found:
        raise RequestError(ERR_REQUEST_ARG, str_data=('data does not exist.'))
    client.remove(table='addons', condition=cond)
    client.commit()
    return Response({'version': version})
def search_total(keyword, lang=None, db_inst=None):
    """ Count karaokes whose title or artist matches any keyword.

    [Arguments]
        keyword : list. Search keywords (deduplicated here).
        lang : string. Unused; kept for interface compatibility.
        db_inst : DB client instance. Set for reuse client.
    """
    keyword = list(set(keyword))
    if not db_inst:
        db_inst = PGClient(db='karaoke')
    # Compose SQL with bound parameters so user keywords cannot inject SQL
    # (they were previously formatted straight into the ILIKE literals).
    sql_data = {}
    clauses = []
    for i, k in enumerate(keyword):
        key = 'kw%d' % i
        sql_data[key] = u'%{0}%'.format(escape_wildcard(k))
        clauses.append(
            u"songs.title ILIKE %({0})s OR songs.artist ILIKE %({0})s".format(key))
    sql = u'''
        SELECT COUNT(*) FROM songs, song_content
        WHERE songs.keymd5=song_content.keymd5 AND ({0})
    '''.format(u' OR '.join(clauses))
    return db_inst.execute(cmd=sql, data=sql_data).fetchone()[0]
def search_total(keyword, lang=None, db_inst=None):
    """ Count karaokes whose title or artist matches any keyword.

    [Arguments]
        keyword : list. Search keywords (deduplicated here).
        lang : string. Unused; kept for interface compatibility.
        db_inst : DB client instance. Set for reuse client.
    """
    keyword = list(set(keyword))
    if not db_inst:
        db_inst = PGClient(db='karaoke')
    # Compose SQL with bound parameters so user keywords cannot inject SQL
    # (they were previously formatted straight into the ILIKE literals).
    sql_data = {}
    clauses = []
    for i, k in enumerate(keyword):
        key = 'kw%d' % i
        sql_data[key] = u'%{0}%'.format(escape_wildcard(k))
        clauses.append(
            u"songs.title ILIKE %({0})s OR songs.artist ILIKE %({0})s".format(key))
    sql = u'''
        SELECT COUNT(*) FROM songs, song_content
        WHERE songs.keymd5=song_content.keymd5 AND ({0})
    '''.format(u' OR '.join(clauses))
    return db_inst.execute(cmd=sql, data=sql_data).fetchone()[0]
def search_total(keyword, lang=None, db_inst=None):
    """ Count released AVs matching any keyword.

    [Arguments]
        keyword : list. Search keywords (deduplicated here).
        lang : string. Unused; kept for interface compatibility.
        db_inst : DB client instance. Set for reuse client.
    """
    keyword = list(set(keyword))
    if not db_inst:
        db_inst = PGClient(db='av')
    # Compose SQL with bound parameters so user keywords cannot inject SQL
    # (they were previously formatted straight into the ILIKE literals).
    # The date bound is server-generated (today, UTC) and safe to format.
    sql_data = {}
    clauses = []
    for i, k in enumerate(keyword):
        key = 'kw%d' % i
        sql_data[key] = u'%{0}%'.format(escape_wildcard(k))
        clauses.append(u"keywords ILIKE %({0})s".format(key))
    sql = u'''
        SELECT COUNT(*) FROM video, video_keyword
        WHERE ({0}) AND video.id=video_keyword.id AND video.date <= '{1}'
    '''.format(u' OR '.join(clauses),
               datetime.utcnow().isoformat().split('T')[0])
    return db_inst.execute(cmd=sql, data=sql_data).fetchone()[0]
def get_addon(self, url, params, request, response): """ Get addon file. [Arguments] url[0] : string. Addon version. """ # API args if len(url): version = url[0] else: raise RequestError(ERR_REQUEST_ARG, str_data=('version error.')) # Compose cache file path file_name = 'CEC_%s.addon' % version cache_file_path = '%s/%s' % (FILE_CACHE_PATH, file_name) path_to_save = None # Save file if this file does not cached. # Get file if os.path.isfile(cache_file_path): # Take cached file if it found. iter_data = open(cache_file_path) else: # get file and cache it db_inst = PGClient(db='file', timeout=False) addons, rowcount = db_inst.query( table='addons', condition=[u'version=%(version)s', { 'version': version }], ret_type='all') if not rowcount: raise RequestError(ERR_DB_NOT_FOUND) iter_data = iter( addons[0] ['data']) # covert it to iter obj for avoiding multiple use. path_to_save = cache_file_path response.stream = True response.headers['Content-Type'] = 'application/octet-stream' response.headers[ 'Content-Disposition'] = 'attachment; filename="%s"' % file_name return streaming_generator(iter_data=iter_data, file_name=path_to_save)
def search_total(keyword, lang=None, db_inst=None):
    """ Count released AVs matching any keyword.

    [Arguments]
        keyword : list. Search keywords (deduplicated here).
        lang : string. Unused; kept for interface compatibility.
        db_inst : DB client instance. Set for reuse client.
    """
    keyword = list(set(keyword))
    if not db_inst:
        db_inst = PGClient(db='av')
    # Compose SQL with bound parameters so user keywords cannot inject SQL
    # (they were previously formatted straight into the ILIKE literals).
    # The date bound is server-generated (today, UTC) and safe to format.
    sql_data = {}
    clauses = []
    for i, k in enumerate(keyword):
        key = 'kw%d' % i
        sql_data[key] = u'%{0}%'.format(escape_wildcard(k))
        clauses.append(u"keywords ILIKE %({0})s".format(key))
    sql = u'''
        SELECT COUNT(*) FROM video, video_keyword
        WHERE ({0}) AND video.id=video_keyword.id AND video.date <= '{1}'
    '''.format(u' OR '.join(clauses),
               datetime.utcnow().isoformat().split('T')[0])
    return db_inst.execute(cmd=sql, data=sql_data).fetchone()[0]
def search_total(keyword, lang=None, db_inst=None):
    """ Search dramas count.

    [Arguments]
        keyword : list. Search keyword.
        lang : string. To choose display content.
    """
    db_inst = db_inst or PGClient()
    # Wrap the shared search query and just count its rows.
    inner_sql = get_search_sql_without_order(lang, keyword, None)
    sql = u'select count(*) from ( {0} ) as data'.format(inner_sql)
    return db_inst.execute(cmd=sql).fetchone()[0]
def search_total(keyword, lang=None, db_inst=None):
    """ Count movies whose keyword tsvector matches any given keyword.

    [Arguments]
        keyword : list. Search keywords (deduplicated here).
        lang : string. Chooses which source's keyword table to match.
        db_inst : DB client instance. Set for reuse client.
    """
    keyword = list(set(keyword))
    if not db_inst:
        db_inst = PGClient()
    # Keyword clauses are parameterized via sql_data by the shared helper.
    sql_data = {}
    keyword_sql = _gen_keyword_sql(keyword, sql_data)
    sql = (u"SELECT COUNT(*) FROM movies WHERE id IN ( "
           u"SELECT id FROM movie_keyword "
           u"WHERE source = '{1}' AND ({0}) @@ j_tsv )").format(
               keyword_sql, lang2source(lang))
    return db_inst.execute(cmd=sql, data=sql_data).fetchone()[0]
def search_total(keyword, lang=None, db_inst=None):
    """ Search dramas count.

    [Arguments]
        keyword : list. Search keyword.
        lang : string. To choose display content.
    """
    db_inst = db_inst or PGClient()
    # Wrap the shared search query and just count its rows.
    inner_sql = get_search_sql_without_order(lang, keyword, None)
    sql = u'select count(*) from ( {0} ) as data'.format(inner_sql)
    return db_inst.execute(cmd=sql).fetchone()[0]
def delete_addon(self, url, params, request, response):
    """ delete addon file.

    [Arguments]
        url[0] : string. Addon version of file to remove.
    """
    # API args
    if not len(url):
        raise RequestError(ERR_REQUEST_ARG, str_data=('version error.'))
    version = url[0]
    client = PGClient(db='file', timeout=False)
    cond = [u'version=%(version)s', {'version': version}]
    # check data exist
    _, found = client.query(table='addons', fields=['version'], condition=cond)
    if not found:
        raise RequestError(ERR_REQUEST_ARG, str_data=('data does not exist.'))
    client.remove(table='addons', condition=cond)
    client.commit()
    return Response({'version': version})
def add_count(self, url, params, request, response):
    """ Add user action count of choosen karaoke.

    [Arguments]
        url[0] : string. Karaoke ID (takes precedence over `ids`).
        ids    : list. Karaoke IDs.
        type   : string. Action type.
    """
    # API args
    karaoke_id = url[0] if len(url) else None
    id_list = params.get_list('ids', [])
    act = params.get_string('type', None)
    if not act or act not in (USER_ACTION_VIEW, USER_ACTION_DOWNLOAD, USER_ACTION_PLAY):
        raise RequestError(ERR_REQUEST_ARG, str_data=('type error.'))
    if karaoke_id:
        id_list = [karaoke_id]
    elif not id_list:
        raise RequestError(ERR_REQUEST_ARG, str_data=('id_list error.'))
    client = PGClient(db='karaoke')
    # Every requested id must exist, otherwise reject the whole request.
    _, found = client.query(table='songs',
                            condition=['keymd5 IN %(ids)s', {'ids': tuple(id_list)}])
    if found != len(id_list):
        raise RequestError(ERR_DB_NOT_FOUND)
    resp = Response()
    for key in id_list:
        count, total = self._add_user_action(action_type=act, karaoke_id=key,
                                             db_inst=client)
        resp[key] = {'count': count, 'total': total}
    client.commit()
    # Single-id requests return that entry directly.
    return resp.pop(karaoke_id) if karaoke_id else resp
def get_covers(self, url, params, request, response):
    """ Get karaoke covers.

    [Arguments]
        song_langs : list string. Karaoke song languages.
        list_type : int string.
    """
    song_langs = params.get_list('song_langs', [])
    song_langs = [v for v in song_langs if v in ARG_SONG_LANGS]
    if not song_langs:
        raise RequestError(ERR_REQUEST_ARG, str_data=('song_langs.'))
    list_type = params.get_int('list_type', 1)
    if list_type not in xrange(1, 4):
        raise RequestError(ERR_REQUEST_ARG, str_data=('list_type.'))
    # Take the cover of first row of each song lang as index cover.
    table = u'''
        song_content,
        (
            SELECT DISTINCT ON (lang) keymd5, lang FROM song_list
            WHERE lang IN %(lang)s AND type=%(type)s ORDER BY lang, rank
        ) l
    '''
    fields = ['song_content.poster_url', 'l.lang']
    condition = [u'song_content.keymd5=l.keymd5',
                 {'lang': tuple(song_langs), 'type': str(list_type)}]
    db_inst = PGClient(db='karaoke')
    rows, rowcount = db_inst.query(table=table, fields=fields,
                                   condition=condition, ret_type='all')
    covers = {row['lang']: row['poster_url'] for row in rows}
    # dict.get replaces the deprecated dict.has_key idiom; langs without a
    # cover still map to None.
    return Response(content={
        'covers': {key: covers.get(key) for key in song_langs}
    })
def get_addon(self, url, params, request, response): """ Get addon file. [Arguments] url[0] : string. Addon version. """ # API args if len(url): version = url[0] else: raise RequestError(ERR_REQUEST_ARG, str_data=('version error.')) # Compose cache file path file_name = 'CEC_%s.addon' % version cache_file_path = '%s/%s' % (FILE_CACHE_PATH, file_name) path_to_save = None # Save file if this file does not cached. # Get file if os.path.isfile(cache_file_path): # Take cached file if it found. iter_data = open(cache_file_path) else: # get file and cache it db_inst = PGClient(db='file', timeout=False) addons, rowcount = db_inst.query( table='addons', condition=[u'version=%(version)s', {'version': version}], ret_type='all' ) if not rowcount: raise RequestError(ERR_DB_NOT_FOUND) iter_data = iter(addons[0]['data']) # covert it to iter obj for avoiding multiple use. path_to_save = cache_file_path response.stream = True response.headers['Content-Type'] = 'application/octet-stream' response.headers['Content-Disposition'] = 'attachment; filename="%s"' % file_name return streaming_generator(iter_data=iter_data, file_name=path_to_save)
def search_total(keyword, lang=None, db_inst=None):
    """ Count movies whose keyword tsvector matches any given keyword.

    [Arguments]
        keyword : list. Search keywords (deduplicated here).
        lang : string. Chooses which source's keyword table to match.
        db_inst : DB client instance. Set for reuse client.
    """
    keyword = list(set(keyword))
    if not db_inst:
        db_inst = PGClient()
    # Keyword clauses are parameterized via sql_data by the shared helper.
    sql_data = {}
    keyword_sql = _gen_keyword_sql(keyword, sql_data)
    sql = (u"SELECT COUNT(*) FROM movies WHERE id IN ( "
           u"SELECT id FROM movie_keyword "
           u"WHERE source = '{1}' AND ({0}) @@ j_tsv )").format(
               keyword_sql, lang2source(lang))
    return db_inst.execute(cmd=sql, data=sql_data).fetchone()[0]
def suggest_keyword(media_id, lang, db_inst=None):
    """ Suggest search keywords (titles) for the given movie id.

    Titles of the same movie from other sources (matched via imdbid) are
    included, then filtered down to the sources allowed for `lang`.
    """
    if not db_inst:
        db_inst = PGClient()
    # Get target movies info
    rows, rowcount = db_inst.query(table='movies',
                                   fields=['source, title, imdbid'],
                                   condition=['id=%(id)s', {'id': media_id}],
                                   ret_type='all')
    if not rowcount:
        return []
    imdbid = rows[0]['imdbid']
    if imdbid:
        # With an imdbid we can pull the same movie from other sources.
        extra, rowcount = db_inst.query(
            table='movies',
            fields=['source, title'],
            condition=['imdbid=%(imdbid)s AND id!=%(id)s',
                       {'imdbid': imdbid, 'id': media_id}],
            ret_type='all')
        rows.extend(extra)
    # Filter data by lang rules.
    allowed = {
        LANG_ENG: [DB_SRC_IMDB],
        LANG_SCH: [DB_SRC_IMDB, DB_SRC_DOUBAN],
        LANG_TCH: [DB_SRC_IMDB, DB_SRC_DOUBAN, DB_SRC_ATMOVIES]
    }[lang]
    return [r['title'] for r in rows if r['source'] in allowed]
def get_addon_version(self, url, params, request, response):
    """ Get addon version.

    [Arguments]
        url[0] : string. Optional addon version; latest is used if absent.
    """
    version = url[0] if len(url) else None
    client = PGClient(db='file')
    if version:
        rows, n = client.query(table='addons',
                               fields=['version', 'md5sum'],
                               condition=[u'version=%(version)s', {'version': version}],
                               ret_type='all')
    else:
        # No version requested: take the newest one.
        rows, n = client.query(table='addons',
                               fields=['version', 'md5sum'],
                               ret_type='all',
                               else_sql='ORDER BY version DESC LIMIT 1')
    if not n:
        raise RequestError(ERR_DB_NOT_FOUND)
    return Response({'version': rows[0]['version'], 'md5sum': rows[0]['md5sum']})
def get_movie_titles(self, url, params, request, response):
    """ Get movie titles of each source.

    [Arguments]
        url[0] : string. IMDB ID.
    """
    # API args
    if not url:
        raise RequestError(ERR_REQUEST_ARG, str_data=('format error.'))
    imdbid = url[0]
    client = PGClient()
    rows, _ = client.query(table='movies',
                           fields=['source, title'],
                           condition=['imdbid = %(imdbid)s', {'imdbid': imdbid}],
                           ret_type='all')
    titles = {}
    # Map each source to its display language; skip sources with no mapping.
    for row in rows:
        lang = source2lang(row['source'])
        if lang is not None:
            titles[lang] = row['title']
    return Response(content={'titles': titles})
def run(self):
    """ Execute the task's SQL command, flagging success or failure. """
    client = PGClient(db=self.db, timeout=False)
    try:
        client.execute(self.sqlcmd)
        client.commit()
    except Exception as e:
        # Keep the worker alive; surface the failure via flags and the log.
        self.error = True
        self.error_message = str(e)
        log.send(LOG_ERROR, str(e), traceback=True)
    else:
        self.done = True  # task done
def update_latest_movies(self, url, params, request, response):
    """ Update latest movies.

    [Arguments]
        imdbid_list: list
            The (imdbid, rdate) pairs to update latest table.

    [Return]
        rowcount: int
            The row count of 'movie_latest'.
    """
    imdbid_list = params.get_list('imdbid_list', None)
    # check argument 'imdbid_list'
    if not imdbid_list:
        raise RequestError(ERR_REQUEST_ARG,
                           str_data=("argument 'imdbid_list' is need.", ))
    # Validate ids and build a parameterized VALUES list so no request data
    # is ever concatenated into the SQL string (SQL injection fix: the
    # second tuple element was previously inserted unescaped).
    placeholders = []
    sql_data = {}
    for i, data in enumerate(imdbid_list):
        # check imdbid pattern (prefix match kept for backward compatibility)
        result = re.match(r'tt\d{7}', data[0])
        if result is None:
            raise RequestError(
                ERR_REQUEST_ARG,
                str_data=('imdbid_list(%s) is not imdbid format.' % data[0], ))
        sql_data['imdbid%d' % i] = data[0]
        sql_data['rdate%d' % i] = data[1]
        placeholders.append('(%%(imdbid%d)s, %%(rdate%d)s)' % (i, i))
    # insert db
    db_inst = PGClient()
    db_inst.execute('truncate table movie_latest')
    db_inst.execute('insert into movie_latest values %s' % ', '.join(placeholders),
                    data=sql_data)
    db_inst.commit()
    cur = db_inst.execute('select count(*) from movie_latest')
    rowcount = cur.fetchone()[0]
    return Response(model_data={'rowcount': rowcount})
def get_movies(self, url, params, request, response):
    """ Get movies information.
    [API]
    1) GET RESOURCE/${MOVIE_ID} Get target movie detail information.
    2) GET RESOURCE?type=${TYPE} Get custom information.
    3) GET RESOURCE Get all movie information.
    [Arguments]
    url[0] : string. Movie ID to find.
    type : string. Information type.
    since : int string. Response data start from.
    limit : int string. Number of response rows to display.
    lang : string. To choose display content.
    """
    # API args
    movie_id = url[0] if len(url) else None
    request_type = params.get_string('type', REQUEST_LATEST)
    since = params.get_int('since', None)
    lang = params.get_string('lang', None)
    table = 'movies'
    fields = None
    # Default condition: restrict rows to the source matching the language.
    condition = ['source=%(source)s', {'source': lang2source(lang)}]
    ordering_sql = None
    if movie_id:
        # Single-movie lookup: the source condition is replaced entirely.
        limit = 0
        condition = ['id=%(id)s', {'id': movie_id}]
    elif request_type:
        limit = params.get_limit('limit', 20)
        # Drop incomplete rows (no title/poster/credits/description)
        # from list views.
        constraint_sql = """title!='' AND (posterurl is not null OR thumbnailurl is not null) AND directors!='{}' AND writers!='{}' AND stars!='{}' AND description is not null"""
        condition[0] = '%s AND %s' % (condition[0], constraint_sql)
        if request_type == REQUEST_POPLAR:
            # Popular list: most-counted first, then newest release date.
            ordering_sql = "ORDER BY total_count DESC NULLS LAST, releasedate->>'Default' DESC NULLS LAST"
        elif request_type == REQUEST_LATEST:
            # Latest list: join against movie_latest and order by its rdate.
            table = 'movies, movie_latest'
            condition[0] = 'movies.imdbid = movie_latest.imdbid AND %s' % condition[0]
            ordering_sql = 'ORDER BY movie_latest.rdate DESC'
        else:
            raise RequestError(ERR_REQUEST_ARG, str_data=('type.'))
    else:
        # No id and no type: all movies, newest year first.
        limit = params.get_limit('limit', 20)
        ordering_sql = 'ORDER BY year DESC'
    db_inst = PGClient()
    # NOTE(review): since=0 is treated the same as "no offset"; limit<=0
    # or limit>=MAX_RESPONSE_ROWS falls back to the MAX_RESPONSE_ROWS cap.
    else_sql = '%s %s %s' % (
        ordering_sql if ordering_sql else '',
        'OFFSET %s' % since if since else '',
        'LIMIT %s' % limit if MAX_RESPONSE_ROWS > limit > 0 else 'LIMIT %s' % MAX_RESPONSE_ROWS
    )
    movies, rowcount = db_inst.query(table=table, fields=fields,
                                     condition=condition, ret_type='all',
                                     else_sql=else_sql)
    if not rowcount and movie_id:
        raise RequestError(http_status=404)
    # Attach per-movie content rows keyed by imdb id.
    content = get_movies_content(movies_row=movies, db_inst=db_inst)
    return Response(model_data={
        'metadata': movies,
        'content': content,
        # detail is True only for an explicit single-movie request.
        'detail': True if movie_id else False
    })
def search_karaokes(keyword, file_filter, keyword_filter, ordering=0,
                    since=None, limit=10, lang=None):
    """ Search karaokes information.

    [Arguments]
    keyword : list. Search keywords, OR-combined over title and artist.
    file_filter : int. (Not implement) File filter.
    keyword_filter : list. Extra keywords AND-combined with `keyword`.
    ordering : int. Ordering rule (2/3 by popularity, otherwise by title).
    since : int. Response data start from.
    limit : int. Number of response rows to display.
    lang : string. To choose display content.
    """
    def _ilike_clause(words):
        # Every word may match either the song title or the artist.
        parts = [
            u"songs.title ILIKE '%{0}%' OR songs.artist ILIKE '%{0}%'".format(
                escape_wildcard(word))
            for word in words
        ]
        return u' OR '.join(parts)

    unique_keywords = list(set(keyword))
    unique_filters = list(set(keyword_filter))
    db_inst = PGClient(db='karaoke')
    # Compose SQL
    condition_sql = _ilike_clause(unique_keywords)
    if unique_filters:
        condition_sql = '(%s) AND (%s)' % (condition_sql,
                                           _ilike_clause(unique_filters))
    if ordering == 2:
        ordering_sql = u"ORDER BY total_count DESC NULLS LAST, title"
    elif ordering == 3:
        ordering_sql = u"ORDER BY total_count ASC NULLS LAST, title"
    else:
        # ordering 1 and the fallback share the same rule.
        ordering_sql = u"ORDER BY title"  # TODO
    # Note here duplicate field overlaped.
    sql = u'''
        SELECT songs.*, song_content.* FROM songs, song_content
        WHERE songs.keymd5=song_content.keymd5 AND ({0}) {1} {2} {3}
    '''.format(
        condition_sql,
        ordering_sql,
        u'OFFSET %s' % since if since else '',
        u'LIMIT %s' % limit if MAX_RESPONSE_ROWS > limit > 0 else 'LIMIT %s' % MAX_RESPONSE_ROWS)
    songs = db_inst.execute(cmd=sql).fetchall()
    return {'metadata': songs}
def get_karaokes(self, url, params, request, response):
    """ Get karaokes information.
    [API]
    1) GET RESOURCE/${KARAOKE_ID} Get target karaokes detail information.
    2) GET RESOURCE?type=${TYPE} Get custom information.
    [Arguments]
    url[0] : string. Song ID to find.
    since : int string. Response data start from.
    limit : int string. Number of response rows to display.
    lang : string. To choose display content.
    song_lang : string. To choose language type of songs.
    list_type : int string. To choose songs list type.
    """
    # API args
    karaoke_id = url[0] if len(url) else None
    since = params.get_int('since', None)
    lang = params.get_string('lang', None)
    table = 'songs'
    fields = None
    condition = []
    ordering_sql = None
    if karaoke_id:
        # Single-song lookup by keymd5; paging disabled.
        limit = 0
        condition = ['keymd5=%(id)s', {'id': karaoke_id}]
    else:
        # Ranked song list: join songs with the per-language ranking table.
        limit = params.get_limit('limit', 20)
        song_lang = params.get_string('song_lang', 'M')
        if song_lang not in ARG_SONG_LANGS:
            raise RequestError(ERR_REQUEST_ARG, str_data=('song_lang.'))
        list_type = params.get_int('list_type', 1)
        # Only list types 1..3 are valid.
        if list_type not in xrange(1, 4):
            raise RequestError(ERR_REQUEST_ARG, str_data=('list_type.'))
        table = 'songs, song_list'
        fields = ['songs.*', 'song_list.udate AS list_udate']
        condition = [
            'songs.keymd5=song_list.keymd5 AND (song_list.lang=%(lang)s AND song_list.type=%(type)s)',
            # song_list.type is stored as text, hence str(list_type).
            {'lang': song_lang, 'type': str(list_type)}
        ]
        ordering_sql = 'ORDER BY song_list.rank'
    db_inst = PGClient(db='karaoke')
    # NOTE(review): since=0 is treated the same as "no offset".
    else_sql = '%s %s %s' % (
        ordering_sql if ordering_sql else '',
        'OFFSET %s' % since if since else '',
        'LIMIT %s' % limit if MAX_RESPONSE_ROWS > limit > 0 else 'LIMIT %s' % MAX_RESPONSE_ROWS
    )
    songs, rowcount = db_inst.query(
        table=table, fields=fields, condition=condition, ret_type='all',
        else_sql=else_sql
    )
    if not rowcount and karaoke_id:
        raise RequestError(http_status=404)
    # Attach per-song content rows keyed by keymd5.
    content = get_karaokes_content(karaokes_row=songs, db_inst=db_inst)
    return Response(model_data={
        'metadata': songs,
        'content': content,
        # detail is True only for an explicit single-song request.
        'detail': True if karaoke_id else False
    })
def upload_addon(self, url, params, request, response): """ Upload addon file. [Arguments] url[0] : string. Addon version of this file. md5sum : string. (opt) md5sum of this file to check data is complete. """ # API args if len(url): version = url[0] else: raise RequestError(ERR_REQUEST_ARG, str_data=('version error.')) if not params.has_key('file'): raise RequestError(ERR_REQUEST_ARG, str_data=('file error.')) file_md5sum = params.get_string('md5sum', None) # check data exist db_inst = PGClient(db='file', timeout=False) cur, rowcount = db_inst.query( table='addons', fields=['version'], condition=[u'version=%(version)s', { 'version': version }]) if rowcount: raise RequestError(ERR_REQUEST_ARG, str_data=('data existed.')) # handle updating file upload_file = params['file'] tmp_file_path = '%s/%s' % (TMP_PATH, upload_file.filename) try: # save file md5 = hashlib.md5() with open(tmp_file_path, mode='wb') as fp: while True: data = upload_file.file.read(8192) if data: fp.write(data) md5.update(data) else: break # compare md5sum md5sum = md5.hexdigest() if file_md5sum and md5sum != file_md5sum: raise RequestError(ERR_REQUEST_ARG, str_data=('md5sum check error.')) # insert data to db with open(tmp_file_path, mode='rb') as f: db_inst.insert(table='addons', data={ 'version': version, 'name': upload_file.filename, 'data': Binary(f.read()), 'md5sum': md5sum, 'udate': datetime.utcnow().isoformat() }) # remove redundant data addons, rowcount = db_inst.query(table='addons', fields=['version'], ret_type='all', else_sql='ORDER BY version DESC') if rowcount > MAX_NUM_ADDONS: versions_to_remove = [ addons[-1 * index]['version'] for index in xrange(1, rowcount - MAX_NUM_ADDONS + 1) ] db_inst.remove(table='addons', condition=[ 'version IN %(versions)s', { 'versions': tuple(versions_to_remove) } ]) db_inst.commit() return Response({'version': version, 'md5sum': md5sum}) finally: os.remove(tmp_file_path)
def get_TVs(self, url, params, request, response):
    """ Get TVs information.
    [API]
    1) GET RESOURCE/${DRAMA_ID} Get target dramas detail information.
    2) GET RESOURCE?type=${TYPE} Get custom information.
    [Arguments]
    url[0] : string. Drama ID to find.
    since : int string. Response data start from.
    limit : int string. Number of response rows to display.
    lang : string. To choose display content.
    drama_kind : int string. To choose kind of dramas.
    type : string. The type to display information.
    """
    # API args
    drama_id = url[0] if len(url) else None
    request_type = params.get_string('type', REQUEST_LATEST)
    since = params.get_int('since', None)
    lang = params.get_string('lang', None)
    condition_sql = ''
    condition_data = {}
    ordering_sql = None
    if drama_id:
        # Get one drama information
        limit = 0
        condition_sql = 'id=%(id)s'
        condition_data = {'id': drama_id}
    else:
        # Get drama list information
        limit = params.get_limit('limit', 20)
        kind = params.get_int('drama_kind', 1)
        if kind not in xrange(0, 9):
            raise RequestError(ERR_REQUEST_ARG, str_data=('drama_kind.'))
        # `kind` is a validated int, safe to interpolate directly.
        condition_sql = "kind='%s'" % kind
        condition_sql = "%s AND source='%s'" % (condition_sql,
                                                lang2source(lang))
        # Only dramas that have play urls for the supported players.
        condition_sql = "%s AND '%s' && ARRAY(SELECT json_object_keys(play_urls))" % (
            condition_sql, self.sql_array_string())
        if request_type == REQUEST_POPLAR:
            # Get popular drama information
            ordering_sql = "ORDER BY total_count DESC NULLS LAST, rdate DESC NULLS LAST"
        elif request_type == REQUEST_LATEST:
            # Get latest drama information
            ordering_sql = 'ORDER BY rdate DESC NULLS LAST'
        else:
            raise RequestError(ERR_REQUEST_ARG, str_data=('type.'))
    dramas_sql = u'''
        SELECT * FROM drama WHERE {0} {1} {2} {3}
    '''.format(
        condition_sql,
        ordering_sql if ordering_sql else '',
        'OFFSET %s' % since if since else '',
        'LIMIT %s' % limit if MAX_RESPONSE_ROWS > limit > 0 else 'LIMIT %s' % MAX_RESPONSE_ROWS)
    db_inst = PGClient()
    cur = db_inst.execute(cmd=gen_fetch_dramas_sql(dramas_sql=dramas_sql),
                          data=condition_data)
    if not cur.rowcount and drama_id:
        raise RequestError(http_status=404)
    # BUGFIX: the detail flag was inverted (False for a single-drama
    # lookup). Every sibling endpoint (movies/karaokes/avs) reports
    # detail=True when an explicit ID is requested.
    return Response(model_data={
        'data': cur.fetchall(),
        'detail': True if drama_id else False
    })
def search_avs(keyword, file_filter, keyword_filter, ordering=0,
               since=None, limit=10, lang=None):
    """ Search AVs information.

    [Arguments]
    keyword : list. Search keywords, OR-combined.
    file_filter : int. (Not implement) File filter.
    keyword_filter : list. Extra keywords AND-combined with `keyword`.
    ordering : int. Ordering rule (1: oldest, 2: popular, 3: least popular,
               otherwise newest first).
    since : int. Response data start from.
    limit : int. Number of response rows to display.
    lang : string. To choose display content.
    """
    def _keyword_clause(words):
        return u' OR '.join(
            u"keywords ILIKE '%{0}%'".format(escape_wildcard(word))
            for word in words)

    unique_keywords = list(set(keyword))
    unique_filters = list(set(keyword_filter))
    db_inst = PGClient(db='av')
    # Compose SQL
    condition_sql = _keyword_clause(unique_keywords)
    if unique_filters:
        condition_sql = '(%s) AND (%s)' % (condition_sql,
                                           _keyword_clause(unique_filters))
    order_rules = {
        1: u"ORDER BY date ASC NULLS LAST",
        2: u"ORDER BY total_count DESC NULLS LAST, date DESC NULLS LAST",
        3: u"ORDER BY total_count ASC NULLS LAST, date ASC",
    }
    ordering_sql = order_rules.get(ordering, u"ORDER BY date DESC NULLS LAST")
    # Exclude rows dated in the future (UTC day granularity).
    today = datetime.utcnow().isoformat().split('T')[0]
    sql = u'''
        SELECT * FROM video WHERE id IN (
            SELECT id FROM video_keyword WHERE {0}
        ) AND date <= '{4}' {1} {2} {3}
    '''.format(
        condition_sql,
        ordering_sql,
        u'OFFSET %s' % since if since else '',
        u'LIMIT %s' % limit if MAX_RESPONSE_ROWS > limit > 0 else 'LIMIT %s' % MAX_RESPONSE_ROWS,
        today)
    videos = db_inst.execute(cmd=sql).fetchall()
    return {'metadata': videos}
def get_movies(self, url, params, request, response):
    """ Get movies information.
    [API]
    1) GET RESOURCE/${MOVIE_ID} Get target movie detail information.
    2) GET RESOURCE?type=${TYPE} Get custom information.
    3) GET RESOURCE Get all movie information.
    [Arguments]
    url[0] : string. Movie ID to find.
    type : string. Information type.
    since : int string. Response data start from.
    limit : int string. Number of response rows to display.
    lang : string. To choose display content.
    """
    # API args
    movie_id = url[0] if len(url) else None
    request_type = params.get_string('type', REQUEST_LATEST)
    since = params.get_int('since', None)
    lang = params.get_string('lang', None)
    table = 'movies'
    fields = None
    # Default condition: restrict rows to the source for this language.
    condition = ['source=%(source)s', {'source': lang2source(lang)}]
    ordering_sql = None
    if movie_id:
        # Single-movie lookup: the source condition is replaced entirely.
        limit = 0
        condition = ['id=%(id)s', {'id': movie_id}]
    elif request_type:
        limit = params.get_limit('limit', 20)
        # Drop incomplete rows (no title/poster/credits/description)
        # from list views.
        constraint_sql = """title!='' AND (posterurl is not null OR thumbnailurl is not null) AND directors!='{}' AND writers!='{}' AND stars!='{}' AND description is not null"""
        condition[0] = '%s AND %s' % (condition[0], constraint_sql)
        if request_type == REQUEST_POPLAR:
            # Popular list: most-counted first, then newest release date.
            ordering_sql = "ORDER BY total_count DESC NULLS LAST, releasedate->>'Default' DESC NULLS LAST"
        elif request_type == REQUEST_LATEST:
            # Latest list: join against movie_latest and order by its rdate.
            table = 'movies, movie_latest'
            condition[
                0] = 'movies.imdbid = movie_latest.imdbid AND %s' % condition[
                    0]
            ordering_sql = 'ORDER BY movie_latest.rdate DESC'
        else:
            raise RequestError(ERR_REQUEST_ARG, str_data=('type.'))
    else:
        # No id and no type: all movies, newest year first.
        limit = params.get_limit('limit', 20)
        ordering_sql = 'ORDER BY year DESC'
    db_inst = PGClient()
    # NOTE(review): since=0 is treated the same as "no offset"; limit<=0
    # or limit>=MAX_RESPONSE_ROWS falls back to the MAX_RESPONSE_ROWS cap.
    else_sql = '%s %s %s' % (ordering_sql if ordering_sql else '',
                             'OFFSET %s' % since if since else '',
                             'LIMIT %s' % limit
                             if MAX_RESPONSE_ROWS > limit > 0 else
                             'LIMIT %s' % MAX_RESPONSE_ROWS)
    movies, rowcount = db_inst.query(table=table, fields=fields,
                                     condition=condition, ret_type='all',
                                     else_sql=else_sql)
    if not rowcount and movie_id:
        raise RequestError(http_status=404)
    # Attach per-movie content rows keyed by imdb id.
    content = get_movies_content(movies_row=movies, db_inst=db_inst)
    return Response(
        model_data={
            'metadata': movies,
            'content': content,
            # detail is True only for an explicit single-movie request.
            'detail': True if movie_id else False
        })
def get_avs(self, url, params, request, response):
    """ Get AVs information.
    [API]
    1) GET RESOURCE/${AV_ID} Get target AVs detail information.
    2) GET RESOURCE?type=${TYPE} Get custom information.
    [Arguments]
    url[0] : string. Song ID to find.
    type : string. Information type.
    since : int string. Response data start from.
    limit : int string. Number of response rows to display.
    lang : string. To choose display content.
    """
    # API args
    av_id = url[0] if len(url) else None
    request_type = params.get_string('type', REQUEST_LATEST)
    since = params.get_int('since', None)
    lang = params.get_string('lang', None)
    query_fields = None
    query_condition = []
    order_clause = None
    if av_id:
        # Single-video lookup: paging disabled.
        max_rows = 0
        query_condition = ['id=%(id)s', {'id': av_id}]
    else:
        max_rows = params.get_limit('limit', 20)
        if request_type == REQUEST_POPLAR:
            # Popular list: most-counted first, newest date as tiebreaker.
            order_clause = 'ORDER BY total_count DESC NULLS LAST, date DESC NULLS LAST'
        elif request_type == REQUEST_LATEST:
            # Latest list: exclude rows dated in the future (UTC day).
            today = datetime.utcnow().isoformat().split('T')[0]
            query_condition = ['date <= %(date)s', {'date': today}]
            order_clause = 'ORDER BY date DESC NULLS LAST'
        else:
            raise RequestError(ERR_REQUEST_ARG, str_data=('type.'))
    db_inst = PGClient(db='av')
    tail_parts = [
        order_clause if order_clause else '',
        'OFFSET %s' % since if since else '',
        'LIMIT %s' % max_rows if MAX_RESPONSE_ROWS > max_rows > 0 else 'LIMIT %s' % MAX_RESPONSE_ROWS,
    ]
    avs, rowcount = db_inst.query(table='video', fields=query_fields,
                                  condition=query_condition, ret_type='all',
                                  else_sql=' '.join(tail_parts))
    if not rowcount and av_id:
        raise RequestError(http_status=404)
    return Response(model_data={
        'metadata': avs,
        'detail': True if av_id else False
    })
def search_karaokes(keyword, file_filter, keyword_filter, ordering=0,
                    since=None, limit=10, lang=None):
    """ Search karaokes information.
    [Arguments]
    keyword : list. Search keyword.
    file_filter : int. (Not implement) File filter.
    keyword_filter : list. Keyword filter.
    ordering : int. Ordering rule.
    since : int. Response data start from.
    limit : int. Number of response rows to display.
    lang : string. To choose display content.
    """
    # Deduplicate both keyword lists before building SQL.
    keyword = list(set(keyword))
    keyword_filter = list(set(keyword_filter))
    db_inst = PGClient(db='karaoke')
    # Compose SQL: each keyword may match the title or the artist.
    keyword_sql = u' OR '.join(
        [u"songs.title ILIKE '%{0}%' OR songs.artist ILIKE '%{0}%'".format(escape_wildcard(k)) for k in keyword])
    if keyword_filter:
        # Filter keywords are AND-combined with the main keyword clause.
        keyword_filter_sql = u' OR '.join(
            [u"songs.title ILIKE '%{0}%' OR songs.artist ILIKE '%{0}%'".format(escape_wildcard(k)) for k in keyword_filter]
        )
        condition_sql = '(%s) AND (%s)' % (keyword_sql, keyword_filter_sql)
    else:
        condition_sql = keyword_sql
    # Ordering rules 1 and the fallback are currently the same (by title).
    if ordering == 1:
        ordering_sql = u"ORDER BY title"  # TODO
    elif ordering == 2:
        ordering_sql = u"ORDER BY total_count DESC NULLS LAST, title"
    elif ordering == 3:
        ordering_sql = u"ORDER BY total_count ASC NULLS LAST, title"
    else:
        ordering_sql = u"ORDER BY title"  # TODO
    # Note here duplicate field overlaped.
    sql = u'''
        SELECT songs.*, song_content.* FROM songs, song_content
        WHERE songs.keymd5=song_content.keymd5 AND ({0}) {1} {2} {3}
    '''.format(
        condition_sql,
        ordering_sql,
        u'OFFSET %s' % since if since else '',
        u'LIMIT %s' % limit if MAX_RESPONSE_ROWS > limit > 0 else 'LIMIT %s' % MAX_RESPONSE_ROWS
    )
    songs = db_inst.execute(cmd=sql).fetchall()
    return {
        'metadata': songs
    }
def search_movies(keyword, file_filter, keyword_filter, ordering=0,
                  since=None, limit=10, lang=None):
    """ Search movies information.
    [Arguments]
    keyword : list. Search keyword.
    file_filter : int. (Not implement) File filter.
    keyword_filter : list. Keyword filter.
    ordering : int. Ordering rule.
    since : int. Response data start from.
    limit : int. Number of response rows to display.
    lang : string. To choose display content.
    """
    # Deduplicate both keyword lists before building SQL.
    keyword = list(set(keyword))
    keyword_filter = list(set(keyword_filter))
    db_inst = PGClient()
    # Compose SQL; _gen_keyword_sql fills sql_data with bind parameters.
    sql_data = {}
    keyword_sql = _gen_keyword_sql(keyword, sql_data)
    if keyword_filter:
        # prefix_key keeps filter parameter names distinct in sql_data.
        keyword_filter_sql = _gen_keyword_sql(keyword_filter, sql_data, prefix_key='kf')
        # && is the tsquery AND operator: both clauses must match.
        condition_sql = u'(%s) && (%s)' % (keyword_sql, keyword_filter_sql)
    else:
        condition_sql = keyword_sql
    if ordering == 1:
        ordering_sql = u"ORDER BY releasedate->>'Default' ASC NULLS LAST"
    elif ordering == 2:
        ordering_sql = u"ORDER BY total_count DESC NULLS LAST, releasedate->>'Default' DESC NULLS LAST"
    elif ordering == 3:
        ordering_sql = u"ORDER BY total_count ASC NULLS LAST, releasedate->>'Default' ASC"
    else:
        ordering_sql = u"ORDER BY releasedate->>'Default' DESC NULLS LAST"
    # Full-text match against movie_keyword.j_tsv, then fetch the movies.
    sql = u'''
        SELECT * FROM movies WHERE id IN (
            SELECT id FROM movie_keyword WHERE source = '{1}' AND ({0}) @@ j_tsv
        ) {2} {3} {4}
    '''.format(
        condition_sql, lang2source(lang), ordering_sql,
        u'OFFSET %s' % since if since else '',
        u'LIMIT %s' % limit if MAX_RESPONSE_ROWS > limit > 0 else 'LIMIT %s' % MAX_RESPONSE_ROWS)
    movies = db_inst.execute(cmd=sql, data=sql_data).fetchall()
    # Attach per-movie content rows keyed by imdb id.
    content = get_movies_content(movies_row=movies, db_inst=db_inst)
    return {'metadata': movies, 'content': content}
def get_TVs(self, url, params, request, response):
    """ Get TVs information.
    [API]
    1) GET RESOURCE/${DRAMA_ID} Get target dramas detail information.
    2) GET RESOURCE?type=${TYPE} Get custom information.
    [Arguments]
    url[0] : string. Drama ID to find.
    since : int string. Response data start from.
    limit : int string. Number of response rows to display.
    lang : string. To choose display content.
    drama_kind : int string. To choose kind of dramas.
    type : string. The type to display information.
    """
    # API args
    drama_id = url[0] if len(url) else None
    request_type = params.get_string('type', REQUEST_LATEST)
    since = params.get_int('since', None)
    lang = params.get_string('lang', None)
    condition_sql = ''
    condition_data = {}
    ordering_sql = None
    if drama_id:
        # Get one drama information
        limit = 0
        condition_sql = 'id=%(id)s'
        condition_data = {'id': drama_id}
    else:
        # Get drama list information
        limit = params.get_limit('limit', 20)
        kind = params.get_int('drama_kind', 1)
        if kind not in xrange(0, 9):
            raise RequestError(ERR_REQUEST_ARG, str_data=('drama_kind.'))
        # `kind` is a validated int, safe to interpolate directly.
        condition_sql = "kind='%s'" % kind
        condition_sql = "%s AND source='%s'" % (condition_sql,
                                                lang2source(lang))
        # Only dramas that have play urls for the supported players.
        condition_sql = "%s AND '%s' && ARRAY(SELECT json_object_keys(play_urls))" % (
            condition_sql, self.sql_array_string())
        if request_type == REQUEST_POPLAR:
            # Get popular drama information
            ordering_sql = "ORDER BY total_count DESC NULLS LAST, rdate DESC NULLS LAST"
        elif request_type == REQUEST_LATEST:
            # Get latest drama information
            ordering_sql = 'ORDER BY rdate DESC NULLS LAST'
        else:
            raise RequestError(ERR_REQUEST_ARG, str_data=('type.'))
    dramas_sql = u'''
        SELECT * FROM drama WHERE {0} {1} {2} {3}
    '''.format(
        condition_sql,
        ordering_sql if ordering_sql else '',
        'OFFSET %s' % since if since else '',
        'LIMIT %s' % limit if MAX_RESPONSE_ROWS > limit > 0 else 'LIMIT %s' % MAX_RESPONSE_ROWS)
    db_inst = PGClient()
    cur = db_inst.execute(cmd=gen_fetch_dramas_sql(dramas_sql=dramas_sql),
                          data=condition_data)
    if not cur.rowcount and drama_id:
        raise RequestError(http_status=404)
    # BUGFIX: the detail flag was inverted (False for a single-drama
    # lookup). Every sibling endpoint (movies/karaokes/avs) reports
    # detail=True when an explicit ID is requested.
    return Response(model_data={
        'data': cur.fetchall(),
        'detail': True if drama_id else False
    })
def search_movies(keyword, file_filter, keyword_filter, ordering=0,
                  since=None, limit=10, lang=None):
    """ Search movies information via the full-text keyword index.

    [Arguments]
    keyword : list. Search keywords.
    file_filter : int. (Not implement) File filter.
    keyword_filter : list. Extra keywords AND-combined with `keyword`.
    ordering : int. Ordering rule (1: oldest, 2: popular, 3: least popular,
               otherwise newest release first).
    since : int. Response data start from.
    limit : int. Number of response rows to display.
    lang : string. To choose display content.
    """
    unique_keywords = list(set(keyword))
    unique_filters = list(set(keyword_filter))
    db_inst = PGClient()
    # Compose SQL; _gen_keyword_sql populates sql_data with bind params.
    sql_data = {}
    condition_sql = _gen_keyword_sql(unique_keywords, sql_data)
    if unique_filters:
        filter_sql = _gen_keyword_sql(unique_filters, sql_data,
                                      prefix_key='kf')
        # && is the tsquery AND operator: both clauses must match.
        condition_sql = u'(%s) && (%s)' % (condition_sql, filter_sql)
    order_rules = {
        1: u"ORDER BY releasedate->>'Default' ASC NULLS LAST",
        2: u"ORDER BY total_count DESC NULLS LAST, releasedate->>'Default' DESC NULLS LAST",
        3: u"ORDER BY total_count ASC NULLS LAST, releasedate->>'Default' ASC",
    }
    ordering_sql = order_rules.get(
        ordering, u"ORDER BY releasedate->>'Default' DESC NULLS LAST")
    sql = u'''
        SELECT * FROM movies WHERE id IN (
            SELECT id FROM movie_keyword WHERE source = '{1}' AND ({0}) @@ j_tsv
        ) {2} {3} {4}
    '''.format(
        condition_sql,
        lang2source(lang),
        ordering_sql,
        u'OFFSET %s' % since if since else '',
        u'LIMIT %s' % limit if MAX_RESPONSE_ROWS > limit > 0 else 'LIMIT %s' % MAX_RESPONSE_ROWS)
    movies = db_inst.execute(cmd=sql, data=sql_data).fetchall()
    # Attach per-movie content rows keyed by imdb id.
    content = get_movies_content(movies_row=movies, db_inst=db_inst)
    return {'metadata': movies, 'content': content}
def upload_addon(self, url, params, request, response): """ Upload addon file. [Arguments] url[0] : string. Addon version of this file. md5sum : string. (opt) md5sum of this file to check data is complete. """ # API args if len(url): version = url[0] else: raise RequestError(ERR_REQUEST_ARG, str_data=('version error.')) if not params.has_key('file'): raise RequestError(ERR_REQUEST_ARG, str_data=('file error.')) file_md5sum = params.get_string('md5sum', None) # check data exist db_inst = PGClient(db='file', timeout=False) cur, rowcount = db_inst.query( table='addons', fields=['version'], condition=[u'version=%(version)s', {'version': version}] ) if rowcount: raise RequestError(ERR_REQUEST_ARG, str_data=('data existed.')) # handle updating file upload_file = params['file'] tmp_file_path = '%s/%s' % (TMP_PATH, upload_file.filename) try: # save file md5 = hashlib.md5() with open(tmp_file_path, mode='wb') as fp: while True: data = upload_file.file.read(8192) if data: fp.write(data) md5.update(data) else: break # compare md5sum md5sum = md5.hexdigest() if file_md5sum and md5sum != file_md5sum: raise RequestError(ERR_REQUEST_ARG, str_data=('md5sum check error.')) # insert data to db with open(tmp_file_path, mode='rb') as f: db_inst.insert(table='addons', data={ 'version': version, 'name': upload_file.filename, 'data': Binary(f.read()), 'md5sum': md5sum, 'udate': datetime.utcnow().isoformat() }) # remove redundant data addons, rowcount = db_inst.query( table='addons', fields=['version'], ret_type='all', else_sql='ORDER BY version DESC' ) if rowcount > MAX_NUM_ADDONS: versions_to_remove = [addons[-1*index]['version'] for index in xrange(1, rowcount-MAX_NUM_ADDONS+1)] db_inst.remove( table='addons', condition=['version IN %(versions)s', {'versions': tuple(versions_to_remove)}] ) db_inst.commit() return Response({ 'version': version, 'md5sum': md5sum }) finally: os.remove(tmp_file_path)
def get_metadata(self, url, params, request, response): """ Use movie id of any source to get all metadata. [Arguments] ids : list string. IDs of movies to get metadata. """ # API args ids = params.get_list('ids', None) if not ids: raise RequestError(ERR_REQUEST_ARG, str_data=('ids.')) resp = Response(content={'movies': {}}) db_inst = PGClient() # Get items with ids condition = ['id IN %(id)s', {'id': tuple(ids)}] movies, rowcount = db_inst.query(table='movies', condition=condition, ret_type='all') for movie in movies: resp['movies'][movie['id']] = { 'imdbid': movie['imdbid'], 'metadata': {} } # Get metadata of each source. if movie['imdbid']: condition = [ 'imdbid = %(imdbid)s', { 'imdbid': movie['imdbid'] } ] rows, rowcount = db_inst.query(table='movies', condition=condition, ret_type='all') else: rows = [movie] for row in rows: resp['movies'][movie['id']]['metadata'][row['source']] = { 'id': row['id'], 'source': row['source'], 'title': row['title'], 'akas': row['akas'], 'genres': row['genres'], 'rating': row['rating'], 'posterurl': row['posterurl'], 'directors': row['directors'], 'stars': row['stars'], 'releasedate': row['releasedate'], 'countries': row['countries'], 'description': row['description'], 'url': row['url'], 'md5sum': row['md5sum'], 'thumbnailurl': row['thumbnailurl'] } # patch up not found data. for missing_id in set(ids).difference(resp['movies'].keys()): resp['movies'][missing_id] = {} return resp
def run(self):
    """Apply a DB patch: load the patch file into a temp table, then run
    the patcher's SQL inside a single transaction."""
    patch_data = None
    try:
        patch_data = PatchData(DIR_TEMP_PATH, self.fileobj)
        # patch db
        db_inst = PGClient(self.patcher.db, timeout=False)
        with open(patch_data.get_filepath(), mode='r') as f:
            db_inst.execute('begin;')
            # create temp table
            db_inst.execute(self.patcher.temp_table_sqlcmd)
            # file obj column order must match temp table column order
            db_inst.copy_from(f, self.patcher.temp_table_name)
            db_inst.execute(self.patcher.sqlcmd)
            db_inst.execute('commit;')
        # task done
        self.done = True
    except Exception as e:
        # NOTE(review): no explicit rollback here — presumably PGClient or
        # connection teardown aborts the open transaction; confirm.
        self.error = True
        # NOTE(review): e.message is Python-2-only and is '' (not None) for
        # most exceptions; the vars(e) fallback rarely triggers. The sibling
        # task runner uses str(e) instead — consider aligning.
        self.error_message = e.message if e.message is not None else vars(e)
        log.send(LOG_ERROR, self.error_message, traceback=True)
    finally:
        # Always remove the extracted patch file, success or failure.
        if patch_data is not None:
            patch_data.remove()