def s_task_review_read_main(client_id, limit=1):
    # Fetch up to `limit` pending review-read tasks, stamp each one with the
    # requesting client_id and the current time, and return the jobs keyed by
    # fetch order.
    rows = db_app.db_get_g(db_sql.sql_review_read_get_server_task, (limit, ))
    i_t = len(rows)
    i = 0
    jobs = {}
    for row in rows:
        i = i + 1
        #print '%d of %d'%(i, i_t),
        app_id = row[0]
        page_num = row[1]
        #page_num = 490
        review_type = row[2]
        review_sort_order = row[3]
        job = {
            'app_id': app_id,
            'pageNum': page_num,
            'review_type': review_type,
            'review_sort_order': review_sort_order,
        }
        jobs[i] = job
    print jobs
    for key in jobs:
        job = jobs[key]
        app_id = job['app_id']
        db_app.db_execute_g(db_sql.sql_review_read_update_server_task,
                            (client_id, str(datetime.now()), app_id, ))
    return jobs

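# --- Usage sketch (hypothetical, not called anywhere in this module) ---
# Shows the shape of the dict returned by s_task_review_read_main: keys are
# 1-based fetch order, values are job dicts. The client id 'worker-1' and the
# limit of 3 are assumptions for illustration only.
def _example_claim_review_jobs():
    jobs = s_task_review_read_main('worker-1', limit=3)
    for key in sorted(jobs):
        job = jobs[key]
        print job['app_id'], job['pageNum'], job['review_type'], job['review_sort_order']
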
def s_cron_all_review_read():
    # Fetch every app in the cron review-read list for this client and stamp
    # each with the current time; returns the number of apps touched.
    client_id = 'dtc'
    rows = db_app.db_get_g(db_sql.sql_review_read_get_cron_all, (client_id, ))
    for row in rows:
        app_id = row[0]
        #print app_id
        db_app.db_execute_g(db_sql.sql_review_read_update_cron,
                            (str(datetime.now()), app_id, ))
    return len(rows)

def google_plus_read_main():
    # Read the Google+ page for every app the query still returns.
    finish = True
    rows = db_app.db_get_g(db_sql.sql_app_google_plus_get, ())
    for row in rows:
        finish = False
        app_id = row[0]
        google_plus_href = row[1]
        try:
            google_plus_read(app_id, google_plus_href, )
            util.sleep()
        except Exception as e:
            err.except_p(e)
    return finish   # True when there was nothing left to read

def app_read_main_temp():
    # Read the store page for every app still pending, printing progress.
    finish = True
    rows = db_app.db_get_g(db_sql.sql_app_read_get, ())
    i_t = len(rows)
    i = 0
    for row in rows:
        i = i + 1
        print '%d of %d'%(i, i_t),
        finish = False
        app_id = row[0]
        app_read(app_id)
        util.sleep()
    return finish

def developer_merge():
    # Copy the developer (href, website) pairs from db_app into db_developer.
    rows = db_app.db_get_g(db_sql.sql_developer_merge_get, ())
    i_t = len(rows)
    print '** start to merge developer test list %d from %s to %s **'%(i_t, db_app.db_path, db_developer.db_path)
    i = 0
    p = 0
    db = db_developer.db
    c = db.cursor()
    for row in rows:
        developer_href = row[0]
        developer_website = row[1]
        c.execute(db_sql.sql_developer_merge_insert, (developer_href, developer_website, ))
        p, i = util.p_percent_copy(p, i, i_t, 1, db)
    db.commit()
    c.close()

def related_read_merge():
    # Copy the pending related-app ids from db_app into db_related.
    rows = db_app.db_get_g(db_sql.sql_related_merge_get, ())
    i_t = len(rows)
    print '** start to merge related list %d from %s to %s **'%(i_t, db_app.db_path, db_related.db_path)
    i = 0
    p = 0
    db = db_related.db
    c = db.cursor()
    for row in rows:
        app_id = row[0]
        c.execute(db_sql.sql_related_merge_insert, (app_id, ))
        p, i = util.p_percent_copy(p, i, i_t, 1, db)
        #print str(p)+'%'+'..',
    db.commit()
    c.close()

def review_read_main():
    # Page through the reviews of every app in the review-read queue until
    # review_read_loop reports a non-200 status, pausing between page fetches.
    rows = db_app.db_get_g(db_sql.sql_review_read_get, ())
    i_t = len(rows)
    i = 0
    for row in rows:
        i = i + 1
        print '%d of %d'%(i, i_t),
        app_id = row[0]
        page_num = row[1]
        #page_num = 490
        review_type = row[2]
        review_sort_order = row[3]
        status = 200
        while status == 200:
            status, page_num = review_read_loop(app_id, page_num, review_type, review_sort_order)
            time.sleep(10)

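# --- Pagination contract sketch (assumption for illustration) ---
# review_read_main relies on review_read_loop returning (status, next_page_num)
# and treats any status other than 200 as the end of that app's reviews. The
# stub below only mimics that contract; the real review_read_loop is defined
# elsewhere and actually fetches review pages.
def _review_read_loop_stub(app_id, page_num, review_type, review_sort_order):
    last_page = 3   # pretend each app has three pages of reviews
    status = 200 if page_num < last_page else 404
    return status, page_num + 1
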
def video_read_main():
    # Read the promo video for every app that has one pending; the video id is
    # the last path segment of the href with any query string stripped.
    finish = True
    rows = db_app.db_get_g(db_sql.sql_video_get, ())
    i_t = len(rows)
    i = 0
    for row in rows:
        i = i + 1
        print '%d of %d'%(i, i_t),
        finish = False
        app_id = row[0]
        video_href = row[1]
        view_total = row[2]
        video_href_d = video_href.split('/')[-1]
        video_id = video_href_d.split('?')[0].strip()
        try:
            video_read(video_id, app_id, video_href)
            util.sleep()
        except Exception as e:
            err.except_p(e)
    return finish

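# --- video_id parsing check (hypothetical URL) ---
# Assuming an embed-style href, the two splits in video_read_main keep just the
# video id: split('/')[-1] drops the path, split('?')[0] drops the query string.
# The URL below is made up for illustration.
def _example_video_id_parse():
    video_href = 'https://www.youtube.com/embed/dQw4w9WgXcQ?ps=play'
    video_href_d = video_href.split('/')[-1]            # 'dQw4w9WgXcQ?ps=play'
    video_id = video_href_d.split('?')[0].strip()       # 'dQw4w9WgXcQ'
    print video_id
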
def review_read_main_init():
    # Seed the review-read queue with every app returned by the query.
    rows = db_app.db_get_g(db_sql.sql_review_read_app_get, ())
    for row in rows:
        app_id = row[0]
        db_app.db_execute_g(db_sql.sql_review_read_insert, (app_id,))