# NOTE(review): whitespace-mangled paste — this chunk is the tail of one
# CLI-dispatch branch (JSON enrichment + report persistence) followed by the
# head of the '-DqueryOracle' branch. The enclosing if/elif chain starts and
# ends outside this view, so the fragment cannot parse standalone.
    # Load the remaining Oracle SQL result file, then run the parallel and
    # serial JSON fetchers against the sohu.com video/user APIs.
    fileRead.read_file(logdir + "ORACLE_SQL.log")
    log.flush_log()
    monitorReport.monitor_start(monitor_log)
    # The %s placeholders are presumably filled with vid (parallel) and
    # uid+vid (serial) by the json helpers — TODO confirm in jsonParallel/jsonSerial.
    jsonParallel.urls.append('http://my.tv.sohu.com/wm/u/vids.do?vid=%s')
    jsonParallel.urls.append('http://api.my.tv.sohu.com/video/videoinfolist.do?vid=%s')
    jsonSerial.urls.append('http://my.tv.sohu.com/user/a/media/userGet.do?uid=%s&vid=%s')
    jsonParallel.json_start()
    jsonSerial.json_start()
    # Second pass: re-issue whatever timed out on the first fetch.
    jsonParallel.retry_timeout()
    jsonSerial.retry_timeout()
    # Persist the checkpoint and uid list, then push the collected rows to
    # MySQL and apply the patch/update pass.
    fileWrite.checkpoint_save(checkpoint_log, monitorReport.map_store)
    fileWrite.write_uids(uids_log, monitorReport.uids)
    mysqlWrap.insert_report(monitorReport.map_store)
    mysqlWrap.update_report_patch()
    log.end_log()
# '-DqueryOracle' mode: report date defaults to yesterday, optionally
# overridden by argv[2]. Python 2 cmp() returns 0 on equality.
elif cmp(sys.argv[1], '-DqueryOracle') == 0:
    mydate = Util.get_yesterday()
    if len(sys.argv) == 3:
        mydate = sys.argv[2]
    monitorReport.mydate = MyDate(mydate)
    # Per-date working directory; 'video_report_yyyymm' is a literal path
    # segment here, not a substituted date pattern.
    logdir = "/home/qs/scripts/video_report_yyyymm/" + monitorReport.mydate.get_now() + "/"
# NOTE(review): mangled fragment — tail of an Oracle/Hive query branch
# (Hive query + MySQL/Oracle fan-out via a worker thread) followed by the
# head of the '-DupdateAssignField' branch; the trailing 'if' body is cut
# off at the chunk boundary.
    log = Log(base_log + "/stdout.log")
    log.begin_log()
    hiveQuery = HiveQuery(base_log, myQueue, dt=mydate)
    # One job per constants entry plus one extra — presumably the Hive job
    # itself; TODO confirm against MyThread.
    jobs_size = constants.myContants.keys().__len__() + 1
    myThread = MyThread(myQueue, constants, jobs_size, monitor, base_log, reportMysql)
    myThread.start()
    donateMysql.query_all_donate(base_log + "MysqlDonate")
    reportOracle.write_rows_file(base_log + "OracleAd")
    hiveQuery.query_hive()
    # Block until every queued sub-job finishes before persisting results.
    myThread.sub_job_join()
    # deepcopy so the checkpoint snapshot is isolated from later mutation
    # of the shared monitor map.
    fileWrite.checkpoint_save(base_log + "checkpoint.log", copy.deepcopy(monitor))
    reportMysql.check_create_table(26)
    reportMysql.insert_mysql_duplicate(monitor)
    reportMysql.update_report_patch()
    log.end_log()
# '-DupdateAssignField' mode: with no field argument, list the available
# constant keys and exit (Python 2 print statement).
elif cmp(sys.argv[1], '-DupdateAssignField') == 0:
    updateAssignField = 'all'
    mydate = Util.get_yesterday()
    if len(sys.argv) < 3:
        for k, v in constants.myContants.items():
            print " %s %s" % (str(k).ljust(40), str(v.keys()).rjust(20))
        exit()
    if len(sys.argv) >= 3:
        # (branch body continues past this chunk boundary)
# NOTE(review): mangled fragment, a near-duplicate variant of the JSON-report
# branch tail plus the '-DqueryOracle' branch head; the enclosing if/elif
# chain lies outside this view, so the fragment cannot parse standalone.
    # Load the three SQL result files (IP stats, distinct CDN VV, Oracle),
    # then run the parallel and serial JSON fetchers.
    fileRead.read_file(logdir + "IP_SQL.log")
    fileRead.read_file(logdir + "CDN_VV_DISTINCT_SQL.log")
    fileRead.read_file(logdir + "ORACLE_SQL.log")
    log.flush_log()
    monitorReport.monitor_start(monitor_log)
    # %s placeholders presumably filled with vid / uid+vid by the json
    # helpers — TODO confirm in jsonParallel/jsonSerial.
    jsonParallel.urls.append('http://my.tv.sohu.com/wm/u/vids.do?vid=%s')
    jsonParallel.urls.append('http://api.my.tv.sohu.com/video/videoinfolist.do?vid=%s')
    jsonSerial.urls.append('http://my.tv.sohu.com/user/a/media/userGet.do?uid=%s&vid=%s')
    jsonParallel.json_start()
    jsonSerial.json_start()
    # Retry pass for requests that timed out first time around.
    jsonParallel.retry_timeout()
    jsonSerial.retry_timeout()
    # Checkpoint + uid list to disk, then insert/patch the report in MySQL.
    fileWrite.checkpoint_save(checkpoint_log, monitorReport.map_store)
    fileWrite.write_uids(uids_log, monitorReport.uids)
    mysqlWrap.insert_report(monitorReport.map_store)
    mysqlWrap.update_report_patch()
    log.end_log()
# '-DqueryOracle' mode: report date defaults to yesterday, optionally
# overridden by argv[2]. Python 2 cmp() returns 0 on equality.
elif cmp(sys.argv[1], '-DqueryOracle') == 0:
    mydate = Util.get_yesterday()
    if len(sys.argv) == 3:
        mydate = sys.argv[2]
    monitorReport.mydate = MyDate(mydate)
    # Per-date working directory ('video_report_yyyymm' is a literal path
    # segment) and the Oracle SQL log path inside it.
    logdir = "/home/qs/scripts/video_report_yyyymm/" + monitorReport.mydate.get_now() + "/"
    oracle_log = logdir + 'ORACLE_SQL.log'
# NOTE(review): mangled fragment, a near-duplicate variant of the Oracle/Hive
# query branch tail plus the '-DupdateAssignField' branch head; the trailing
# 'if' body is cut off at the chunk boundary.
    # Ensure the log directory exists before opening the stdout log.
    Util.file_mkdirs(os.path.dirname(base_log))
    log = Log(base_log + "/stdout.log")
    log.begin_log()
    hiveQuery = HiveQuery(base_log, myQueue, dt=mydate)
    # One job per constants entry plus one extra — presumably the Hive job
    # itself; TODO confirm against MyThread.
    jobs_size = constants.myContants.keys().__len__() + 1
    myThread = MyThread(myQueue, constants, jobs_size, monitor, base_log, reportMysql)
    myThread.start()
    donateMysql.query_all_donate(base_log + "MysqlDonate")
    reportOracle.write_rows_file(base_log + "OracleAd")
    hiveQuery.query_hive()
    # Wait for all queued sub-jobs before checkpointing and persisting.
    myThread.sub_job_join()
    # deepcopy isolates the checkpoint snapshot from later mutation of the
    # shared monitor map.
    fileWrite.checkpoint_save(base_log + "checkpoint.log", copy.deepcopy(monitor))
    reportMysql.check_create_table(26)
    reportMysql.insert_mysql_duplicate(monitor)
    reportMysql.update_report_patch()
    log.end_log()
# '-DupdateAssignField' mode: with no field argument, list the available
# constant keys and exit (Python 2 print statement).
elif cmp(sys.argv[1], '-DupdateAssignField') == 0:
    updateAssignField = 'all'
    mydate = Util.get_yesterday()
    if len(sys.argv) < 3:
        for k, v in constants.myContants.items():
            print " %s %s" % (str(k).ljust(40), str(v.keys()).rjust(20))
        exit()
    if len(sys.argv) >= 3:
        # (branch body continues past this chunk boundary)