Example #1
 def api_get(self):
     try:
         self.conn.request(method="GET", url=self.query_str)
         resp = self.conn.getresponse()
     except Exception as e:
         self.errmsg = "domain: %s query: %s error: %s" % (self.domain, self.query_str, str(e))
         logger_root.error(self.errmsg)
 def check_start_status(self):
     if self.start_check() and self.check_status():
         time.sleep(3)
         for i in range(5):
             if mod_name == "gxb-sso" and check_mod.check_login():
                 logger_root.info("[%s] API 调用成功!" % self.host)
                 start_flag = True
                 break
             elif check_mod.check_status():
                 logger_root.info("[%s] API 调用成功!" % self.host)
                 start_flag = True
                 break
             else:
                 start_flag = False
                 time.sleep(10)
                 continue
         if not start_flag:
             logger_root.error("[%s] API 调用不成功!" % self.host)
             logger_console.error("[%s] API 调用不成功!" % self.host)
         if action != "gray_update" and start_flag:
             if mod_name != "gxb-scheduler":
                 # Call the nginx uncomment helper to re-enable this host in nginx after the released host has started
                 logger_root.info("[%s] running the nginx uncomment function!" % self.host)
                 nginx_mod.dec(self.host)
     else:
         logger_root.error("[%s]未检测到程序端口[%s]或者API调用失败,程序启动失败!" % (self.host, ser_port))
         logger_console.error("[%s]未检测到程序端口[%s]或者API调用失败,程序启动失败!" % (self.host, ser_port))
Example #3
 def get_school_id(self):
     if Config.school_id != '':
         self.ret = Config.school_id
     else:
         self.errmsg = 'school_id is not set in the config.'
         logger_root.error(self.errmsg)
         raise Exception(self.errmsg)
     return self.ret
Example #4
 def get_course_id(self):
     if Config.course_id != '':
         self.ret = Config.course_id
     else:
         self.errmsg = 'course_id is not set in the config.'
         logger_root.error(self.errmsg)
         raise Exception(self.errmsg)
     return self.ret
Example #5
 def api_post(self, pos=""):
     headers = {"Content-type": "application/json", "Accept": "application/json"}
     # pos = urllib.urlencode(pos)  # The RESTful API accepts JSON; only x-www-form-urlencoded data needs URL encoding
     try:
         self.conn.request(method="POST", url=self.query_str, body=pos, headers=headers)
         resp = self.conn.getresponse()
     except Exception as e:
         self.errmsg = "domain: %s query: %s error: %s" % (self.domain, self.query_str, str(e))
         logger_root.error(self.errmsg)
 def run_command(cmd, user=user, port=port, password=password, host=host, stdout="stdout"):
     logger_root.info('start exec command %s' % cmd)
     client = paramiko.SSHClient()
     client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
     client.load_system_host_keys()
     port = int(port)
     #logger_root.info('start connect %s,%s,%s,%s' % (host,port,user,password) )
     client.connect(hostname=host, port=port, username=user, password=password, timeout=10)
     # Use distinct names for the channel files so they do not shadow the stdout parameter.
     r_stdin, r_stdout, r_stderr = client.exec_command(cmd)
     r_stdin.write("%s\n" % password)  # These two lines are only needed when a sudo command prompts for a password;
     r_stdin.flush()                   # ordinary commands do not need them.
     err = r_stderr.read()             # read stderr only once; a second read() would return an empty string
     if err:
         logger_root.error(err)
         logger_console.error(err)
     out = r_stdout.read()
     client.close()                    # close before returning so the connection is not leaked
     if stdout == "stdout":
         logger_root.info(out)
     return out
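A short usage sketch for run_command, assuming the module-level host/user/port/password defaults already point at the deploy host (the commands shown are placeholders):

    # Capture the output for further parsing.
    listing = run_command("ls /usr/local/tomcat/logs/", stdout="return")
    # Or let the helper log the output itself via the default stdout="stdout".
    run_command("df -h")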
 def check_server(self):
     if auto:
         logger_root.info("*"*10)
         if mod_name == "common":
             check_all_server()
         if mod_name == "mysql":
             logger_root.info("[%s] 开始检测!" % self.host)
             check_mod.check_mysql()
         elif mod_name == "redis":
             logger_root.info("[%s] 开始检测!" % self.host)
             check_mod.check_redis()
         elif mod_name == "gxb-sso" and check_mod.check_login():
             logger_root.info("[%s] API 调用成功!" % self.host)
         elif check_mod.check_status():
             logger_root.info("[%s] API 调用成功!" % self.host)
         else:
             logger_root.error("[%s] API 调用不成功!" % self.host)
             logger_console.error("[%s] API 调用不成功!" % self.host)
 def git_mod(self):
     api_type = self.mod_name
     if cf.has_option(api_type, "git_ip"):
         git_host = cf.get(api_type, "git_ip")
     else:
         logger_root.error("必须设置本地仓库机器的ip!")
         logger_console.error("必须设置本地仓库机器的ip!")
         sys.exit()
     (status,output)=commands.getstatusoutput('fab -H %s -f %slib/fabfile.py go:%s' % (git_host,src_dir_prefix,api_type))
     if log_detail == "True":
         logger_root.info(output)
         logger_root.info("#"*30)
     if status == 0:
         logger_root.info("模块%s上传war包成功!" % mod_name)
     else:
         logger_root.error("模块%s上传war包失败!" % mod_name)
         logger_console.error("模块%s上传war包失败!" % mod_name)
         sys.exit()
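commands.getstatusoutput is Python 2 only and has been removed in Python 3; a hedged equivalent of the fab invocation above using subprocess (same git_host, src_dir_prefix, and api_type variables assumed) would be:

    import subprocess

    # Run the same fab command, capturing the exit status and combined stdout/stderr.
    proc = subprocess.Popen('fab -H %s -f %slib/fabfile.py go:%s' % (git_host, src_dir_prefix, api_type),
                            shell=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
    output = proc.communicate()[0]
    status = proc.returncode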
Example #9
    def get_prefixs(self):
        prefixs = {}
        if Config.mp4_prefix != '':
            prefixs['mp4_prefix'] = Config.mp4_prefix
        else:
            self.errmsg = 'mp4_prefix is not set in the config.'
            logger_root.error(self.errmsg)
            raise Exception(self.errmsg)

        if Config.gif_prefix != '':
            prefixs['gif_prefix'] = Config.gif_prefix
        else:
            self.errmsg = 'gif_prefix is not set in the config.'
            logger_root.error(self.errmsg)
            raise Exception(self.errmsg)

        if Config.srt_prefix != '':
            prefixs['srt_prefix'] = Config.srt_prefix
        else:
            self.errmsg = 'srt_prefix is not set in the config.'
            logger_root.error(self.errmsg)
            raise Exception(self.errmsg)

        self.ret = prefixs
        return self.ret
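The three branches in get_prefixs differ only in the attribute name; a hedged refactor (same Config object and logger_root assumed) that loops over the prefix names removes the duplication:

    def get_prefixs(self):
        prefixs = {}
        for name in ('mp4_prefix', 'gif_prefix', 'srt_prefix'):
            value = getattr(Config, name, '')
            if value == '':
                self.errmsg = '%s is not set in the config.' % name
                logger_root.error(self.errmsg)
                raise Exception(self.errmsg)
            prefixs[name] = value
        self.ret = prefixs
        return self.ret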
 def scp_source_package_to_local(self):
     # One-click update: copy the module directory from the remote host and sync it to the production directory
     self.is_compress = 'False'
     logger_root.info("scp_source_package_to_local")
     # If the local host already has the module directory/jar package, or the backup directory holds an update package, update directly without copying from the remote host
     if os.path.exists("%s" %  upload_unzip_dir) or os.path.exists("%s" % upload_dir + mod_name + ".jar") or os.path.exists(local_backup_file_prefix):
         return 0
     # Read the source server settings
     if cf.has_option(mod_name,'source_host') and cf.has_option(mod_name,'source_path') and cf.has_option(mod_name,'source_user') and cf.has_option(mod_name,'source_password'):
         if cf.has_option(mod_name,'source_port'):
             source_port = cf.get(mod_name,'source_port')
         else:
             source_port = 22
         source_host = cf.get(mod_name,'source_host')
         source_user = cf.get(mod_name,'source_user')
         source_password = cf.get(mod_name,'source_password')
         source_path =cf.get(mod_name,'source_path')
     # Copy the jar/war package from source_host (only the most recent one)
         if type == "jar" or type == "war":
             cmd="cd %s;echo $(ls -rt *.%s|tail -1)" % (source_path,type)
             filename=run_command(cmd,user=source_user,port=source_port,password=source_password,host=source_host,stdout="file")
             source_path = cf.get(mod_name,'source_path') + filename
             backup_cmd="scp -q -P%s -r %s@%s:%s %s" % (source_port,source_user,source_host,source_path,upload_dir + mod_name + "." + type)
     # Copy the module directory from source_host
         else:
             source_path = cf.get(mod_name,'source_path')
             #backup_cmd="scp -q -P %s -r %s@%s:%s %s" % (source_port,source_user,source_host,source_path,upload_unzip_dir)
             backup_cmd="rsync -q -e 'ssh -p %s' -avz --exclude=logs/ --exclude=log/ %s@%s:%s %s" % (source_port,source_user,source_host,source_path+"/",upload_unzip_dir)
         logger_root.info(backup_cmd)
         try:
             outfile=pexpect.run (backup_cmd, events={'(?i)password': source_password+'\n','continue connecting (yes/no)?':'yes\n'},timeout=None)
             logger_root.info(outfile)
         except Exception as e:
             print e
     else:
         logger_root.error("You want make it auto update ,Make sure you define source_host/source_path/source_user/source_password")
         logger_console.error("You want make it auto update ,Make sure you define source_host/source_path/source_user/source_password")
         sys.exit()
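The pexpect.run call above answers both the password prompt and the first-connection host-key question non-interactively; a standalone sketch of that pattern (host, paths, and password below are placeholders, not values from the script):

    import pexpect

    # Copy a file over scp, feeding the password when prompted.
    cmd = "scp -q -P 22 deploy@10.0.0.1:/data/app.tar.gz /home/update/upload/"
    output = pexpect.run(cmd,
                         events={'(?i)password': 'secret\n',
                                 'continue connecting (yes/no)?': 'yes\n'},
                         timeout=None)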
 def check_status(self):
     # Check whether the application reported errors after startup
     logger_root.info("executing check_status!")
     if docker_flag == "1" or docker_flag == "2":
         for i in range(15):
             rcmd = '''sudo sh -c "docker exec -i %s ls /usr/local/tomcat/logs/|grep catalina.`date "+%%Y-%%m-%%d"`.log"|grep -v old''' % mod_name
             logger_root.info(rcmd)
             out = run_command(rcmd)
             if out != "":
                 rcmd='''sudo sh -c " docker exec -i %s tail -n 10 /usr/local/tomcat/logs/catalina.`date "+%%Y-%%m-%%d"`.log|grep 'Server startup'" ''' % mod_name
                 logger_root.info(rcmd)
                 out=run_command(rcmd)
                 if out == "":
                     if i == 14:
                         logger_root.error("[%s] 启动失败,未检测到'Server startup'" % self.host)
                         return False
                     time.sleep(10)
                 else:
                     return True
             else:
                 if i == 14:
                     logger_root.error("[%s] 启动失败,未生成日志文件catalina" % self.host)
                     return False
                 time.sleep(5)
     else:
         if type == "java" or type == "war":
             self.webapp=cf.get(self.mod_name,"tomcat_path")
             logger_root.info(self.webapp)
             rcmd="ps aux|grep %s|grep -v grep|awk '{print $2}'" % (self.webapp)
             out = run_command(rcmd)
             if out != '':
             #rcmd='grep -e -i -A500 '%s' %s/logs/catalina.out|grep -e 'Exception|error' %s/logs/catalina.out ' % (self.time,self.webapp)
                 #rcmd='''tail -n 2000  %s/logs/catalina.out|egrep -i -A50 -B30 'Exception|error'  ''' % (self.webapp)
                 rcmd='''while :; do tail -n 10  %s/logs/catalina.out|egrep -i -A20 'Exception|error';  tail -n 10  %s/logs/catalina.out |grep 'Server startup' && exit; done  ''' % (self.webapp,self.webapp)
                 outlog=run_command(rcmd)
                 logger_root.info(outlog)
         elif type == "jar":
             rcmd="ps aux|grep %s|grep -v grep|awk '{print $2}'" % (self.mod_name)
             out = run_command(rcmd)
             if out != '':
                 rcmd='''tail -n 2000  %s/logs/err|egrep -i -A50 -B30 'Exception|error' ''' % (self.webapp)
                 outlog=run_command(rcmd)
                 logger_root.error(outlog)
                 logger_console.error(outlog)
         elif type == "nodejs":
             pass
         else:
             return 1
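The shell loop `while :; do ... done` in the java/war branch never gives up if 'Server startup' never appears; a hedged Python-side variant with a bounded retry budget (reusing run_command and the same catalina.out path; the helper name and timings are illustrative) could look like:

    import time

    def wait_for_startup(webapp, attempts=30, interval=10):
        # Poll catalina.out until 'Server startup' appears or the retry budget runs out.
        for _ in range(attempts):
            out = run_command("tail -n 50 %s/logs/catalina.out|grep 'Server startup'" % webapp,
                              stdout="return")
            if out:
                return True
            time.sleep(interval)
        return False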
Example #13
                    urldicts = urlmerge.get_url(url_dic["data"], mp4_prefix=prefixs['mp4_prefix'], gif_prefix=prefixs['gif_prefix'],
                                                srt_prefix=prefixs['srt_prefix'])  # build the download-info dict

                    q = init_queue(urldicts)  # initialise the download queue
                    while True:
                        output = commands.getoutput('ps aux|grep wget|grep -v grep|wc -l')
                        if q.empty():
                            logger_root.info('All videos under course #%d have been downloaded!' % i)
                            if i == len(api_get_url_list):
                                send_mail("Local deployment course%s" % course_id_list, "All videos have been downloaded!")
                            break
                        elif int(output) <= 10:
                            aa=q.get()
                            download_dir=aa[0]
                            download_url=aa[1]
                            fname = download_dir + os.path.basename(download_url)
                            if not os.path.exists(download_dir):
                                os.system('mkdir %s' % download_dir)
                            logger_root.info('Downloading %s....' % fname)
                            os.system('wget -b -q -N -c -P %s %s' % (download_dir,download_url))
                else:
                    logger_root.error('Could not get the video uuids to download: course id #%d has no videos!' % i)
                    send_mail("Local deployment course%s" % course_id_list, "Could not get the video uuids to download: course id #%d has no videos!" % i)
            else:
                logger_root.error('Could not get the video uuids to download: the API call failed!')
                send_mail("Local deployment course%s" % course_id_list, "Could not get the video uuids to download: the API call failed!")
            i+=1
    else:
        logger_root.error('The same process is already running!')

 def mv_upload_file_to_backup_dir(self):
     # Check whether the upload directory contains a compressed package
     #if cf.has_option(mod_name,'is_compress') and cf.get(mod_name,'is_compress') == 'True':
     logger_root.info("mv_upload_file_to_backup_dir %s" % self.host)
     # If the backup directory already holds an update package, no copy is needed
     if os.path.exists(local_backup_file_prefix):
         return 0
     else:
         os.path.exists(local_backup_file_prefix) or os.makedirs(local_backup_file_prefix)
     if self.is_compress == 'True':
         if os.path.exists("%s" % self.upload_file_prefix+".tar.gz") or os.path.exists("%s" % self.upload_file_prefix+".zip"):
     # If it is a compressed package, extract it first
     # Copy the file to the local sync directory
             if type == "java":
                 os.path.exists(local_backup_dir) or os.makedirs(local_backup_dir)
                 logger_root.info("chdir",local_backup_dir)
                 os.chdir(local_backup_dir)
                 logger_root.info('mv %s.tar.gz %s 2>/dev/null||mv %s.zip %s 2>/dev/null' % (self.upload_file_prefix,local_backup_dir,self.upload_file_prefix,local_backup_dir))
                 os.system("mv %s.tar.gz %s 2>/dev/null||mv %s.zip %s 2>/dev/null " % (self.upload_file_prefix,local_backup_dir,self.upload_file_prefix,local_backup_dir))
             elif type == "jar" or type == "war":
                 os.chdir(local_backup_file_prefix)
                 logger_root.info("chdir",local_backup_dir)
                 os.system("mv  %s %s" % (upload_dir + mod_name + "." + type,local_backup_file_prefix))
                 logger_root.info("mv %s %s" % (upload_dir + mod_name + "." + type,local_backup_file_prefix))
             elif type == "c" or type == "php" or type == "nodejs":
                 os.path.exists(self.local_backup_dir) or os.makedirs(self.local_backup_dir)
                 os.chdir(self.local_backup_dir)
                 os.system("mv %s.tar.gz %s 2>/dev/null||mv %s.zip %s 2>/dev/null " % (self.upload_file_prefix,self.local_backup_dir,self.upload_file_prefix,self.local_backup_dir))
                 logger_root.info("mv %s.tar.gz %s 2>/dev/null||mv %s.zip %s 2>/dev/null")
             else:
                 logger_root.error("mod_type error")
                 logger_console.error("mod_type error")
                 sys.exit()
             #print os.path.abspath(os.path.curdir)
             os.chdir(local_backup_dir)
             logger_root.info("tar xzf %s.tar.gz 2> /dev/null||unzip %s.zip 2>/dev/null" % (mod_name,mod_name))
             os.system("tar xzf %s.tar.gz 2> /dev/null||unzip %s.zip >/dev/null 2>&1" % (self.mod_name,self.mod_name))
             os.system("rm -f %s.tar.gz 2>/dev/null;rm -f %s.zip >/dev/null 2>&1" % (self.mod_name,self.mod_name))
             logger_root.info("rm -f %s.tar.gz 2>/dev/null;rm -f %s.zip 2>/dev/null" % (mod_name,mod_name))
             if type == "c":
                 os.system("[ -d %s ] && mv %s/* ./ && rmdir %s" % (self.mod_name,self.mod_name,self.mod_name))
         else:
             logger_root.error("You compress flag is True,but your " + upload_dir + "can't find " + self.mod_name + ".zip or " + self.mod_name + ".tar.gz")
             logger_console.error("You compress flag is True,but your " + upload_dir + "can't find " + self.mod_name + ".zip or " + self.mod_name + ".tar.gz")
             sys.exit()
     elif type == "jar" or type == "war" or type == "nodejs":
         # If there is no compressed package, look for a jar or war package
         os.chdir(local_backup_file_prefix)
         logger_root.info("chdir %s" % local_backup_file_prefix)
         if type == "war":
             java_file="."
         elif type == "jar":
             java_file="-1.0-SNAPSHOT."
         if os.path.exists("%s" % upload_dir + mod_name + java_file + type):
             logger_root.info("mv %s %s" % (upload_dir + mod_name + java_file + type,local_backup_file_prefix))
             os.system("mv %s %s" % (upload_dir + mod_name + java_file + type,local_backup_file_prefix))
         else:
             logger_root.error(upload_dir + " can't find " + self.mod_name + java_file + type)
             logger_console.error(upload_dir + " can't find " + self.mod_name + java_file + type)
             sys.exit()
     else:
         # If there is no compressed package, check for the module directory
         if os.path.exists("%s" %  upload_unzip_dir):
             os.system("mv  %s %s" % (upload_unzip_dir,self.local_backup_dir))
         # If neither exists, exit
         else:
             logger_root.error("You compress flag is  False,But " +upload_dir + " can't find " + self.mod_name + " directory")
             logger_console.error("You compress flag is  False,But " +upload_dir + " can't find " + self.mod_name + " directory")
             sys.exit()
    print cf.sections()
    sys.exit(0)

cp.set("handler_filehander","args",('/home/update/log/%s.log' % mod_name, 'a'))
cp.write(open(log_conf,"w"))
from lib.log import logger_root,logger_console
from lib.addserver import AddServer
from lib.nginx import nginx
from lib.docker import docker
from lib.check_status import CheckStatus,check_all_server

cmd="ps aux|grep update.py |grep %s|grep %s|grep -v grep|wc -l" % (mod_name,action)
out=subprocess.Popen(cmd,shell=True,stdout=subprocess.PIPE,stderr=subprocess.PIPE)
if int(out.stdout.read()) > 1:
    logger_console.error("[%s]进程id已经存在,请不要重复[%s]!如需了解详情,请查看日志!" % (mod_name,action))
    logger_root.error("[%s]进程id已经存在,请不要重复[%s]!如需了解详情,请查看日志!" % (mod_name,action))
    sys.exit(0)
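The ps|grep|wc -l guard above can miscount whenever unrelated command lines happen to contain the module name; a hedged alternative is an exclusive lock file held for the lifetime of the run (the lock path below is only an example):

    import fcntl

    lock_file = open('/tmp/update_%s_%s.lock' % (mod_name, action), 'w')
    try:
        # A second invocation with the same mod_name/action fails to acquire the lock.
        fcntl.flock(lock_file, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except IOError:
        logger_console.error("[%s] another [%s] run is already in progress!" % (mod_name, action))
        sys.exit(0)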

logger_root.info("开始发版!!!!!")
#如果没有指定模块名或者动作,打印错误并退出
if mod_name or action:
    pass
else:
    logger_root.error('''mod_name or action is missing!\nuse -h for help''')
    logger_console.error('''mod_name or action is missing!\nuse -h for help''')
    sys.exit()

# If the module is not in the module list, print an error and exit
if not cf.has_section(mod_name):
    logger_root.error("mod_name %s is not in the mod list\nmod_name must be one of\n %s \n\n see %s for more information" % (mod_name,cf.sections(),mod_file))
    logger_console.error("mod_name %s is not in the mod list\nmod_name must be one of\n %s \n\n see %s for more information" % (mod_name,cf.sections(),mod_file))
Example #16
    def run(self):
        logger_root.debug('Starting ' + self.name)
        global tmp_dir
        global queue_lock
        global error_list
        global succeed_list
        global download_list
        global exitFlag
        global err_exit

        while True:
            queue_lock.acquire()
            if not DownloadThread.__queue.empty():
                try:
                    self.q_set = DownloadThread.__queue.get(block=False)
                except:
                    queue_lock.release()
                    break
                else:
                    queue_lock.release()
                # print self.q_set      # print the queue entry
                self.dir = self.q_set[0]
                self.url = self.q_set[1]
                fname = os.path.basename(self.url)
                if self.url in download_list:    # if os.path.exists(tmp_dir + fname): use the more precise download_list as the check instead
                    logger_root.warning('%s duplicate download items %s.' % (self.name, self.url))
                elif not os.path.exists(self.dir + fname):
                    queue_lock.acquire()
                    download_list.add(self.url)
                    queue_lock.release()
                    logger_root.info('%s start download %s.' % (self.name, self.url))
                    try:
                        host = urllib2.urlparse.urlparse(self.url).netloc
                        headers = {'Host': host,
                                   'User-Agent':'Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.101 Safari/537.36',
                                   'Accept':'*/*',
                                   'Connection':'keep-alive'
                                  }
                        req = urllib2.Request(self.url,headers=headers)
                        handle = urllib2.urlopen(req, timeout=120)
                        etag = handle.headers['etag'].strip('"')
                        s_length = int(handle.headers["Content-Length"].strip('"'))
                        d_length = 0
                        with open(tmp_dir + fname, 'wb') as f_handler:
                            while True:
                                if exitFlag:
                                    raise KeyboardInterrupt
                                buf = 4096 if s_length - d_length > 4096 else s_length - d_length
                                if buf == 0:
                                    f_handler.flush()
                                    break
                                chunk = handle.read(buf)
                                # if not chunk:   # judge completion by the gap between Content-Length and the downloaded size instead
                                #     break
                                if not chunk and s_length != d_length:
                                    raise Exception('Network failure while downloading %s.' % self.url)
                                f_handler.write(chunk)
                                f_handler.flush()
                                d_length += len(chunk)
                    except KeyboardInterrupt:
                        while not f_handler.closed:
                            time.sleep(1)
                        if self.check_file(tmp_dir + fname, etag):
                            move(tmp_dir + fname, self.dir + fname)
                            succeed_list.add(self.url)
                            logger_root.info('%s Successful download %s.' % (self.name, self.url))
                        else:
                            os.remove(tmp_dir + fname)
                            # error_list.add((self.dir, self.url))
                            logger_root.warning('%s stop download %s' % (self.name, self.url))
                        break
                    except URLError as e:
                        logger_root.error('%s %s %s' % (self.name, self.url, str(e)))
                        error_list.add((self.dir, self.url))
                        queue_lock.acquire()
                        download_list.discard(self.url)
                        queue_lock.release()
                        continue
                    except socket.timeout as e:
                        os.remove(tmp_dir + fname)
                        logger_root.error('%s %s %s' % (self.name, self.url, str(e)))
                        error_list.add((self.dir, self.url))
                    except IOError as e:
                        os.remove(tmp_dir + fname)
                        logger_root.error('%s %s %s' % (self.name, self.url, str(e)))
                        print traceback.format_exc()
                        break
                    except Exception as e:
                        os.remove(tmp_dir + fname)
                        logger_root.error('%s %s %s' % (self.name, self.url, str(e)))
                        error_list.add((self.dir, self.url))
                        print traceback.format_exc()
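check_file is not shown in this excerpt; since the thread strips the quotes off the ETag header before passing it in, a plausible (hypothetical) implementation compares the file's MD5 digest against a non-multipart, S3-style ETag:

    import hashlib

    def check_file(self, path, etag):
        # Hypothetical integrity check: the ETag is assumed to be the plain MD5 of the object.
        md5 = hashlib.md5()
        with open(path, 'rb') as f:
            for chunk in iter(lambda: f.read(8192), b''):
                md5.update(chunk)
        return md5.hexdigest() == etag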
Example #17
             else:
                 while not f_handler.closed:
                     time.sleep(1)
                 d_length = os.path.getsize(tmp_dir + fname)     # Concurrent downloads may saturate disk I/O; re-check the actual on-disk size to make the comparison more accurate
                 if s_length != d_length:
                     time.sleep(60)
                     d_length = os.path.getsize(tmp_dir + fname)
                 if self.check_file(tmp_dir + fname, etag):
                     move(tmp_dir + fname, self.dir + fname)
                     succeed_list.add(self.url)
                     logger_root.info('%s Successful download %s.' % (self.name, self.url))
                 else:
                     os.remove(tmp_dir + fname)
                     # move(tmp_dir + fname, '/home/html/lcms/video/' + fname)
                     error_list.add((self.dir, self.url))
                     logger_root.error('%s Incomplete download %s, source file length is %s, downloaded file length is %s.' % (self.name, self.url, s_length, d_length))
             finally:
                 try:
                     handle.close()
                     queue_lock.acquire()
                     download_list.discard(self.url)
                     queue_lock.release()
                 except Exception as e:
                     # logger_root.error('try_finally %s %s %s.' % (self.name, self.url, str(e)))
                     pass
     else:
         queue_lock.release()
         break
 if exitFlag:
     logger_root.debug('receive a signal to exit, [%s] stop.' % self.name)
 else:
Example #18
                    srt_prefix=prefixs['srt_prefix'])  # build the download-info dict

                q = init_queue(urldicts)  # initialise the download queue
                while True:
                    output = commands.getoutput(
                        'ps aux|grep wget|grep -v grep|wc -l')
                    if q.empty():
                        logger_root.info('All videos have been downloaded')
                        send_mail("Local deployment school%s" % school_id, "All videos have been downloaded!")
                        break
                    elif int(output) <= 10:
                        aa = q.get()
                        download_dir = aa[0]
                        download_url = aa[1]
                        fname = download_dir + os.path.basename(download_url)
                        if not os.path.exists(download_dir):
                            os.system('mkdir %s' % download_dir)
                        logger_root.info('Downloading %s....' % fname)
                        os.system('wget -b -q -N -c -P %s %s' %
                                  (download_dir, download_url))
            else:
                logger_root.error('Could not get the video uuids to download; this school id may have no videos!')
                send_mail("Local deployment school%s" % school_id,
                          "Could not get the video uuids to download; this school id may have no videos!")
        else:
            logger_root.error('Could not get the video uuids to download; the API call may have failed!')
            send_mail("Local deployment school%s" % school_id,
                      "Could not get the video uuids to download; the API call may have failed!")
    else:
        logger_root.error('The same process is already running!')
Example #19
        else:
            self.conn = httplib.HTTPConnection(self.domain, 80, timeout=self.timeout)

    def api_get(self):
        try:
            self.conn.request(method="GET", url=self.query_str)
            resp = self.conn.getresponse()
        except Exception as e:
            self.errmsg = "domain: %s query: %s error: %s" % (self.domain, self.query_str, str(e))
            logger_root.error(self.errmsg)
        else:
            if resp.status == 200:
                self.data = json.loads(resp.read())
            else:
                self.errmsg = "API GET - Http response code(%s):%s" % (resp.status, resp.reason)
                logger_root.error(self.errmsg)
        finally:
            self.conn.close()

        return self.data

    def api_post(self, pos=""):
        headers = {"Content-type": "application/json", "Accept": "application/json"}
        # pos = urllib.urlencode(pos)  # The RESTful API accepts JSON; only x-www-form-urlencoded data needs URL encoding
        try:
            self.conn.request(method="POST", url=self.query_str, body=pos, headers=headers)
            resp = self.conn.getresponse()
        except Exception as e:
            self.errmsg = "domain: %s query: %s error: %s" % (self.domain, self.query_str, str(e))
            logger_root.error(self.errmsg)
        else: