Example #1
    def search_extension(self, domain, extensions, threads):
        """
            This method loads the extensions from the database and searches for installed extensions.
                /typo3conf/ext/:        Local installation path. This is where extensions usually get installed.
                /typo3/ext/:            Global installation path (not used at the moment)
                /typo3/sysext/:         Extensions shipped with the core
        """
        found_extensions = {}
        thread_pool = ThreadPool()
        for ext in extensions:
            thread_pool.add_job((request.head_request,
                                 ('{}/typo3conf/ext/{}/'.format(domain, ext))))
            thread_pool.add_job((request.head_request,
                                 ('{}/typo3/sysext/{}/'.format(domain, ext))))
            #thread_pool.add_job((request.head_request, ('{}/typo3/ext/{}/'.format(domain, ext))))
        thread_pool.start(threads)

        for installed_extension in thread_pool.get_result():
            name = installed_extension[1][:-1]
            name = name[name.rfind('/') + 1:]
            found_extensions[name] = {
                'url': installed_extension[1],
                'version': None,
                'file': None
            }
        return found_extensions
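Every TYPO3 snippet in this list (Examples #1, #3-#5, #7 and #12) drives the same small worker-pool interface: add_job((func, args)), start(threads) and get_result(). The project's actual ThreadPool class is not part of these excerpts, so the following is only a minimal sketch of how such a pool could look, assuming that jobs are (callable, argument) pairs and that get_result() yields (argument, return value) pairs for jobs whose callable returned something truthy.

import queue
import threading


class ThreadPool:
    """Hypothetical reconstruction of the add_job/start/get_result interface, not the project's actual class."""

    def __init__(self):
        self._jobs = queue.Queue()
        self._results = []
        self._lock = threading.Lock()

    def add_job(self, job):
        # job is a (callable, argument) pair, e.g. (request.head_request, url)
        self._jobs.put(job)

    def _worker(self):
        while True:
            try:
                func, arg = self._jobs.get_nowait()
            except queue.Empty:
                return
            args = arg if isinstance(arg, tuple) else (arg,)
            result = func(*args)
            if result:
                # keep only jobs whose callable reported success
                with self._lock:
                    self._results.append((arg, result))

    def start(self, threads, version_search=False):
        # version_search only mirrors the keyword used in the snippets above
        workers = [threading.Thread(target=self._worker) for _ in range(threads)]
        for w in workers:
            w.start()
        for w in workers:
            w.join()

    def get_result(self):
        return self._results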
Example #2
    def __init__(self):
        print("DeployWorker")
        self.deploy_id = -1
        self.deploy_form_id = -1
        self.deploy_type = None
        self.conn = -1
        self.driver_name = None

        self.pool = ThreadPool()
        self.msg_handler = RabbitWrapper()
        self.msg_handler.createWorkerConsumer(self.callbackWorker)
        self.error_msg_handler = RabbitWrapper()
        self.error_msg_handler.createErrorConsumer(self.callbackCancelTask)
Example #3
    def search_extension(self, domain, extensions):
        """
			This method searches for installed extensions.
				/typo3conf/ext/:		Local installation path. This is where extensions get usually installed.
				/typo3/ext/:			Global installation path (not used atm)
				/typo3/sysext/:			Extensions shipped with core (not used atm)
		"""
        config = json.load(
            open(os.path.join(self.__path, 'lib', 'config.json')))
        thread_pool = ThreadPool()
        for ext in extensions:
            thread_pool.add_job((Request.head_request,
                                 (domain.get_name(), '/typo3conf/ext/' + ext)))
            #thread_pool.add_job((Request.head_request, (domain.get_name(), '/typo3/ext/' + ext)))
            #thread_pool.add_job((Request.head_request, (domain.get_name(), '/typo3/sysext/' + ext)))
        thread_pool.start(config['threads'])

        for installed_extension in thread_pool.get_result():
            domain.set_installed_extensions(installed_extension[1][1])
Example #4
	def search_extension(self, domain, extensions):
		"""
			This method searches for installed extensions.
				/typo3conf/ext/:		Local installation path. This is where extensions get usually installed.
				/typo3/ext/:			Global installation path (not used atm)
				/typo3/sysext/:			Extensions shipped with core (not used atm)
		"""
		config = json.load(open(os.path.join(self.__path, 'lib', 'config.json')))
		thread_pool = ThreadPool()
		for ext in extensions:
			thread_pool.add_job((Request.head_request, (domain.get_name(), '/typo3conf/ext/' + ext)))
			#thread_pool.add_job((Request.head_request, (domain.get_name(), '/typo3/ext/' + ext)))
			#thread_pool.add_job((Request.head_request, (domain.get_name(), '/typo3/sysext/' + ext)))
		thread_pool.start(config['threads'])

		for installed_extension in thread_pool.get_result():
			domain.set_installed_extensions(installed_extension[1][1])
Example #5
	def search_ext_version(self, domain, extension_dict):
		"""
			This method adds a job for every installed extension.
			The goal is to find a ChangeLog or Readme in order to determine the version.
		"""
		config = json.load(open('lib/config.json'))
		thread_pool = ThreadPool()
		for extension_path in extension_dict:
			thread_pool.add_job((Request.head_request, (domain.get_name(), extension_path + '/ChangeLog')))
			thread_pool.add_job((Request.head_request, (domain.get_name(), extension_path + '/ChangeLog.txt')))
			thread_pool.add_job((Request.head_request, (domain.get_name(), extension_path + '/Readme.txt')))
			thread_pool.add_job((Request.head_request, (domain.get_name(), extension_path + '/README.md')))
			thread_pool.add_job((Request.head_request, (domain.get_name(), extension_path + '/README.rst')))
		
		thread_pool.start(config['threads'], True)

		for changelog_path in thread_pool.get_result():
			ext, path = self.parse_extension(changelog_path)
			domain.set_installed_extensions_version(path, ext[4])
Example #6
	def do_sharded_load(self, from_db, from_t, to_db, to_t, options):
		#from threading import Thread
		r=None
		
		status=1		
		assert self.is_set('SKIP_INDEX_MAINTENANCE') == 'TRUE', 'Cannot shard without SKIP_INDEX_MAINTENANCE = TRUE'
		assert self.is_set('IF_DPL_SERIAL') == '0', 'Cannot shard without IF_DPL_SERIAL = 0'
		#(r, status) =self.do_load(from_db, from_t, to_db, to_t, ptin)
		nosh = self.is_set('NUM_OF_SHARDS')
		assert nosh, 'NUM_OF_SHARDS is undefined.'
		nosh=int(nosh)
		maxt=20
		sharded_part={}
		if nosh:
			pprint(to_t)
			self._logger.info('Sharding table %s.%s' % tuple(from_t) )
			if 1: #optimize
				(r_int, status)=self.get_common_cols(from_db, from_t, to_db, to_t)
			(shards, status)=self.get_tab_shards(nosh, from_db, from_t,options)
			#pprint(shards)
			#print status
			#sys.exit(1)
			assert status==0,'Cannot fetch shards.'
			start = time.time()
			#queue = Queue.Queue()
			# Create a pool with three worker threads
			prev_count=activeCount()
			pool_size = len(shards)
			assert pool_size<=maxt, 'Too many shards.'
			if pool_size>maxt:
				pool_size=maxt
			if pool_size<1:
				pool_size=3
			pool = ThreadPool(pool_size)

			# Insert tasks into the queue and let them run
			i =0
			self.pool_cntr=[]
			
			for ln in shards:
				shard = ln[0].split('||')
				#print shard
				#time.sleep(1)
				#options['_SHARD']=shard
				#pprint(options)
				#sys.exit(1)
				shpart=shard[3]
				if shpart:
					sharded_part[shpart]=1
				pool.queueTask(self.do_load, (from_db, from_t, to_db, to_t,{'_PARTITION':options.get('_PARTITION'),'_SUBPARTITION':options.get('_SUBPARTITION'),'_SHARD':shard}), self.taskCallback)
				#del options['_SHARD']
				if i==0  and 0:
					break
				i +=1
			# When all tasks are finished, allow the threads to terminate
			pool.joinAll()
			
			#print 'pool.__threads',pool.__threads
			#print 'threads left ==============', len(self.pool_cntr)
			#time.sleep(1)
			#import threading
			while activeCount()>1: #pool.getThreadCount()>0:
				print '%s: Waiting for tpool %s' % (self._logger.getElapsedSec() , activeCount() - prev_count)
				#print activeCount()
				if len(shards)==len(self.pool_cntr):
					break
				time.sleep(1)
			
			#print 'threads left ==============', len(self.pool_cntr)
			print "Elapsed Time: %s" % (time.time() - start)
			start = time.time()
			if self.p_if("IF_REBUILD_UNUSABLE_INDEXES") and len(shards)>0: #logging indexes for rebuild
				part=options.get('_PARTITION')
				if part:
					print 'Index partition', part
					self.rebuild_tab_indexes(to_db,('.'.join(to_t),to_t),{'_PARTITION':part})
				else:
					pprint(sharded_part)
					#sys.exit(1)
					if sharded_part:
						for part in sharded_part.keys():
							print 'Sharded index partition', part
							self.rebuild_tab_indexes(to_db,('.'.join(to_t),to_t),{'_PARTITION':part})
					else:
						self.rebuild_tab_indexes(to_db,('.'.join(to_t),to_t),{})
				print "Elapsed Time: %s" % (time.time() - start)
			

		else:
			self._logger.warn('Passing sharded load. NUM_OF_SHARDS = %s' % nosh)
		#sys.exit(1)
		return (r, status) 
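Example #6 relies on a different pool flavour: one sized at construction time, fed with queueTask(callable, args, callback) and drained with joinAll(). The snippet itself is Python 2; below is a rough Python 3 sketch of that interface, under the assumption that the callback, when supplied, receives each task's return value (which is how self.taskCallback appears to populate self.pool_cntr). The project's real ThreadPool and its joinAll() semantics may differ.

import queue
import threading


class ThreadPool:
    """Hypothetical sketch of the queueTask/joinAll interface used in Example #6."""

    def __init__(self, pool_size):
        self._tasks = queue.Queue()
        self._workers = [threading.Thread(target=self._run) for _ in range(pool_size)]
        for w in self._workers:
            w.start()

    def queueTask(self, func, args=(), callback=None):
        self._tasks.put((func, args, callback))

    def _run(self):
        while True:
            item = self._tasks.get()
            if item is None:          # sentinel: shut this worker down
                return
            func, args, callback = item
            result = func(*args)
            if callback is not None:
                callback(result)

    def joinAll(self):
        # stop accepting work and wait for the workers to drain the queue
        for _ in self._workers:
            self._tasks.put(None)
        for w in self._workers:
            w.join()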
Example #7
    def search_ext_version(self, found_extensions, threads):
        """
            This method adds a job for every installed extension.
            The goal is to find a file with version information.
        """
        thread_pool = ThreadPool()
        for extension, values in found_extensions.items():
            thread_pool.add_job((request.version_information, (values['url'] + 'Documentation/ChangeLog/Index.rst', None)))
            thread_pool.add_job((request.version_information, (values['url'] + 'Documentation/Changelog/Index.rst', None)))
            thread_pool.add_job((request.version_information, (values['url'] + 'Documentation/Settings.cfg', None)))
            thread_pool.add_job((request.version_information, (values['url'] + 'Documentation/Settings.yml', r'(?:release:)\s?([0-9]+\.[0-9]+\.?[0-9]?[0-9]?)')))
            thread_pool.add_job((request.version_information, (values['url'] + 'Settings.yml', r'(?:release:)\s?([0-9]+\.[0-9]+\.?[0-9]?[0-9]?)')))
            thread_pool.add_job((request.version_information, (values['url'] + 'Documentation/ChangeLog', None)))
            thread_pool.add_job((request.version_information, (values['url'] + 'Documentation/Index.rst', None)))
            thread_pool.add_job((request.version_information, (values['url'] + 'composer.json', r'(?:"dev-master":|"version":)\s?"([0-9]+\.[0-9]+\.?[0-9x]?[0-9x]?)')))
            thread_pool.add_job((request.version_information, (values['url'] + 'Index.rst', None)))
            thread_pool.add_job((request.version_information, (values['url'] + 'doc/manual.sxw', None)))
            thread_pool.add_job((request.version_information, (values['url'] + 'ChangeLog', None)))
            thread_pool.add_job((request.version_information, (values['url'] + 'CHANGELOG.md', None)))
            thread_pool.add_job((request.version_information, (values['url'] + 'ChangeLog.txt', None)))
        
        thread_pool.start(threads, version_search=True)

        for version_path in thread_pool.get_result():
            path = version_path[0][0]
            version = version_path[1]
            name = version_path[0][0]
            if 'Documentation/' in name:
                name = name[:name.rfind('Documentation/')+1]
            if 'doc/' in name:
                name = name[:name.rfind('doc/')+1] 
            name = name[name.find('ext/')+4:name.rfind('/')]
            found_extensions[name]['version'] = version
            found_extensions[name]['file'] = path
        return found_extensions
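Examples #1 and #7 belong together: the dictionary returned by search_extension() feeds directly into search_ext_version(). A hypothetical driver (the ExtensionScanner class name and the target values are made up for illustration) would look roughly like this:

scanner = ExtensionScanner()    # assumed class holding both methods above
found = scanner.search_extension('https://example.org', ['news', 'powermail'], threads=10)
found = scanner.search_ext_version(found, threads=10)
for name, info in found.items():
    print(name, info['version'], info['file'], info['url'])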
Example #8
class DeployWorker():
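    """
    Consumes deploy commands from RabbitMQ and fans each target host out to
    the thread pool; a second consumer listens for cancel requests.
    """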

    def __init__(self):
        print("DeployWorker")
        self.deploy_id = -1
        self.deploy_form_id = -1
        self.deploy_type = None
        self.conn = -1
        self.driver_name = None

        self.pool = ThreadPool()
        self.msg_handler = RabbitWrapper()
        self.msg_handler.createWorkerConsumer(self.callbackWorker)
        self.error_msg_handler = RabbitWrapper()
        self.error_msg_handler.createErrorConsumer(self.callbackCancelTask)

    def getDeployRequest(self):
        return DeployRequest.objects.filter(id=self.deploy_id)[:1].get()

    def getDeployNodeList(self):
        return DeployHost.objects.filter(deploy_id=self.deploy_id)

    def getDeployParms(self):
        self.getDeployNodeList()
        test_parms = []
        deploy_request = self.getDeployRequest()
        # for item in detail:
        #     print("module %s" % item.module)
        #     module = item.module
        host_list = self.getDeployNodeList()
        for host in host_list:
            parm = {}
            parm["host_ip"] = host.host_ip
            parm["deploy_id"] = host.deploy_id.id
            parm["deploy_type"] = host.deploy_type
            parm["system"] = deploy_request.system
            parm["product"] = deploy_request.product
            parm["service_name"] = deploy_request.module
            test_parms.append(parm)
            print(test_parms)
        return test_parms

    def runDeploy(self, host_ip, deploy_id, deploy_type, service_name, product, system):
        print("runDeploy")
        print(host_ip, deploy_id, deploy_type, service_name, product, system)
        t = HostNode(host_ip, deploy_id, deploy_type, service_name, product, system)
        t.pre_check()

    def startDeploy(self, cmd_msg):
        self.deploy_id = cmd_msg['deploy_id']
        self.deploy_form_id = cmd_msg['deploy_form_id']
        self.deploy_type = cmd_msg['deploy_type']
        self.conn = cmd_msg['conn']
        self.product = cmd_msg['product']
        self.system = cmd_msg['system']

        task_parms = self.getDeployParms()
        print("task_parms %s" % task_parms)
        self.pool.setMaxWorkers(self.conn)
        self.pool.addTask(self.runDeploy)
        # print("run task")
        self.pool.runTaskPerParm(task_parms)

    def updateCancelToDB(self):
        print("updateCancelToDB")
        DeployHost.objects.filter(deploy_id=self.deploy_id, status=DeployHost.STATUS_NEW).\
            update(status=DeployHost.STATUS_CANCEL)
        # DeployHost.objects.filter(deploy_id=self.deploy_id).exclude(
        #     status=DeployHost.STATUS_FAIL).exclude(
        #     status=DeployHost.STATUS_SUCCESS).update(
        #     status=DeployHost.STATUS_CANCEL)

    def callbackWorker(self, ch, method, properties, body):
        print("callbackWorker")
        cmd_msg = self.msg_handler.parseMsg(body)
        print(cmd_msg)
        if cmd_msg == -1:
            print("error msg do nothing just return")
        else:
            print("start crtl thread")
            self.startDeploy(cmd_msg)
        ch.basic_ack(delivery_tag=method.delivery_tag)
        # raise Exception("test callback exception")
        # obj = threading.Thread(target=deploy_ctl_thread, args=[parm])
        # obj.start()

    def callbackCancelTask(self, ch, method, properties, body):
        print("callbackCancelTask")
        parm = self.error_msg_handler.parseMsg(body)

        if parm == -1:
            print("error msg do nothing just return")
            ch.basic_ack(delivery_tag=method.delivery_tag)
            return

        if parm['deploy_id'] == self.deploy_id:
            print("cancel task is in current process, proceed it")
            self.pool.cancelTask()
            self.updateCancelToDB()
            # ch.basic_ack(delivery_tag=method.delivery_tag)
        else:
            print("ignore the cancel request")

        ch.basic_ack(delivery_tag=method.delivery_tag)

    def listenWorkerQueue(self):
        self.msg_handler.recvMsg()

    def listenErrQueue(self):
        self.error_msg_handler.recvMsg()
Example #9
    #                'host_ip': '172.27.53.31', 'deploy_id': 1, 'deploy_type': 'DEPLOY'}]
    #
    # def runDeploy(host_ip, deploy_id, deploy_type, service_name, product, system):
    #     print("runDeploy")
    #     print(host_ip, deploy_id, deploy_type, service_name, product, system)
    #     t = HostNode(host_ip,deploy_id,deploy_type,service_name,product,system)
    #     t.pre_check()
    #
    def load_url(index, str):
        # if str == "error":
        #     raise ThreadPoolError
        # print("--------------------")
        sleep(2)
        # print("this task is %s %s" % (index, str))

    pool = ThreadPool()
    pool.setMaxWorkers(20)
    pool.addTask(load_url, 1, "chenxi")
    # pool.runTaskPerNum(20000)
    obj = Thread(target=pool.runTaskPerNum, args=[200])
    obj.start()
    # sleep(2)
    print(len(pool.threads))
    sleep(1)
    print(len(pool.threads))
    pool.cancelTask()
    # pool.runTaskPerParm(test_parms)

    # t = HostNode(**test_parms)
    # t.pre_check()
    # worker = DeployWorker()
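Examples #2, #8 and #9 drive yet another interface: setMaxWorkers(n), addTask(fn, *args), runTaskPerParm(list_of_kwarg_dicts), runTaskPerNum(n), cancelTask() and a threads attribute. The sketch below is a reconstruction from those calls only; in particular, the assumption that cancelTask() merely skips tasks that have not started yet is a guess, not documented behaviour.

import threading


class ThreadPool:
    """Hypothetical stand-in for the setMaxWorkers/addTask/runTask* interface."""

    def __init__(self):
        self.max_workers = 1
        self.threads = []
        self._task = None
        self._task_args = ()
        self._cancelled = threading.Event()

    def setMaxWorkers(self, n):
        self.max_workers = n

    def addTask(self, fn, *args):
        # remember the callable plus any positional args (see Example #9)
        self._task, self._task_args = fn, args

    def _launch(self, calls):
        # one thread per queued call, concurrency bounded by a semaphore
        gate = threading.Semaphore(self.max_workers)

        def run(call):
            with gate:
                if not self._cancelled.is_set():
                    call()

        self.threads = [threading.Thread(target=run, args=(c,)) for c in calls]
        for t in self.threads:
            t.start()
        for t in self.threads:
            t.join()

    def runTaskPerParm(self, parms):
        # one invocation per keyword-argument dict, as in DeployWorker.startDeploy
        self._launch([lambda p=p: self._task(**p) for p in parms])

    def runTaskPerNum(self, n):
        # n invocations with the args captured by addTask, as in Example #9
        self._launch([lambda: self._task(*self._task_args) for _ in range(n)])

    def cancelTask(self):
        # assumption: only prevents tasks that have not started yet
        self._cancelled.set()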
Example #10
    def __init__(self):
        self.rf = config()
        self.thp = ThreadPool()
        self.log = LogSys()
        self.cr = CRedis()
        pass
Example #11
class ExcuteErrLog(object):

    def __init__(self):
        self.rf = config()
        self.thp = ThreadPool()
        self.log = LogSys()
        self.cr = CRedis()
        pass

    def __getconf(self, sect, key):

        return self.rf.getvalue(sect, key)
        pass

    def __choosemodule(self, value):
        print value
        result={
            1 : self.__getconf('logpath_conf', 'ecg_errlog'),
            2 : self.__getconf('logpath_conf', 'run_errlog'),
            3 : self.__getconf('logpath_conf', 'doctor_errlog'),
            4 : self.__getconf('logpath_conf', 'ecg_dblog'),
            5 : self.__getconf('logpath_conf', 'run_dblog'),
            6 : self.__getconf('logpath_conf', 'doctor_dblog'),
            7 : self.__getconf('logpath_conf', 'web_errlog'),
            8 : self.__getconf('logpath_conf', 'web_dblog'),
            9 : self.__getconf('logpath_conf', 'mysqllog')
        }
        print result.get(value)
        return result.get(value)

    def __timet(self):
        TIMEFORMAT = '%Y/%m/%d'                # %Y gives the full year (2016), %y the two-digit year (16)
        nowtime = time.strftime(TIMEFORMAT, time.localtime())
        return nowtime

    def showcount(self, logcont):
        _count = 0
        _dex = 0
        _rdex = 0
        _listcont = []
        ttime = self.__timet()
        for line in logcont:
            _dex += 1
            if ttime in line:
                if _count == 0:
                    _rdex = _dex
                _count += 1
        if _rdex:
            _listcont =  logcont[(_rdex-1):]
        _listcont.append(_count)
        return _listcont

    def getpinlog(self, dev, num):
        _rc = 0
        time.sleep(0.1)
        filepath = self.__choosemodule(num)
        ec = ExcuteConn(dev, 1)
        key = "%s%s" %(dev, num)
        comm = 'cat %s' % str(filepath)
        comm1 = 'cat %s' % (str(filepath) + ".1")
        print comm
        print comm1
        rc1 = ec.allexcute(comm)
        rc2 = ec.allexcute(comm1)
        if rc2 != 0 and rc1 != 0 :
            logcont = (rc2 + rc1)
        elif rc1 != 0:
            logcont =  rc1
        else:
            logcont = []
        print logcont
        listcont = self.showcount(logcont)
        listcont.append(len(listcont))
        if listcont:
            for mm in listcont:
                #rc = self.cr.pipline(key, mm)
                # the wrapped pipline() helper is buggy, so rrpush() is used instead
                _rc = self.cr.rrpush(key, mm)
            # self.cr.llrange(key, (-int(self.cr.llrange(key, -1, -1)[0])), -1)
            # the last element of the list records the length of the most recent insertion into it
            return _rc
        else:
            return 9

    def geterrnum(self, dev, num):
        newkey = "lognum"
        okey = "%s%s" %(dev, num)
        if self.cr.iexist(okey):
            value = self.cr.llrange(okey, -2, -2)[0]
        else:
            value = '0'
        newvalue = "%s%s-%s" %(dev, num, value)
        self.cr.rrpush(newkey,newvalue)
        pass

    def getmain(self, dev, num):
        #print "%s" %(dev)
        time.sleep(0.1)
        print "%s-%s" %(dev, num)

    def threadexcu(self):
        serverlist = ['web_m', 'web_b', 'web_s']
        for dev in serverlist:
            for i in range(1, 9):
                self.thp.creatappend(self.getpinlog, [dev, i])
        self.thp.thgo()

    def insertnum(self):
        self.cr.remove("lognum")
        serverlist = ['web_m', 'web_b', 'web_s']
        for dev in serverlist:
            for i in range(1, 9):
                self.thp.creatappend(self.geterrnum, [dev, i])
        self.thp.thgo()

    def getnum(self):
        nl = (',').join(self.cr.llrange("lognum", 0, -1))
        return nl

    def getlog(self, key):
        if self.cr.iexist(key) and int(self.cr.llrange(key, -2, -2)[0]):
            log = ('^.').join(self.cr.llrange(key, (-int(self.cr.llrange(key, -1, -1)[0])), -3))
        else:
            log = "No log."
            #log = ('@').join(self.cr.llrange('test1141', -33, -4))
        return log

    def sendmail(self, mail):
        fbash = self.__getconf('script_path', 'sendlogmail')
        commd = "%s %s" %(fbash, mail)
        ec = ExcuteConn('support', 0)
        ec.allexcute(commd)
        return 0
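Examples #10 and #11 use a simpler creatappend/thgo pairing. A minimal sketch, assuming thgo() starts every queued thread, waits for all of them and then clears the queue (the real class, like the Python 2 code above, may differ):

import threading


class ThreadPool(object):
    """Hypothetical reconstruction of the creatappend/thgo interface."""

    def __init__(self):
        self._threads = []

    def creatappend(self, fn, arg_list):
        # queue one thread per call; arg_list holds the positional arguments
        self._threads.append(threading.Thread(target=fn, args=tuple(arg_list)))

    def thgo(self):
        for t in self._threads:
            t.start()
        for t in self._threads:
            t.join()
        self._threads = []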
Example #12
    def search_ext_version(self, domain, extension_dict):
        """
			This method adds a job for every installed extension.
			The goal is to find a ChangeLog or Readme in order to determine the version.
		"""
        config = json.load(open('lib/config.json'))
        thread_pool = ThreadPool()
        for extension_path in extension_dict:
            thread_pool.add_job(
                (Request.head_request, (domain.get_name(),
                                        extension_path + '/ChangeLog')))
            thread_pool.add_job(
                (Request.head_request, (domain.get_name(),
                                        extension_path + '/ChangeLog.txt')))
            thread_pool.add_job(
                (Request.head_request, (domain.get_name(),
                                        extension_path + '/Readme.txt')))
            thread_pool.add_job(
                (Request.head_request, (domain.get_name(),
                                        extension_path + '/README.md')))
            thread_pool.add_job(
                (Request.head_request, (domain.get_name(),
                                        extension_path + '/README.rst')))

        thread_pool.start(config['threads'], True)

        for changelog_path in thread_pool.get_result():
            ext, path = self.parse_extension(changelog_path)
            domain.set_installed_extensions_version(path, ext[4])
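For completeness, a hypothetical driver for the Domain-based variant shown in Examples #3-#5 and #12; the class names Typo3Scanner and Domain, and the get_installed_extensions() accessor, are assumptions made purely for illustration:

scanner = Typo3Scanner()            # assumed class holding the search_* methods above
domain = Domain('example.org')      # assumed object providing get_name()/set_* helpers
scanner.search_extension(domain, ['news', 'powermail'])
scanner.search_ext_version(domain, domain.get_installed_extensions())   # assumed getter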