Example No. 1
	def applySettingsLocale(self, key) :
		if key == "application/locale/force_main_lang" :
			force_main_lang = self.__settings.value("application/locale/force_main_lang").toString()
			if force_main_lang != self.mainLang() :
				self.__locale = ( Qt.QLocale() if force_main_lang.isEmpty() else Qt.QLocale(force_main_lang) )
				self.localeChangedSignal(self.__locale.name())
				Logger.debug(Qt.QString("Accepted new locale \"%1\"").arg(self.__locale.name()))
Example No. 2
class Smartcommand:

    def __init__(self, daemon, smartcmd_id = 0):
        self.logger = Logger(False, LOG_FILE)
        self.logger.info('Started SMARTCMD')
        self.smartcmd_id = smartcmd_id
        self.sql = MasterSql()
        self.daemon = daemon

    def launch_smartcmd(self, json_obj, connection):
        if self.smartcmd_id == 0:
            self.logger.error('Invalid Smartcommand')
            return

        query = 'SELECT room_device_id, option_id, option_value, time_lapse FROM smartcommand_elems WHERE smartcommand_id ="'+ str(self.smartcmd_id) +'" ORDER BY exec_id'
        res = self.sql.mysql_handler_personnal_query(query)
        delay_color = 0
        for r in res:
            obj = {}
            obj['sync'] = 0
            data = {}
            data['room_device_id'] = r[0]
            data['option_id'] = r[1]
            data['value'] = r[2]
            obj['data'] = data
            obj['packet_type'] = 'smartcmd_launch'
            delay = r[3]
            if data['option_id'] == 392 or data['option_id'] == 393 or data['option_id'] == 394:
                delay_color = delay_color + 1
            if delay > 0 and delay_color <= 1:
                time.sleep(delay)
            if delay_color >= 3:
                delay_color = 0
            self.daemon.send_to_device(obj, connection)
Example No. 3
	def loadInfo(self, dict_name) :
		dict_name = str(dict_name)

		dict_file_name = Utils.joinPath(Const.AllDictsDirPath, dict_name)
		dict_file = Qt.QFile(dict_file_name)
		dict_file_stream = Qt.QTextStream(dict_file)
		if not dict_file.open(Qt.QIODevice.ReadOnly) :
			Logger.warning(Qt.QString("Cannot open dict file \"%1\" for reading info").arg(dict_file_name))
			return

		self.__info_cache_dict[dict_name] = {}
		for all_tags_list_item in AllTagsList :
			self.__info_cache_dict[dict_name][all_tags_list_item] = Qt.QString()

		while not dict_file_stream.atEnd() :
			Qt.QCoreApplication.processEvents(Qt.QEventLoop.ExcludeUserInputEvents)
			line = dict_file_stream.readLine()

			if line.isEmpty() :
				continue
			if line[0] != "#" and line.contains("  ") :
				break

			if line[0] == "#" :
				line.remove(0, 1)
				line = line.trimmed()

				key = MiscTag
				for key_item in self.__info_cache_dict[dict_name].keys() :
					tag = Qt.QString(key_item+":")
					if line.startsWith(tag) :
						line = line.remove(0, tag.length()).simplified()
						key = str(key_item)
						break
				if not self.__info_cache_dict[dict_name][key].isEmpty() :
					self.__info_cache_dict[dict_name][key].append("<br>")
				self.__info_cache_dict[dict_name][key].append(line)

		dict_file.close()

		###

		self.__info_cache_dict[dict_name][FileSizeTag] = Qt.QString().setNum(dict_file.size() / 1024)

		lang_codes_dict = LangsList.langCodes()
		direction_regexp = Qt.QRegExp("((..)-(..))")
		if direction_regexp.exactMatch(self.__info_cache_dict[dict_name][DirectionTag]) :
			icon_width = icon_height = Qt.QApplication.style().pixelMetric(Qt.QStyle.PM_SmallIconSize)
			self.__info_cache_dict[dict_name][DirectionTag] = (
				Qt.QString("<img src=\"%3\" width=\"%1\" height=\"%2\"> &#187; <img src=\"%4\" width=\"%1\" height=\"%2\">"
					"&nbsp;&nbsp;&nbsp;%5 &#187; %6 (%7)").arg(icon_width).arg(icon_height)
						.arg(IconsLoader.iconPath(Utils.joinPath("flags", direction_regexp.cap(2))))
						.arg(IconsLoader.iconPath(Utils.joinPath("flags", direction_regexp.cap(3))))
						.arg(LangsList.langName(direction_regexp.cap(2), lang_codes_dict))
						.arg(LangsList.langName(direction_regexp.cap(3), lang_codes_dict))
						.arg(direction_regexp.cap(1)) )

		for tag_key in self.__info_cache_dict[dict_name].keys() :
			if self.__info_cache_dict[dict_name][tag_key].isEmpty() :
				self.__info_cache_dict[dict_name][tag_key] = tr("Unavailable")
Example No. 4
def init():
    global colorPairs

    Logger.put('%d color pairs supported.' % (curses.COLOR_PAIRS))
    Logger.put('curses.hasColors() = %d' % (curses.has_colors()))

    if (colorPairs != None):
        Logger.put('Colors.init() called more than once!')
        return
    if (curses.COLOR_PAIRS < 64):
        Logger.put('Cannot start unless there is support for 64 color pairs or more!')
        Logger.put('Your system has: %d pairs.' % (curses.COLOR_PAIRS))
        sys.exit(1)

    pairCount = 1
    colorPairs = {}
    for c1 in colors:
        colorPairs[c1] = {}
        for c2 in colors:
#            if (c2 == WHITE and c1 == BLACK):
#                continue
            #Logger.put('%s & %s = %d' % (colorValueToName[c2], colorValueToName[c1], pairCount))
            curses.init_pair(pairCount, c2, c1)
            colorPairs[c1][c2] = pairCount
            pairCount = pairCount + 1
    colorPairs[BLACK][WHITE] = 0
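
A minimal usage sketch of the colorPairs table built by init(); the getPairAttr helper and the curses constants below are illustrative assumptions, not part of the original module.

import curses

# Hypothetical constants mirroring the background/foreground colors used above.
BLACK, WHITE = curses.COLOR_BLACK, curses.COLOR_WHITE

def getPairAttr(colorPairs, background, foreground):
    # colorPairs[background][foreground] holds the pair number registered with
    # curses.init_pair() in init(); color_pair() turns it into an attribute
    # usable with addstr()/addnstr().
    return curses.color_pair(colorPairs[background][foreground])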
Example No. 5
class Engine:
    def __init__(self):
        self.logger = Logger()

    def start(self,sock):
        (peerIP,peerPort) = sock.getpeername()

        while True:
            # Receive message from boss
            try:
                msg = sock.recv(1024).strip()
                self.logger.log("DEBUG","Boss said: {0}".format(msg))



                if msg.upper() == "BYE":
                    sock.sendall("[echo]:Bye")
                    sock.close()
                    break

                sock.sendall("[echo]:{0}".format(msg))
            except socket.error:
                self.logger.log("INFO","Bye! Master({0}:{1})".format(peerIP,peerPort))
                sock.close()
                break
Example No. 6
 def __init__(self, keyspace="Twitter",column_family=None):
    if not column_family:
        logger.subsection("ERROR: no column family specified")
        return
    self.keyspace = keyspace
    self.column_family = column_family
    return 
Example No. 7
        def taskComplete(self, nodeUUID, jobUUID, task_nr, client_info):
            """
            Mark task as DONE
            """

            render_server = master.Master().getRenderServer()

            node = render_server.getNode(nodeUUID)
            if node is None:
                return False

            if not node.hasTask(jobUUID, task_nr):
                # Active task mismatch. Happens after network lags and job
                # reassignment.

                Logger.log(('Attempt to complete task {0} of job {1} '
                            'from wrong node {2} (ip {3})')
                    .format(task_nr, jobUUID, nodeUUID,
                            client_info['address'][0]))

                return False

            # Unassign task from node
            node.unassignTask(jobUUID, task_nr)

            job = render_server.getJob(jobUUID)
            if job is None:
                return False

            return render_server.taskComplete(job, task_nr)
Example No. 8
class IP_IRManager:

    def __init__(self):
        self.logger = Logger(False, LOG_FILE)
        self.sql = MasterSql()

    def send_to_gc(self, json_obj, dev, hostname):
        ir_addr = dev['addr_dst']
        tcp_ip = dev['addr']
        tcp_port = int(dev['plus1'])
        if not tcp_port:
            tcp_port = int(TCP_PORT)

        request = str(ir_addr)
        request += '\r'

        data = ''
        try:
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            s.connect((tcp_ip, tcp_port))
            s.send(request.encode())
            data = s.recv(BUFFER_SIZE).decode()
            s.close()
        except Exception as e:
            self.logger.error(e)
        self.logger.info("Received Global Cache data: " + str(data))
Example No. 9
 def executeSeisSolgen( self,
                        i_pathToSeisSolGen,
                        i_commandLineParameters ):
     l_bashCommand = i_pathToSeisSolGen + ' ' + i_commandLineParameters
     l_logger.log('generating matrix kernels using '+l_bashCommand, 2)
     l_bashProcess = subprocess.Popen(l_bashCommand.split(), cwd='.', stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
     l_logger.log( l_bashProcess.communicate()[0], 3)
Example No. 10
def monitor_loop():
    """Capture any incoming messages and log to CSV file"""

    radio.receiver()

    while True:
        # See if there is a payload, and if there is, process it
        if radio.is_receive_waiting():
            trace("receiving payload")
            payload = radio.receive()
            try:
                decoded = OpenThings.decode(payload)
                now = time.time()
            except OpenThings.OpenThingsException as e:
                warning("Can't decode payload:" + str(e))
                continue
                      
            OpenThings.showMessage(decoded, timestamp=now)
            # Any device that reports will be added to the non-persistent directory
            Registry.update(decoded)
            ##trace(decoded)
            Logger.logMessage(decoded)

            # Process any JOIN messages by sending back a JOIN-ACK to turn the LED off
            if len(decoded["recs"]) == 0:
                # handle messages with zero recs in them silently
                print("Empty record:%s" % decoded)
            else:
                # assume only 1 rec in a join, for now
                if decoded["recs"][0]["paramid"] == OpenThings.PARAM_JOIN:
                    mfrid     = OpenThings.getFromMessage(decoded, "header_mfrid")
                    productid = OpenThings.getFromMessage(decoded, "header_productid")
                    sensorid  = OpenThings.getFromMessage(decoded, "header_sensorid")
                    Messages.send_join_ack(radio, mfrid, productid, sensorid)
Example No. 11
def do_parse_recent(regex, date_str):
    Logger.debug('do parse: ' + regex + " - " + date_str)
    now = datetime.date.today()
    delta = datetime.timedelta(days=7)
    start = now - delta
    (day1, ndays) = calendar.monthrange(start.year, start.month)
    return (start.year, start.month, start.day, now.year, now.month, now.day)
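
A small standard-library illustration of the tuple do_parse_recent() returns: the start point is seven days before today, and both endpoints are expanded into (year, month, day) fields.

import datetime

today = datetime.date.today()
start = today - datetime.timedelta(days=7)
# Same shape as the return value of do_parse_recent():
recent_range = (start.year, start.month, start.day, today.year, today.month, today.day)
print(recent_range)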
Example No. 12
    def __init__(self, core):
        self.core = core

        Logger.info(logger, 'Module loaded.')

        if not self.core.files:
            Logger.error(logger, "Error connecting to files manager in core!")
Example No. 13
 def logParams(self, name):
    sep = ''
    paramsOut = Logger.logFile(name + 'Params')
    contextsOut = Logger.logFile(name + 'Contexts')
    for featureCounter in self.featureCounters:
       if isinstance(featureCounter, KNFeatureChain) or \
          isinstance(featureCounter, KNFeatureGrid):
          featureCounter.logParams(paramsOut, contextsOut)
          continue
       featureName = featureCounter.feature.name
       contexts = sorted(featureCounter.contextCounts.iteritems(),
             key=lambda (key, count): (count, key), reverse=True)
       for key,count in contexts:
          if count > 0:
             contextsOut.log(featureName + '\t' + _contextToStr(key) + '\t' +
                   str(count))
       counts = sorted(featureCounter.counts.iteritems(),
             key=lambda (key, count): (key[0], count, key[1]), reverse=True)
       for key,count in counts:
          if count > 0:
             paramsOut.log(featureName + '\t' + _contextToStr(key[0]) + '\t'
                   + _contextToStr(key[1]) + '\t' + str(count))
    self.mixer.logParams(name)
    paramsOut.close()
    contextsOut.close()
Example No. 14
def withdraw(req, arg):
	"""%withdraw <address> [amount] - Sends 'amount' coins to the specified dogecoin address. If no amount specified, sends the whole balance"""
	if len(arg) == 0:
		return req.reply(gethelp("withdraw"))
	acct = Irc.account_names([req.nick])[0]
	if not acct:
		return req.reply_private("You are not identified with freenode services (see /msg NickServ help)")
	if Transactions.lock(acct):
		return req.reply_private("Your account is currently locked")
	if len(arg) == 1:
		amount = max(Transactions.balance(acct) - 1, 1)
	else:
		try:
			amount = parse_amount(arg[1], acct, all_offset = -1)
		except ValueError as e:
			return req.reply_private(str(e))
	to = arg[0]
	if not Transactions.verify_address(to):
		return req.reply_private(to + " doesn't seem to be a valid dogecoin address")
	token = Logger.token()
	try:
		tx = Transactions.withdraw(token, acct, to, amount)
		req.reply("Coins have been sent, see http://dogechain.info/tx/%s [%s]" % (tx, token))
	except Transactions.NotEnoughMoney:
		req.reply_private("You tried to withdraw Ɖ%i (+Ɖ1 TX fee) but you only have Ɖ%i" % (amount, Transactions.balance(acct)))
	except Transactions.InsufficientFunds:
		req.reply("Something went wrong, report this to mniip [%s]" % (token))
		Logger.irclog("InsufficientFunds while executing '%s' from '%s'" % (req.text, req.nick))
Example No. 15
 def post(self):
     result = {'status': False}
     Logger.debug('in register')
     try:
         user_id = self.get_argument('user_id', '').lower()
         user_name = self.get_argument('user_name', '').lower()
         password = self.get_argument('password', '')
         lang = self.get_argument('lang', 'zh-CN')
         Logger.debug('user_id:' + user_id + ', user_name:' + user_name + ', password:' + password)
         if user_id == '' or user_name == '' or password == '':
             self.write(json.dumps(result))
             return
         
         user = MongoHelper.get_user_by_id(user_id)
         if user is not None:
             self.write(json.dumps(result))
             return
         
         user = {'user_id': user_id, 'user_name': user_name, 'password': password, 'lang': lang}
         server = MongoHelper.allocate_user_server()
         user['server'] = server
         user['token'] = Utils.generate_access_token(user_id)
         MongoHelper.register_user(user)
         MongoHelper.increase_server_usage(server, 1)
         result['status'] = True
         user["_id"] = '';
         result['user'] = user
         result['token'] = user['token']
         result['server'] = user['server']
         
         Utils.create_face_group(user_id)
     finally:
         self.write(json.dumps(result))
Example No. 16
 def writeFile(self, location, datName, dict):
     """
     Called when you want the object to write out a dictionary of values to the specified datFile
     
     Args:
         | location (String): The absolute location specified from the Worker/Engine as to where the datFile is to be written
         | datName (String): Name for the Datfile
         | dict (Dictionary): Takes a dictionary of q, i, errors
     
     """
         
         
     self.location = location
     self.datName = datName
     self.data = dict
     
     #Used for checking if the folder exists.. will be used later in engine
     if not os.path.exists(self.location):
         os.makedirs(self.location)
                
     loc = self.location+self.datName                
     f = open(loc, 'w')
     f.write(self.datName + "\n")
     formatting  = '%'+str(4 + self.accuracy)+'s %'+str(6 + self.accuracy)+'s %'+str(6 + self.accuracy)+'s \n'
     f.write(formatting % ('q', 'I', 'Err')) #Needed for string formatting
     for i in range(len(self.data['q'])):
         formatting = '%'+str(8 + self.accuracy)+'.'+str(self.accuracy)+'f %'+str(6 + self.accuracy)+'.'+str(self.accuracy)+'f %'+str(6 + self.accuracy)+'.'+str(self.accuracy)+'f \n'
         f.write(formatting % (self.data['q'][i], self.data['i'][i], self.data['errors'][i]))        
     f.close()
     Logger.logger(self.datName, "DatFile Written")
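
A short sketch of the printf-style column formatting used above, assuming accuracy = 4: the header uses right-aligned string fields and each data row uses fixed-precision floats, so the q/I/Err columns line up.

accuracy = 4
header_fmt = '%' + str(4 + accuracy) + 's %' + str(6 + accuracy) + 's %' + str(6 + accuracy) + 's \n'
row_fmt = ('%' + str(8 + accuracy) + '.' + str(accuracy) + 'f '
           '%' + str(6 + accuracy) + '.' + str(accuracy) + 'f '
           '%' + str(6 + accuracy) + '.' + str(accuracy) + 'f \n')
print(header_fmt % ('q', 'I', 'Err'), end='')
print(row_fmt % (0.01, 123.4567, 0.0012), end='')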
Example No. 17
 def post(self):
     result = {'status': False}
     Logger.debug('in payment')
     try:
         user_id = self.get_argument('user_id', '')
         token = self.get_argument('token', '')
         plan = self.get_argument('plan','')
         Logger.info('userid: ' + user_id + ', token:' + token + ', plan:' + plan)
         
         if user_id == '':
             Logger.debug('user id null')
             return
         
         user = MongoHelper.get_user_by_id(user_id)
         if user is None:
             Logger.debug('user none')
             return
          if token != user['token']:
             self.write(json.dumps(result))
             Logger.debug('token wrong')
             return
         
         result['quota'] = Utils.update_user_payment(user_id, plan)
         result['status'] = True
             
     finally:
         self.write(json.dumps(result))
Example No. 18
class IP_IRManager:

    ## The constructor.
    def __init__(self):
        ## Logger object for formatting and printing logs
        self.logger = Logger(False, LOG_FILE)
        ## SQL object for managing database
        self.sql = MasterSql()

    ## Sends a packet to global cache.
    #
    # @param json_obj Not used here.
    # @param dev Object containing the device information describing which device to send the packet to.
    # @param hostname Not used here.
    # @return None
    def send_to_gc(self, json_obj, dev, hostname):
        ir_addr = dev["addr_dst"]
        tcp_ip = dev["addr"]
        tcp_port = int(dev["plus1"])
        if not tcp_port:
            tcp_port = int(TCP_PORT)
        request = str(ir_addr)
        request += "\r"
        data = ""
        try:
            s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            s.connect((tcp_ip, tcp_port))
            s.send(request.encode())
            data = s.recv(BUFFER_SIZE).decode()
            s.close()
        except Exception as e:
            self.logger.error(e)
        self.logger.debug("Received Global Cache data :" + str(data))
Example No. 19
def vectorizeMatrix( i_pathToDenseMatrix,
                     i_baseName,
                     i_pathToOutputDirectory,
                     i_drawPlots ):
  l_logger.log('vectorizing: '+ i_pathToDenseMatrix, 2)

  # read matrix
  l_denseMatrix = mmread( i_pathToDenseMatrix )

  # create list for the vectorized result.
  l_vectorMatrix = list()

  # do the vectorization
  for l_i in xrange(len(l_denseMatrix[:,0])):
    l_logger.log('current column: ' + str(l_i), 3)
    l_vectorMatrix.append( computeCoveringIntervals(l_denseMatrix[:,l_i], 4) )

  # write vectorized matrix to disk
  writeSparseVectorizedMatrices( l_denseMatrix, transpose(l_vectorMatrix), i_baseName+'_vec', i_pathToOutputDirectory)

  # draw plots
  if i_drawPlots:
    l_matrixConverter.plotSparsityPattern( l_denseMatrix,
                                           i_baseName,
                                           i_pathToOutputDirectory )

    plotVectorization( l_vectorMatrix,
                       i_baseName,
                       i_pathToOutputDirectory )
Example No. 20
	def privmsg(self, targ, text, priority = None):
		Logger.log("c", self.instance + ": %s <- %s " % (targ, text))
		for i in xrange(0, len(text), 350):
			if priority:
				Irc.instance_send(self.instance, ("PRIVMSG", targ, text[i:i+350]), priority = priority)
			else:
				Irc.instance_send(self.instance, ("PRIVMSG", targ, text[i:i+350]))
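
A minimal sketch of the chunking loop above: long text is cut into 350-character slices so each PRIVMSG stays within the IRC line-length limit.

text = "x" * 900
# Equivalent slicing to the for-loop above, one PRIVMSG per slice.
chunks = [text[i:i + 350] for i in range(0, len(text), 350)]
print([len(c) for c in chunks])  # -> [350, 350, 200]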
Example No. 21
        def InitGL(self):
            self.custom_init()
            Logger.trace("info",  "EchoesGLCanvas.InitGL()")

            # set viewing projection 
            # done in OnSize

            # model projection mode
            glMatrixMode(GL_MODELVIEW)
            glClearDepth(1.0)
            
            glEnable(GL_DEPTH_TEST)
            glEnable(GL_NORMALIZE)

            glEnable(GL_COLOR_MATERIAL)
            glEnable(GL_BLEND)
            glBlendFunc(GL_SRC_ALPHA, GL_ONE_MINUS_SRC_ALPHA)

            glShadeModel(GL_SMOOTH)
            glHint(GL_LINE_SMOOTH_HINT, GL_NICEST)
            glEnable(GL_LINE_SMOOTH)

            glEnable(GL_LIGHTING)
            glEnable(GL_LIGHT0)

            self.setLight(0.8)
            self.targetLightLevel = 0.8
                        
            self.lineWidthRange = glGetIntegerv(GL_LINE_WIDTH_RANGE)
            try:
                self.lineWidthRange[1]
            except IndexError:
                Logger.warning( "*** HACK *** setting lineWidthRange manually")
                self.lineWidthRange = [1, 10]
Example No. 22
    def waitForStatusLineUpdateConfirmation(self, inputHandler = None):

        if (inputHandler == None):
            inputHandler = InputHandler.SingletonKeyboardInputHander(self.screen)

        promptMessage = 'PRESS A KEY'
        promptMessageLength = len(promptMessage)

        y = self.getTerminalHeight() - 1
        x = self.getTerminalWidth() - self.maxSideBarLen - 1
        promptMessageAttr = Colors.BLINK | Colors.getPairNumber('BLACK', 'YELLOW')

        self.screen.addnstr(y, x, promptMessage, promptMessageAttr)
        
        self.draw()

        curses.flash() #Visual bell

        keyPressed = inputHandler.waitForKey()

        Logger.put(' key found: %s' % (str(keyPressed)))

        self.resetStatusLinesAppended()
        #erase the prompt message
        self.screen.addnstr(y, x, ' ' * promptMessageLength, promptMessageAttr)

        self.draw()
Example No. 23
        def OnEndScenario(self, evt):
            self.scenario = None
            if evt.name == "Intro" or evt.name == "BubbleWorld" or "Garden" in evt.name:

                # Introduce a new transition bubble except in the Intro scene
                trans_bubble = None
                if evt.name == "Intro":
                    for id,object in self.objects.items():
                        if isinstance(object, objects.Bubbles.EchoesBubble):
                            trans_bubble=object
                            break
                    for id, object in self.sceneElements.items():
                        if isinstance(object, environment.Menu.UserMenu):
                            object.remove(False)                    
                                            
                if not trans_bubble:
                    trans_bubble = objects.Bubbles.EchoesBubble(self.app, True, fadeIn=True, fadingFrames=100)
                    trans_bubble.setStartPos((0,0,0))
                
                trans_bubble.interactive = False
                trans_bubble.colour = "red"
                trans_bubble.moving = True
                trans_bubble.setTargetPos((self.orthoCoordWidth, self.orthoCoordWidth / self.aspectRatio, self.orthoCoordDepth))
                trans_bubble.removeAtTargetPos = True
                trans_bubble.removeAction = "PublishScenarioEnded"
                trans_bubble.callback = evt.callback
                trans_bubble.removeActionArgs = evt.name                                

            else:
                Logger.warning("Unknown scenario in endScenario: " + evt.name)
                evt.callback.ice_response()
                
            self.renderPiavca = False
            self.currentScene = None
Example No. 24
 def OnRemoveObject(self, evt):
     if int(evt.objId) in self.app.canvas.objects:
         o = self.app.canvas.objects[int(evt.objId)]
         o.remove()
     else:
         Logger.warning("No object " + evt.objId + " in world, not removing")
     evt.callback.ice_response()
Example No. 25
def get_images_by_tags_array(user_id, tags_list, image):
    Logger.debug("get_images_by_tags_array: " + str(tags_list))
    image_res = []
    for tags in tags_list:
        img_list = get_image_by_tags(user_id, tags)
        image_res.append(set(img_list))
    image_res.append(set(image))

    Logger.debug("get_images_by_tags_array set list: " + str(image_res))
    # [[set(), set()...]]
    inter_sec = []
    if len(image_res) > 1:
        for i in range(1, len(image_res) + 1):
            inter = []
            for i in combinations(image_res, i):
                res = set.intersection(*i)
                inter.append(res)

            inter_sec.append(inter)
    else:
        inter_sec = [image_res]

    final_list = []
    inter_sec.reverse()
    image_pool = set()
    for i in inter_sec:
        for s in i:
            if s not in final_list:
                insert_set = s - (s & image_pool)
                if insert_set:
                    final_list.append(insert_set)
                image_pool = image_pool | s

    return final_list
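
A small self-contained illustration of the combination/intersection step above: for every subset size, the per-tag image sets are intersected, so images matching more tags can later be ranked first.

from itertools import combinations

image_sets = [{"a", "b", "c"}, {"b", "c"}, {"c", "d"}]
for size in range(1, len(image_sets) + 1):
    for combo in combinations(image_sets, size):
        # Images common to every set in this combination of tags.
        print(size, set.intersection(*combo))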
Example No. 26
def _nick(instance, source, newnick):
	nick = Irc.get_nickname(source)
	for channel in Global.account_cache:
		if nick in Global.account_cache[channel]:
			Global.account_cache[channel][newnick] = Global.account_cache[channel][nick]
			Logger.log("w", "%s -> %s in %s" % (nick, newnick, channel))
			del Global.account_cache[channel][nick]
Example No. 27
def get_similar_tags(user_id, input_tags):
    filename = get_user_path(user_id) + "/" + "image_indexer.dat"
    stored_tags = mc.get(user_id + "_image")
    if not stored_tags:
        if not os.path.exists(filename):
            stored_tags = [[], []]
        else:
            with open(filename, "rb") as fp:
                stored_tags = pickle.load(fp)

    if stored_tags is None:
        return None

    Logger.debug("indexer keys: " + str(stored_tags[0]))
    result = []
    for input_tag in input_tags:
        rate_index = []
        tag_list = []
        for stored_tag in stored_tags[0]:
            rate = fuzz.ratio(input_tag, stored_tag)
            if rate >= 80:
                tag_list.insert(bisect.bisect(rate_index, rate), stored_tag)
                bisect.insort(rate_index, rate)
        result.append(tag_list)

    return result
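
A minimal sketch of the bisect bookkeeping above, with a stand-in scorer in place of fuzz.ratio (the original assumes fuzzywuzzy): candidate tags are kept ordered by fuzzy-match score by inserting each tag at the position its score occupies in a parallel sorted score list.

import bisect

def ratio(a, b):
    # Stand-in scorer for illustration only; the original uses fuzz.ratio.
    return 100 if a == b else 85 if a in b or b in a else 0

rate_index, tag_list = [], []
for stored_tag in ["cat", "cats", "dog"]:
    rate = ratio("cat", stored_tag)
    if rate >= 80:
        tag_list.insert(bisect.bisect(rate_index, rate), stored_tag)
        bisect.insort(rate_index, rate)
print(tag_list)  # tags ordered by their match score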
Example No. 28
def connect_instance(instance):
	Logger.log("c", instance + ": Connecting")
	try:
		host = random.choice(socket.gethostbyname_ex(Config.config["host"])[2])
		sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
		if Config.config.get("ssl", None):
			sock = ssl.wrap_socket(sock, ca_certs = Config.config["ssl"]["certs"], cert_reqs = ssl.CERT_REQUIRED)
		sock.connect((host, Config.config["port"]))
		sock.settimeout(0.1)
	except socket.error as e:
		type, value, tb = sys.exc_info()
		Logger.log("me", "ERROR while connecting " + instance)
		Logger.log("me", repr(e))
		Logger.log("me", "".join(traceback.format_tb(tb)))
		del Global.instances[instance]
		threading.Thread(target = reconnect_later, args = (60, instance)).start()
		return
	writer = threading.Thread(target = writer_thread, args = (instance, sock))
	reader = threading.Thread(target = reader_thread, args = (instance, sock))
	Global.instances[instance].reader_dying.clear()
	Global.instances[instance].reader_dead.clear()
	Global.instances[instance].writer_dying.clear()
	Global.instances[instance].writer_dead.clear()
	Global.instances[instance].lastsend = time.time()
	writer.start()
	reader.start()
	Logger.log("c", instance + ": Initiating authentication")
	instance_send(instance, ("CAP", "REQ", "sasl"), lock = False)
	instance_send(instance, ("NICK", instance), lock = False)
	instance_send(instance, ("USER", Config.config["user"], "*", "*", Config.config["rname"]), lock = False)
Example No. 29
	def applyUserStyleCss(self, send_signal_flag) :
		user_style_css_file_path = self.userStyleCssPath()
		user_style_css_file = Qt.QFile(user_style_css_file_path)
		user_style_css_file_stream = Qt.QTextStream(user_style_css_file)

		if not user_style_css_file.exists() :
			if user_style_css_file.open(Qt.QIODevice.WriteOnly) :
				Logger.debug(Qt.QString("Created empty CSS file \"%1\"").arg(user_style_css_file_path))
			else :
				Logger.warning(Qt.QString("Cannot open CSS file \"%1\" for reading").arg(user_style_css_file_path))
			user_style_css_file.close()

		if self.__user_style_css_watcher.files().count() < 1 :
			 self.__user_style_css_watcher.addPath(user_style_css_file_path)

		if user_style_css_file.open(Qt.QIODevice.ReadOnly) :
			Logger.debug(Qt.QString("Apply user CSS from \"%1\"").arg(user_style_css_file_path))
			user_style_css = Qt.QString("%1\n%2\n").arg(DefaultCss).arg(user_style_css_file_stream.readAll())
			user_style_css.remove(Qt.QRegExp("/\\*([^*]|\\*[^/]|\\n)*\\*/"))
			user_style_css_file.close()

			if self.__css.trimmed() != user_style_css.trimmed() :
				self.__css = user_style_css
				if send_signal_flag :
					Logger.debug("CSS has been updated")
					self.cssChangedSignal()
		else :
			Logger.warning(Qt.QString("Cannot open CSS file\"%1\" for reading").arg(user_style_css_file_path))
Example No. 30
def update_image_indexer(user_id, img):
    filename = get_user_path(user_id) + "/" + "image_indexer.dat"
    indexer = mc.get(user_id + "_image")
    if not indexer:
        if not os.path.exists(filename):
            indexer = [[], []]
        else:
            with open(filename, "rb") as fp:
                indexer = pickle.load(fp)

    if indexer is None:
        return

    for tag in img["tags"]:
        if indexer[0].count(tag) == 0:
            indexer[0].append(tag)
            indexer[1].append([img["image_name"]])
        else:
            tag_index = indexer[0].index(tag)
            indexer[1][tag_index].append(img["image_name"])

    with open(filename, "wb") as fp:
        pickle.dump(indexer, fp)

    mc.set(user_id + "_image", indexer)
    Logger.debug("image indexer updated: " + str(indexer))
Example No. 31
# Copyright (C) 2017 Siavoosh Payandeh Azad, Stephen Oyeniran
# for each new function we start from empty set!
import Logger
import sys
import copy
import itertools
import time
import package

package.generate_folders(package.generated_files_folder)
sys.stdout = Logger.Logger(package.generated_files_folder)

if "-sp" in sys.argv[1:]:
    saf_output_patterns_file_name = package.generated_files_folder + "/" + "SAF" + sys.argv[
        sys.argv.index('-sp') + 1]
else:
    saf_output_patterns_file_name = package.generated_files_folder + "/" + "SAFpatterns.txt"


def check_if_sufficient(function_dict, function_id_1, function_id_2,
                        list_patterns, debug, verbose):
    or_op = "0" * package.data_width
    if debug:
        print "\t--------------------"
        print "\tchecking if sufficient number of ones reached!"
        print "\t\tline\top1\t\top2\t\tfunc_1 \t\t func_2\t\txor(1,2)\tand(1,xor)\tor(prev_or,and)"
        print "\t\t" + "------------------------------------------" * 3
    for i in list_patterns:
        xor_op = format(
            int(function_dict[i][function_id_1], 2)
            ^ int(function_dict[i][function_id_2], 2),
Example No. 32
    def _updateVtecRecords(self, vtecRecords, newRecords, issueTime):
        '''Merges the previous active table and new records into a new table.

        Keyword Arguments:
        vtecRecords -- list of dictionaries representing existing vtecRecords,
          non-consolidated form.
        newRecords -- list of dictionaries representing new vtecRecords to be
          merged, non-consolidated form.
        issueTime -- time of issuance, in units of milliseconds since epoch of
          Jan 1 1970 at 0000z.

        Returns a tuple of 3 values:
           updated table -- merged results
           purged records -- vtecRecords that were purged
           changeFlag -- True if anything has changed.
        '''

        updatedTable = []
        changedFlag = False


        #delete "obsolete" records from the old table.
        vts = VTECTableSqueeze(issueTime)
        vtecRecords, tossRecords = vts.squeeze(vtecRecords)
        for r in tossRecords:
            r['state'] = "Purged"
        del vts
        if len(tossRecords):
            changedFlag = True

        #expand out any 000 UGC codes, such as FLC000, to indicate all
        #zones. We do this by finding existing records with the 'id'
        #that matches.
        newRecExpanded = []
        compare1 = ['phen', 'sig', 'officeid', 'etn']
        for newR in newRecords:
            if newR['id'][3:6] == "000":
                for oldR in vtecRecords:
                    if self.vtecRecordCompare(oldR, newR, compare1) and \
                      oldR['id'][0:2] == newR['id'][0:2] and \
                      (oldR['act'] not in ['EXP', 'CAN', 'UPG'] or \
                       oldR['act'] == 'EXP' and oldR['endTime'] > issueTime):
                        newE = copy.deepcopy(newR)
                        newE['id'] = oldR['id']
                        newRecExpanded.append(newE)
            else:
                newRecExpanded.append(newR)
        newRecords = newRecExpanded
        
        # match new records with old records whose issue times fall in
        # different years and whose event times overlap. Want to reassign
        # ongoing events from last year's issueTime to be 12/31/2359z, rather
        # than the real issueTime (which is this year's).
        cyear = time.gmtime(issueTime)[0]  #current year issuance time
        lastYearIssueTime = time.mktime((cyear-1, 12, 31, 23, 59, 
          0, -1, -1, -1))
        compare = ['phen', 'sig', 'officeid', 'etn']
        for newR in newRecords:
            for oldR in vtecRecords:
                if self.vtecRecordCompare(oldR, newR, compare):
                  oldYear = time.gmtime(oldR['issueTime'])[0]
                  newYear = time.gmtime(newR['issueTime'])[0]
                  if oldYear < newYear and self._vtecRecordsOverlap(oldR, newR):
                      LogStream.logVerbose("Reset issuance time to last year:",
                        "\nNewRec: ", self.printEntry(newR),
                        "OldRec: ", self.printEntry(oldR))
                      newR['issueTime'] = lastYearIssueTime
                      LogStream.logVerbose("Changed To:", self.printEntry(newR))
                  

        # split records out by issuance year for processing
        newRecDict = {}   #key is issuance year
        oldRecDict = {}
        years = []
        for newR in newRecords:
            issueYear = time.gmtime(newR['issueTime'])[0]
            records = newRecDict.get(issueYear, [])
            records.append(newR)
            newRecDict[issueYear] = records
            if issueYear not in years:
                years.append(issueYear)

        for oldR in vtecRecords:
            issueYear = time.gmtime(oldR['issueTime'])[0]
            records = oldRecDict.get(issueYear, [])
            records.append(oldR)
            oldRecDict[issueYear] = records
            if issueYear not in years:
                years.append(issueYear)

        # process each year
        compare = ['id', 'phen', 'sig', 'officeid']
      
        for year in years:
            newRecords = newRecDict.get(year,[])
            oldRecords = oldRecDict.get(year,[])

            # now process the old and new records
            for oldR in oldRecords:
 
                keepflag = True
                for newR in newRecords:
    
                    if newR['act'] == "ROU":
                        continue
    
                    if self.vtecRecordCompare(oldR, newR, compare):
                        #we don't keep older records with same etns
                        if newR['etn'] == oldR['etn']:
                            keepflag = False   #don't bother keeping this record
                            break
    
                        #higher etns
                        elif newR['etn'] > oldR['etn']:
                            #only keep older etns if end time hasn't passed
                            #or old record is UFN and CAN:
                            ufn = oldR.get('ufn', 0)
                            if issueTime > oldR['endTime'] or \
                              (oldR['act'] == "CAN" and ufn) or \
                              oldR['act'] in ['EXP','UPG','CAN']:
                                keepflag = False
                                break
    
                        #lower etns, ignore (keep processing)
    
                if not keepflag:
                    oldR['state'] = "Replaced"
                    changedFlag = True
                updatedTable.append(oldR)

        #always add in the new records (except for ROU)
        compare = ['id', 'phen', 'sig', 'officeid', 'etn']
        for year in newRecDict.keys():
            newRecords = newRecDict[year]
            for newR in newRecords:
                if newR['act'] != "ROU":

                    #for COR, we need to find the original action, and 
                    #substitute it.
                    if newR['act'] == "COR":
                        for rec in updatedTable:
                            if self.vtecRecordCompare(rec, newR, compare):
                                LogStream.logVerbose(\
                                  "COR record matched with:",
                                  "\nNewRec: ", self.printEntry(newR),
                                  "OldRec: ", self.printEntry(rec),
                                  "\nReassign action to: ", rec['act'])
                                newR['act'] = rec['act']
                                break
                        #due to above code, this should never execute
                        if newR['act'] == "COR":
                            LogStream.logProblem("COR match not found for:\n",
                              self.printEntry(newR), "\nRecord discarded.")
                              
                    if newR['act'] != "COR":
                        updatedTable.append(newR)
                        changedFlag = True

        return updatedTable, tossRecords, changedFlag
Example No. 33
    def ingestVTEC(self, vtecAnalyzedRecords, vtecRecords, issueTime):
        '''Performs the ingester operation for VTEC codes.

        Keyword Arguments:
        vtecAnalyzedRecords -- List of dictionaries representing calculated
          vtec records to be merged into the vtecRecords.  Non-consolidated
          by id.
        vtecRecords -- List of dictionaries representing already issued vtec
          records from the database. Non-consolidated by id
        issueTime -- current time for processing records, same as issuance time.
          Units are milliseconds since epoch of Jan 1 1970 0000z. Affects
          purging of old records.

        Returns a dictionary containing the following elements, which are
        generally vtecRecords based on action performed:    
           updatedVtecRecords -- vtecRecords merge final results
           replacedRecords -- vtecRecords that were replaced
           decodedRecords -- vtecRecords that were decoded (incoming analyzed)
           otherRecords -- other vtecRecords not affected
           purgedRecords -- vtecRecords that were purged
           changedFlag -- indicates whether any records were changed
        '''

        LogStream.logDebug("Ingest VTEC...............")

        # ensure we have an issueTime in the records, and a 'state'
        for rec in vtecAnalyzedRecords:
            rec['issueTime'] = issueTime
            rec['state'] = 'Decoded'

        #add in the Previous state to the existing vtec records
        for r in vtecRecords:
            r['state'] = "Previous"

        #perform the merging
        updatedTable, purgeRecords, changedFlag = \
          self._updateVtecRecords(vtecRecords, vtecAnalyzedRecords,
          issueTime)

        replaced  = [r for r in updatedTable if r['state'] == "Replaced"]
        decoded = [r for r in updatedTable if r['state'] == "Decoded"]
        other = [r for r in updatedTable if r['state'] not in ['Replaced', 'Decoded']]
        self._vtecRecords = [r for r in updatedTable if r['state'] not in ['Replaced', 'Purged']]

        #strip out the "state" field
        for r in self._vtecRecords:
            del r['state']

        # return the information about changes
        d = {}
        d['updatedVtecRecords'] = self._vtecRecords
        d['replacedRecords'] = replaced
        d['decodedRecords'] = decoded
        d['otherRecords'] = other
        d['purgedRecords'] = purgeRecords
        d['dbChanged'] = changedFlag

        LogStream.logDebug("Updated VTEC Records:\n", self.printVtecRecords(
          self._vtecRecords, combine=True))
        LogStream.logDebug("Replaced VTEC Records:\n", self.printVtecRecords(
          replaced, combine=True))
        LogStream.logDebug("Decoded VTEC Records:\n", self.printVtecRecords(
          decoded, combine=True))
        LogStream.logDebug("Purged VTEC Records:\n", self.printVtecRecords(
          purgeRecords, combine=True))

        return d
Example No. 34
def usage():
    '''Prints bot usage'''
    print("Schedule Bot by piLigrym and Reni Min.\n\nUsage:\n"
          "python schedule.py -c '<config_path>'\n\nOptions:\n\n"
          "-h, --help\t-\thelp\n-c, --config\t-\tPath to configuration file")


if __name__ == '__main__':
    try:
        opts, args = getopt.getopt(sys.argv[1:], "hc:", ["help", "config="])
    except getopt.GetoptError as err:
        print(str(err))
        usage()
        sys.exit(2)
    config = None
    for o, a in opts:
        if o in ("-h", "--help"):
            usage()
            sys.exit()
        elif o in ("-c", "--config"):
            config = a
        else:
            assert False, "unhandled option"
    BotConfig(config)
    bot = ScheduleBot(Updater(BotConfig.token))
    addHandlers(bot)
    try:
        bot.run()
    except:
        Logger.log(traceback.format_exc())
Example No. 35
#  See the License for the specific language governing permissions and
#  limitations under the License.

import time

import Constants
import Logger
import Style
import Utils
from AbstractTreeWidget import AbstractTreeWidget
from AbstractWidgetItem import AbstractWidgetItem
from ItemDelegate import JobThinProgressBarDelegate
from Manifest import QtCore, QtGui, QtWidgets, opencue
from MenuActions import MenuActions

logger = Logger.getLogger(__file__)

COLUMN_COMMENT = 1
COLUMN_EAT = 2
COLUMN_MAXRSS = 13

FONT_BOLD = QtGui.QFont("Luxi Sans", -1, QtGui.QFont.Bold)


def getEta(stats):
    if stats.runningFrames:
        remaining = (((stats.pendingFrames - 1) * stats.avgFrameSec) +
                     stats.highFrameSec)
        if remaining:
            return Utils.secondsToHHHMM(remaining / stats.runningFrames)
    return "-"
Example No. 36
class VmfShell(object):
    """
		Interative shell to Vmfcat. The shell can be use to build a VMF message.
	"""
    CMD_SAVE = 'save'
    CMD_LOAD = 'load'
    CMD_SEARCH = 'search'
    CMD_SET = 'set'
    CMD_SHOW = 'show'
    CMD_HEADER = 'header'
    CMD_HELP = 'help'
    CMD_QUIT = 'quit'

    PROMPT = "<<< "

    def __init__(self, _output=sys.stdout):
        """
			Initializes the user interface by defining a Logger object
			and defining the standard output.
		"""
        self.output = _output
        self.logger = Logger(_output, _debug=True)

    def start(self):
        """
			Starts the main loop of the interactive shell.
		"""

        # Command entered by the user
        cmd = ""
        self.logger.print_info(
            "Type 'help' to show a list of available commands.")

        while (cmd.lower() != VmfShell.CMD_QUIT):
            try:
                self.output.write(VmfShell.PROMPT)
                user_input = sys.stdin.readline()
                tokens = user_input.rstrip().split()
                cmd = tokens[0]
                if (cmd.lower() == VmfShell.CMD_QUIT):
                    pass
                elif (cmd.lower() == VmfShell.CMD_HELP):
                    if (len(tokens) == 1):
                        self.logger.print_info("{:s} <field>|all".format(
                            VmfShell.CMD_SHOW))
                        self.logger.print_info("{:s} <field> <value>".format(
                            VmfShell.CMD_SET))
                        self.logger.print_info(
                            "{:s} [field] {{bin, hex}}".format(
                                VmfShell.CMD_HEADER))
                        self.logger.print_info("{:s} <field>".format(
                            VmfShell.CMD_HELP))
                        self.logger.print_info("{:s} <field>".format(
                            VmfShell.CMD_SEARCH))
                        self.logger.print_info("{:s} <file>".format(
                            VmfShell.CMD_SAVE))
                        self.logger.print_info("{:s} <file>".format(
                            VmfShell.CMD_LOAD))
                        self.logger.print_info("{:s}".format(
                            VmfShell.CMD_QUIT))
                    else:
                        param = tokens[1]
                        if (param in Params.__dict__.keys()):
                            help_msg = Params.parameters[param]['help']
                            self.logger.print_info(help_msg)
                            if (len(Params.parameters[param]['choices']) > 0):
                                choices_msg = ', '.join([
                                    choice for choice in
                                    Params.parameters[param]['choices']
                                ])
                                self.logger.print_info(
                                    "Available values: {:s}".format(
                                        choices_msg))
                        else:
                            self.logger.print_error(
                                "Unknown parameter/option: {:s}.".format(
                                    param))
                elif (cmd.lower() == VmfShell.CMD_SHOW):
                    #
                    # Displays the value of the given field
                    #
                    if (len(tokens) == 2):
                        param = tokens[1]
                        if (param in Params.parameters.keys()):
                            value = Params.__dict__[param]
                            if (isinstance(value, int)):
                                value = "0x{:02x}".format(value)
                            self.logger.print_info("{} = {}".format(
                                param, value))
                        elif param.lower() == "all":
                            for p in Params.parameters.keys():
                                value = Params.__dict__[p]
                                self.logger.print_info("{} = {}".format(
                                    p, value))
                        else:
                            self.logger.print_error(
                                "Unknown parameter/option {:s}.".format(param))

                    else:
                        self.logger.print_error("Usage: {s} <field>".format(
                            VmfShell.CMD_SHOW))
                elif (cmd.lower() == VmfShell.CMD_SET):
                    #
                    # Sets a field with the given value
                    #
                    # TODO: Issues with parameters with boolean values
                    if (len(tokens) >= 3):
                        param = tokens[1]
                        value = ' '.join(tokens[2:])
                        if (param in Params.__dict__.keys()):
                            if (Params.parameters[param]["choices"]):
                                if (value in Params.parameters[param]
                                    ["choices"]):
                                    Params.__dict__[param] = value
                                    new_value = Params.__dict__[param]
                                    self.logger.print_success(
                                        "{:s} = {:s}".format(param, new_value))
                                else:
                                    self.logger.print_error(
                                        "Invalid value ({:s}) for field {:s}.".
                                        format(value, param))
                                    self.logger.print_info(
                                        "Values for field are : {:s}.".format(
                                            ','.join(
                                                str(Params.parameters[param]
                                                    ["choices"]))))
                            else:
                                Params.__dict__[param] = value
                                new_value = Params.__dict__[param]
                                self.logger.print_success("{:s} = {:s}".format(
                                    param, new_value))
                        else:
                            self.logger.print_error(
                                "Unknown parameter {:s}.".format(param))
                    else:
                        self.logger.print_error(
                            "Usage: {:s} <field> <value>".format(
                                VmfShell.CMD_SET))
                elif (cmd.lower() == VmfShell.CMD_HEADER):
                    field = "vmfversion"
                    fmt = "bin"

                    if (len(tokens) >= 2):
                        field = tokens[1]

                    if (len(tokens) == 3):
                        fmt = tokens[2]

                    vmf_factory = Factory(_logger=self.logger)
                    vmf_message = vmf_factory.new_message(Params)
                    vmf_elem = vmf_message.header.elements[field]

                    if (isinstance(vmf_elem, Field)):
                        vmf_value = vmf_elem.value
                    elif (isinstance(vmf_elem, Group)):
                        vmf_value = "n/a"
                    else:
                        raise Exception(
                            "Unknown type for element '{:s}'.".format(field))

                    vmf_bits = vmf_elem.get_bit_array()
                    output = vmf_bits

                    if (fmt == "bin"):
                        output = vmf_bits.bin
                    if (fmt == "hex"):
                        output = vmf_bits.hex

                    self.logger.print_success("{}\t{}\t{}".format(
                        field, vmf_value, output))
                elif (cmd.lower() == VmfShell.CMD_SEARCH):
                    keyword = ' '.join(tokens[1:]).lower()
                    for p in Params.parameters.keys():
                        help = Params.parameters[p]['help']
                        if (p.lower() == keyword or keyword in help.lower()):
                            self.logger.print_success("{:s}: {:s}".format(
                                p, help))
                elif (cmd.lower() == VmfShell.CMD_SAVE):
                    if len(tokens) == 2:
                        file = tokens[1]

                        tmpdict = {}
                        for param in Params.parameters.keys():
                            value = Params.__dict__[param]
                            tmpdict[param] = value

                        with open(file, 'w') as f:
                            json.dump(tmpdict, f)

                        self.logger.print_success(
                            "Saved VMF message to {:s}.".format(file))
                    else:
                        self.logger.print_error(
                            "Specify a file to save the configuration to.")
                elif (cmd.lower() == "test"):
                    if (len(tokens) == 2):
                        vmf_params = tokens[1]
                    else:
                        vmf_params = '0x4023'
                    s = BitStream(vmf_params)
                    bstream = BitStream('0x4023')
                    vmf_factory = Factory(_logger=self.logger)
                    vmf_message = vmf_factory.read_message(bstream)
                elif (cmd.lower() == VmfShell.CMD_LOAD):
                    if len(tokens) == 2:
                        file = tokens[1]
                        with open(file, 'r') as f:
                            param_dict = json.load(f)
                            for (param, value) in param_dict.iteritems():
                                Params.__dict__[param] = value
                        self.logger.print_success(
                            "Loaded VMF message from {:s}.".format(file))
                    else:
                        self.logger.print_error(
                            "Specify a file to load the configuration from.")

                else:
                    self.logger.print_error(
                        "Unknown command {:s}.".format(cmd))
            except Exception as e:
                self.logger.print_error("An exception as occured: {:s}".format(
                    e.message))
                traceback.print_exc(file=sys.stdout)
Example No. 37
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

import Logger
from Component import (ComponentBase, RuntimeComponent, UninstallComponent,
                       InstallComponent)

LOG = Logger.getLogger("install.quantum")


class QuantumUninstaller(UninstallComponent):
    def __init__(self, *args, **kargs):
        pass


class QuantumInstaller(InstallComponent):
    def __init__(self, *args, **kargs):
        pass


class QuantumRuntime(RuntimeComponent):
    def __init__(self, *args, **kargs):
        pass
Example No. 38
 def __init__(self):
     music_logger = Logger.Logger('music_all.log', 'debug')
     music_logger.logger.debug('This is a test log.')
Example No. 39
 def __init__(self):
     self._DirectNotify__categories = {}
     self.logger = Logger.Logger()
     self.streamWriter = None
Example No. 40
hasMore = False
# Define configuration file locations
basePath = "/home/xiaolvquan/xiaolvquan/cron-job"
logPath = "/home/xiaolvquan/xiaolvquan/cron-job/logs"
globalConfFile = "%s/conf/global.conf" % basePath
globalConf = ParseConf.ParseConf(globalConfFile)
# Get API parameters
appKey = globalConf.readGet('jdUnionApi', 'appKey')
appSecret = globalConf.readGet('jdUnionApi', 'appSecret')
hostUrl = globalConf.readGet('jdUnionApi', 'hostUrl')
# Get database configuration
mdb = MySqlConnection.MySqlConnection()
# MD5 conversion
md5 = lambda pwd: hashlib.md5(pwd).hexdigest()
currentDay = time.strftime('%Y-%m-%d', time.localtime(time.time()))
log = Logger.Logger(logPath + "/fetchOrders_" + currentDay + ".log",
                    level='debug')
unionTagPinGou = ['00000100', '00000111']


def getSign(parameters):
    """ 签名
    @param parameters dict: uri请求参数(包含除signature外的公共参数)
    """
    if "signature" in parameters:
        parameters.pop("signature")
    # Step 1: sort the parameters
    _my_sorted = sorted(parameters.items(),
                        key=lambda parameters: parameters[0])
    # Step 2: concatenate the sorted parameters into a string
    canonicalizedQueryString = ''
    for (k, v) in _my_sorted:
Example No. 41
'''
Created on Jun 30, 2011

@author: benjaminjcampbell



'''
import collections
import Logger

log = Logger.Logger()

createdBy = 'COConver Utility'

import clr
clr.AddReference('CruiseDAL')
clr.AddReference('System')
from System import Array
from System import Object
from CruiseDAL import DAL


def toFloat(val):
    return float(val) if val else 0.0


#utility method for getting float values from metakit
#used for catching obviously corrupt values
#e(epsilon) is the smallest a value can be, before it is considered 0
#max is the largest value we can reasonably expect
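
The comments above describe a guarded float conversion that is not included in this snippet; the sketch below is only an illustration of what such a helper might look like, and its name and default bounds are assumptions.

def boundedFloat(val, e=1e-9, maxValue=1e9):
    # Hypothetical helper: values below epsilon collapse to 0.0, and values
    # above maxValue are treated as obviously corrupt and also return 0.0.
    x = float(val) if val else 0.0
    if abs(x) < e:
        return 0.0
    if abs(x) > maxValue:
        return 0.0
    return x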
Example No. 42
async def on_guild_join(guild):
    db.create_guild(guild.id, guild.name, guild.owner.id)
    await Logger.info("%s joined a new guild: %s" %
                      (client.user.name,
                       paint.color(guild.name + " (%s)" % guild.id, "white")))


@client.event
async def on_guild_remove(guild):
    db.delete_guild(guild.id)
    await Logger.info("%s left a guild: %s" %
                      (client.user.name,
                       paint.color(guild.name + " (%s)" % guild.id, "white")))


try:
    client.loop.run_until_complete(client.start(config.CONFIG["TOKEN"]))
except ConnectionResetError:
    Logger.warn("Connection reset by peer... Everything should still be fine")
except discord.LoginFailure:
    print(
        "It seems like no %s was given or it was incorrect. Please check the %s!"
        % (paint.color("Discord Bot Token",
                       "red"), paint.color("CONFIG.json", "blue")))
    client.loop.run_until_complete(client.logout())
except KeyboardInterrupt:
    client.loop.run_until_complete(client.logout())
finally:
    client.loop.close()
    print("\nShutting down...")
Example No. 43
from URL import *

from cStringIO import StringIO
import re

import Template

import TimeClock

import Logger
log = Logger.getLogger()


class Response:
    def __init__(self, decoder=None):
        self.rawbody = None
        self.body = None
        self.headers = None
        self.info = None
        self.url = None
        self.code = None

        self.tree = None

        self.decoder = decoder

    # Lazy decoding
    def make_sure(self):
        if self.body is None and self.decoder is not None:
            self.body = self.decoder(self.rawbody)
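# Usage sketch (hypothetical payload): the decoder only runs once make_sure()
# is called, so callers that never read body never pay for decoding.
resp = Response(decoder=lambda raw: raw.decode("utf-8"))
resp.rawbody = b"<html>hello</html>"
resp.make_sure()
print(resp.body)  # "<html>hello</html>"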
Ejemplo n.º 44
0
#    not use this file except in compliance with the License. You may obtain
#    a copy of the License at
#
#         http://www.apache.org/licenses/LICENSE-2.0
#
#    Unless required by applicable law or agreed to in writing, software
#    distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
#    WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
#    License for the specific language governing permissions and limitations
#    under the License.

from Shell import (execute)
import Logger
from Exceptions import (ProcessExecutionError)

LOG = Logger.getLogger("install.pip")

INSTALL_CMD = ['pip', 'install']
UNINSTALL_CMD = ['pip', 'uninstall']


def install(pips):
    if (not pips or len(pips) == 0):
        return
    actions = list()
    pipnames = sorted(pips.keys())
    for name in pipnames:
        pipfull = name
        pipinfo = pips.get(name)
        if (pipinfo and pipinfo.get('version')):
            version = str(pipinfo.get('version'))
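# The loop above is cut off. A hedged sketch of how it could continue, pinning
# each package to its version and handing the list to the imported execute()
# helper; the "==" pinning and the bare execute(*...) call are assumptions
# about the project-local Shell module, not verified behaviour.
def install_sketch(pips):
    if not pips:
        return
    pipfulls = []
    for name in sorted(pips.keys()):
        pipinfo = pips.get(name)
        if pipinfo and pipinfo.get('version'):
            pipfulls.append("%s==%s" % (name, str(pipinfo.get('version'))))
        else:
            pipfulls.append(name)
    LOG.info("Installing python packages %s" % pipfulls)
    execute(*(INSTALL_CMD + pipfulls))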
Ejemplo n.º 45
0
 def __init__(self):
     self.v = None
     self.api = None
     self.log = Logger.Logger("DogeCopterLog.txt")
Ejemplo n.º 46
0
# -*- coding: utf-8 -*-
import BD as banco_dados
from Emails import *
from PortalCassi import *
from Logger import *

logger = Logger(__name__)


class Recursar(object):
    def EfetuarRecursos(self):
        banco_dados.Conectar()

        try:
            comando_sql = 'SELECT DISTINCT RF.NUM_CARTA_REMESSA \
                           FROM   CAS_RECURSOS_GLOSAS RG \
                           ,      CAS_RETORNOS_FINANCEIROS RF \
                           WHERE  RG.SEQ_RETORNO_FINANCEIRO = RF.SEQ_RETORNO_FINANCEIRO \
                           AND    RG.STATUS_RECURSO         = \'P\''

            banco_dados.Executar(comando_sql)
            cartas_remessas = banco_dados.BuscarTodos()

            if len(cartas_remessas) != 0:
                portal_cassi = PortalCassi()

                try:
                    portal_cassi.Inicializar()
                    portal_cassi.Logar()

                    for carta_remessa in cartas_remessas:
Ejemplo n.º 47
0
def _init():
    Logger.log("in DataManager.init()")
    initDataFileDic()
Ejemplo n.º 48
0
# Messages
from messages import ACCESS_WITHOUT_START
from messages import EXCEPTION_TRIGGERED_MESSAGE
from messages import EXCEPTION_CAUSING_MESSAGE

# Modules
import telepot
import Logger

logger = Logger.Logger()
paybot = telepot.Bot("692962436:AAHOeT1VqRcAboPEBEaJjYKRfvJ9Mj9xCMw")


def parse_handler(user_id, username, message):
    '''
    Passes on the user request to the specific controller
    ARGUMENTS:
    user_id: Telegram id of the sender
    username: sender's username
    message: string with the raw command text
    RETURN TYPE: void
    '''
    # Initialisation of bot
    command = message.split(' ')[0][1:]
    arguments = message.split(' ')[1:] if len(message.split(' ')) > 1 else None

    # Logging
    logger.command_run(message, username, user_id)

    try:
        if command == "time":
            paybot.sendMessage(user_id, getTime(), parse_mode='Markdown')
        else:
Ejemplo n.º 49
0
import itertools
import pika
from sklearn.externals import joblib
from bayes.bayes_train import get_words, bernousNB_save_path, isChat
from Logger import *
'''
    Read the trained model from file and classify; open a socket and receive messages.
'''

AnswerDict = {}
intentionList = []
ask_sentenses_length = 5  # if no keyword is matched and the question is longer than 5 characters, hand the conversation over to a human agent

# Logging
semantics_logfile = 'D:/data/daotai_semantics.log'
semantics_log = Logger(semantics_logfile, level='info')


def loadAnswers():
    with open("../kdata/intention_answer.txt",
              encoding="utf-8",
              errors="ignore") as fo:
        for line in fo.readlines():
            arr = line.strip().split("\t")
            AnswerDict[arr[0]] = arr[2]
            intentionList.append(arr[0])
    print("load answers finished")


def getAnswer(intention):
    result_list = AnswerDict[intention].split("|")
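# Hedged sketch of the classification step the module docstring describes:
# load the persisted BernoulliNB model and score one sentence. The feature
# shape expected by predict() and the behaviour of get_words() are assumptions
# about this project, not verified here.
def classify_sketch(sentence):
    model = joblib.load(bernousNB_save_path)   # persisted scikit-learn model
    features = get_words(sentence)             # project tokeniser
    return model.predict([features])[0]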
Ejemplo n.º 50
0
# License for the specific language governing permissions and limitations
# under the License.

# TestSuite

import traceback

from QueueHandler import QueueHandler
from Message import MessageObj
from DataMgr import DataMgr
from User import UserObj
from config import *
import Logger
import atexit

logger = Logger.Logging('logging')


def wait_for_sent_finnished(qh):
    import time
    while qh.qsize > 0:
        time.sleep(1)


def test_suite1():
    logger.info('***START OF TEST***')
    try:
        import random
        import time
        import string
        dm = DataMgr()
Ejemplo n.º 51
0
import Logger

logger = Logger.Logger(log_file="output.txt")
with Logger.context_logger(logger):  # redirect stdout to logger object
    print("output from main process")
    Logger.run_subprocess(["python3", "example_to_subprocess.py"])
Ejemplo n.º 52
0
    async def get_ticker_websocket(self, market):
        try:
            currency1 = market.get_currency1()
            currency2 = market.get_currency2()
            async with websockets.connect(self.websocket_address) as websocket:
                json_ticker_subscriber = json.dumps(
                    {"type": "subscribe", "product_ids": [currency1 + "-" + currency2], "channels": ["ticker"]})
                if self.debug:
                    print("json_ticker_subscriber --> : "+json_ticker_subscriber)
                await websocket.send(json_ticker_subscriber)
                info_json = await websocket.recv()
                if self.debug:
                    print("info_json <--: "+info_json)
                # receiving initial data
                temp_str = info_json[info_json.find("best_bid") + 11:]
                bid_rate = temp_str[:temp_str.find(",") - 1]
                if self.debug:
                    print("bid_rate: " + bid_rate)
                temp_str = info_json[info_json.find("best_ask") + 11:]
                ask_rate = temp_str[:temp_str.find(",") - 1]
                if self.debug:
                    print("ask_rate: " + ask_rate)

                market.set_top_bid_order_rate(bid_rate)
                market.set_top_ask_order_rate(ask_rate)

                subscribed_json = await websocket.recv()
                if self.debug:
                    print("subscribed_json <--: " + subscribed_json)
                while True:
                    update_json = await websocket.recv()
                    self.last_answer = update_json
                    if self.debug:
                        print("update_json: " + update_json)

                    first_find = update_json[update_json.find("best_bid") + 11:]
                    bid_rate = first_find[:first_find.find(",")-1]
                    if self.debug:
                        print("bid_rate: " + bid_rate)

                    third_find = update_json[update_json.find("best_ask") + 11:]
                    ask_rate = third_find[:third_find.find(",")-1]
                    if self.debug:
                        print("ask_rate: " + ask_rate)

                    third_find = update_json[update_json.find("time") + 7:]
                    timestamp = third_find[:third_find.find(",") - 1]
                    if self.debug:
                        print("timestamp: " + timestamp)

                    time = timestamp[timestamp.find("T") + 1:]
                    if self.debug:
                        print("time: " + time)

                    market.set_top_bid_order_rate(bid_rate)
                    market.set_top_ask_order_rate(ask_rate)

                    market.set_top_bid_order_timestamp(time)
                    market.set_top_ask_order_timestamp(time)
        except websockets.exceptions.ConnectionClosed as exc:
            print(self.last_answer)
            Logger.log_error(str(datetime.datetime.now())+" "+self.stock_name + " error code: " + str(exc.code) + ", reason: "
                      + str(exc.reason) + ", _cause_ : "+str(exc.__cause__))
            self.iteration = self.iteration + 1
            print("Restarting...")
            Coinbase_GDAXMarketThread = \
                Threading.WebSocketThread(self.iteration, "Thread: " + market.get_market_name() + str(self.iteration),
                                self.iteration, self, market)
            Coinbase_GDAXMarketThread.start()
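    # The loop above pulls best_bid/best_ask/time out of the raw message by
    # slicing at fixed offsets, which breaks if field order or quoting changes.
    # A hedged alternative sketch using json.loads (json is already used above
    # for the subscribe message); the field names match the ones the slicing
    # code assumes, and the method name is illustrative only.
    def parse_ticker_sketch(self, update_json):
        msg = json.loads(update_json)
        bid_rate = msg.get("best_bid")
        ask_rate = msg.get("best_ask")
        timestamp = msg.get("time", "")
        return bid_rate, ask_rate, timestamp.split("T")[-1]  # time-of-day only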
Ejemplo n.º 53
0
import Logger
from DataManager import DataManager
import os

logger_name = "UpdateDBClasses"
Logger.setup(logger_name)
file_path = os.path.join(os.path.abspath(os.path.dirname(__file__)), "Generated/DatabaseClasses.py")
manager = DataManager(logger_name, email=False)
manager.update_classes_file(file_path)
Ejemplo n.º 54
0
    def OnChange(self, event):

        Logger.info("Change password!")

        currentpw = self.currentpwText.GetValue()
        newpw = self.newpwText.GetValue()
        checknewpw = self.checknewpwText.GetValue()

        if currentpw == u'' or newpw == u'' or checknewpw == u'':
            Util.MessageBox(self, u'密码不能为空。', u'错误',
                            wx.OK | wx.ICON_ERROR | wx.BORDER_DOUBLE)
            Logger.info("The password is empty!")
            return
        elif currentpw != Session.Password:
            Util.MessageBox(self, u'当前密码输入错误。', u'错误',
                            wx.OK | wx.ICON_ERROR | wx.BORDER_DOUBLE)
            Logger.info("The current password is wrong!")
            return
        elif len(newpw) < 6 or len(newpw) > 14 or re.search(
                r"\W", newpw) or newpw.isalpha() or newpw.isdigit():
            Util.MessageBox(self, u'您输入的新密码不符合规则,请重新输入。', u'错误',
                            wx.OK | wx.ICON_ERROR | wx.BORDER_DOUBLE)
            Logger.info("The new password doesn't conform to the rules!")
            return
        elif newpw != checknewpw:
            Util.MessageBox(self, u'两次输入的新密码不一致。', u'错误',
                            wx.OK | wx.ICON_ERROR | wx.BORDER_DOUBLE)
            Logger.info("The new passwords do not match!")
            return

        try:
            havclient.user_update_own_password(FirstUser['firstuser'],
                                               currentpw, checknewpw)
            self.Flag = True
            Logger.info("The password was updated successfully!")
            Util.MessageBox(self, u'修改成功!\n请输入新的密码重新登录!', u'成功',
                            wx.OK | wx.ICON_INFORMATION)

            self.Destroy()
        except:
            Util.MessageBox(self, u'修改失败!', u'错误', wx.OK | wx.ICON_ERROR)
            Logger.info("Password update failed!")
Ejemplo n.º 55
0
import queue

from apscheduler.schedulers.blocking import BlockingScheduler

import DataChecker  # project-local helpers assumed to live alongside this script
import DBWriter
import WeatherGetter
import Logger


class Scheduler:
    def __init__(self):
        self.todo_queue = queue.Queue(0)
        self.schedule = BlockingScheduler()
        # Fetch weather data
        self.schedule.add_job(lambda: DataChecker.add_to_queue(self.todo_queue, WeatherGetter.get_now),
                              'interval', seconds=1195)
        self.schedule.add_job(lambda: DataChecker.add_to_queue(self.todo_queue, WeatherGetter.get_air),
                              'interval', seconds=3610)  # staggered 10 seconds after the other jobs
        self.schedule.add_job(lambda: DataChecker.add_to_queue(self.todo_queue, WeatherGetter.get_lifestyle),
                              'cron', hour='8,11,18', minute='01,21,41')  # query around 08:00, 11:00 and 18:00 every day
        self.schedule.add_job(lambda: DataChecker.add_to_queue(self.todo_queue, WeatherGetter.get_sun_and_moon),
                              'cron', hour=4, minute=3)  # offset by 3 minutes
        # Write to SQL
        self.schedule.add_job(lambda: DataChecker.write_sql(self.todo_queue, DBWriter.insert),
                              'interval', seconds=3620)
        # Rotate the log daily
        self.schedule.add_job(Logger.cut_log, 'cron', hour=0, minute=0)


DBWriter.init()
sch = Scheduler()
Logger.get_logger('Manager')\
    .info('Init System successfully! WeatherGetter Beta 1.0 now ready to run! Github@Origami404!')
sch.schedule.start()
Logger.get_logger('Manager').info('System close.')
Ejemplo n.º 56
0
    RABBIT: [
        joinpths(STACK_CONFIG_DIR, "pkgs", 'rabbitmq.json'),
    ],
}

#subdirs of a components dir
TRACE_DIR = "traces"
APP_DIR = "app"
CONFIG_DIR = "config"

#support for php-style "/pattern/modifiers" regex literals,
#which make modifiers nicer to express
REGEX_MATCHER = re.compile("^/(.*?)/([a-z]*)$")
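# Illustration of how the "/pattern/modifiers" convention above could be
# resolved into a compiled regex; the flag mapping below (only "i" handled)
# and the helper name are assumptions for the example, not part of the module.
def compile_php_style(spec):
    m = REGEX_MATCHER.match(spec)
    if not m:
        return None
    pattern, modifiers = m.group(1), m.group(2)
    flags = re.IGNORECASE if "i" in modifiers else 0
    return re.compile(pattern, flags)
# e.g. compile_php_style("/hello.*/i").match("HELLO world") -> match object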

LOG = Logger.getLogger("install.util")


def execute_template(*cmds, **kargs):
    if (not cmds or len(cmds) == 0):
        return
    params_replacements = kargs.pop('params')
    ignore_missing = kargs.pop('ignore_missing', False)
    outs = dict()
    for cmdinfo in cmds:
        cmd_to_run_templ = cmdinfo.get("cmd")
        cmd_to_run = list()
        for piece in cmd_to_run_templ:
            if (params_replacements and len(params_replacements)):
                cmd_to_run.append(
                    param_replace(piece,
Ejemplo n.º 57
0
    def run(self, tarfile, targets):
        # Create SOURCES dir and copy opkg tarball to it
        sourcedir = self.getMacro('%{_sourcedir}')
        if not os.path.exists(sourcedir):
            os.makedirs(sourcedir)
        Logger().debug("Copying %s to %s" % (tarfile, sourcedir))
        shutil.copy(tarfile, sourcedir)

        # Create SPECS dir and create spec file
        specdir = self.getMacro('%_specdir')
        if not os.path.exists(specdir):
            os.makedirs(specdir)

        specfile = os.path.join(specdir, "opkg-%s.spec" % self.opkgName)
        if os.path.exists(specfile):
            os.remove(specfile)

        if self.opkgDesc.arch == "all":
            specfile = os.path.join(self.getMacro('%_specdir'),
                                    "opkg-%s.spec" % self.opkgName)
        else:
            specfile = os.path.join(self.getMacro('%_specdir'),
                                    "opkg-%s.spec.tmp" % self.opkgName)
            finalspec = os.path.join(self.getMacro('%_specdir'),
                                     "opkg-%s.spec" % self.opkgName)
        Tools.cheetahCompile(
            RpmSpec(self.opkgDesc, self.dist),
            os.path.join(Config().get(self.configSection, "templatedir"),
                         "opkg.spec.tmpl"), specfile)

        # GV: I am not familiar enough with Cheetah to trick the spec file on
        # the fly so i do it here manually. The idea is to remove the BuildArch
        # line in the spec file when the package is not noarch
        Logger().debug("Arch: %s" % self.opkgDesc.arch)
        if self.opkgDesc.arch == "any":
            Logger().debug(
                "Modifying the spec file for the creation of arch dependent RPMs"
            )
            cmd = r"/bin/sed s/BuildArch/\#BuildArch/g < " + specfile + " > " + finalspec
            Logger().info("Executing %s" % (cmd))
            os.system(cmd)
            os.remove(specfile)
            specfile = finalspec
            cmd = "cp " + specfile + " /tmp"
            os.system(cmd)

        # Build targets
        if 'source' in targets:
            if Tools.command("%s --clean -bs %s" % (self.buildCmd, specfile),
                             "./"):
                Logger().info("Source package successfully generated in %s" %
                              self.getMacro('%_srcrpmdir'))
            else:
                Logger().error("Source package generation failed")
                raise SystemExit(1)

        if 'binary' in targets:
            if Tools.command("%s --clean -bb %s" % (self.buildCmd, specfile),
                             "./"):
                Logger().info("Moving generated files to %s" % self.dest_dir)
                for file in glob.glob(
                        os.path.join(self.getMacro('%_rpmdir'),
                                     "*/opkg-%s*.rpm" % self.opkgName)):
                    Logger().info("Moving files: %s" % file)
                    shutil.move(file, self.dest_dir)
            else:
                Logger().error("Binary package generation failed")
                raise SystemExit(1)
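    # The GV comment above explains that the BuildArch line is commented out by
    # shelling out to sed. A hedged in-Python sketch of the same edit, mirroring
    # `sed s/BuildArch/#BuildArch/g`; the method name is illustrative only.
    def commentOutBuildArch(self, specfile, finalspec):
        with open(specfile) as src, open(finalspec, "w") as dst:
            for line in src:
                dst.write(line.replace("BuildArch", "#BuildArch"))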
Ejemplo n.º 58
0
    def __init__(self,
                 globalWorkingDir,
                 localWorkingDir,
                 pilotJob=None,
                 rank=None,
                 nonMPIMode=False,
                 outputDir=None,
                 dumpEventOutputs=False):
        threading.Thread.__init__(self)
        self.globalWorkingDir = globalWorkingDir
        self.localWorkingDir = localWorkingDir
        self.currentDir = None
        # database backend
        self.db = Database.Backend(self.globalWorkingDir)
        # logger
        self.tmpLog = Logger.Logger(filename='Yoda.log')

        # communication channel
        self.comm = Interaction.Receiver(rank=rank,
                                         nonMPIMode=nonMPIMode,
                                         logger=self.tmpLog)
        self.rank = self.comm.getRank()

        self.tmpLog.info("Global working dir: %s" % self.globalWorkingDir)
        self.initWorkingDir()
        self.tmpLog.info("Current working dir: %s" % self.currentDir)
        self.failed_updates = []
        self.outputDir = outputDir
        self.dumpEventOutputs = dumpEventOutputs

        self.pilotJob = pilotJob

        self.cores = 10
        self.jobs = []

        # jobs which needs more than one rank
        self.jobRanks = []
        self.totalJobRanks = 0
        # jobs which needs less than one rank
        self.jobRanksSmallPiece = []
        self.totalJobRanksSmallPiece = 0
        self.rankJobsTries = {}

        # scheduler policy:
        self.bigJobFirst = True
        self.lastRankForBigJobFirst = int(self.getTotalRanks() * 0.9)

        self.readyEventRanges = []
        self.runningEventRanges = {}
        self.finishedEventRanges = []

        self.readyJobsEventRanges = {}
        self.runningJobsEventRanges = {}
        self.finishedJobsEventRanges = {}
        self.stagedOutJobsEventRanges = {}

        self.updateEventRangesToDBTime = None

        self.jobMetrics = {}
        self.jobsTimestamp = {}
        self.jobsRuningRanks = {}

        self.originSigHandler = {}
        for sig in [
                signal.SIGTERM, signal.SIGQUIT, signal.SIGSEGV, signal.SIGXCPU,
                signal.SIGUSR1, signal.SIGBUS
        ]:
            self.originSigHandler[sig] = signal.getsignal(sig)
        signal.signal(signal.SIGTERM, self.stop)
        signal.signal(signal.SIGQUIT, self.stop)
        signal.signal(signal.SIGSEGV, self.stop)
        signal.signal(signal.SIGXCPU, self.stopYoda)
        signal.signal(signal.SIGUSR1, self.stopYoda)
        signal.signal(signal.SIGBUS, self.stopYoda)
Ejemplo n.º 59
0
    def run(self, tarfile, targets):
        sourcedir = os.path.join(
            self.dest_dir, "opkg-%s-%s" %
            (self.opkgName, self.opkgDesc.getVersion('upstream')))
        # Rename tar to follow Debian non-native package rule
        debtarfile = os.path.join(
            self.dest_dir, "opkg-%s_%s.orig.tar.gz" %
            (self.opkgName, self.opkgDesc.getVersion('upstream')))
        os.rename(tarfile, debtarfile)

        # Uncompress tar
        if os.path.exists(sourcedir):
            Tools.rmDir(sourcedir)
        if not Tools.untar(debtarfile, self.dest_dir):
            Logger().error("Error while extracting tar file: %s" % debtarfile)
            raise SystemExit(1)

        # Create debian dir
        debiandir = os.path.join(sourcedir, "debian")
        os.makedirs(debiandir)

        # Compile template files
        debDesc = DebDescription(self.opkgDesc, self.dist)
        templateDir = os.path.abspath(Config().get(self.configSection,
                                                   "templatedir"))
        tmplList = [
            os.path.join(templateDir, t) for t in Tools.ls(templateDir)
        ]
        Logger().debug("Templates: %s" % tmplList)
        for template in tmplList:
            if re.search(r"\.tmpl", template):
                (head, tail) = os.path.split(template)
                (base, ext) = os.path.splitext(tail)
                Tools.cheetahCompile(debDesc, template,
                                     os.path.join(debiandir, base))
            else:
                shutil.copy(template, debiandir)
                Logger().info("Copy %s to %s" % (template, debiandir))

        # GV: For the rules file, we need to do some simple updates and
        # I do not know cheetah enough to do that quickly... there we
        # execute a sed command (yes, it is far from perfect).
        rulescript = debiandir + "/rules"
        cmd = "/bin/sed s/OPKGNAME/" + self.opkgName + "/g < " + debiandir + "/rules.in > " + rulescript
        Logger().info("Executing %s" % (cmd))
        os.system(cmd)
        os.chmod(rulescript, 0o744)

        for part in ['api', 'server', 'client']:
            fl = debDesc.getPackageFiles(part)
            installFile = os.path.join(debiandir, debDesc.getInstallFile(part))
            filelist = open(installFile, "a")
            for f in fl:
                filelist.write("%s /%s/\n" % (f['sourcedest'], f['dest']))
            filelist.close()

        # Build targets
        cmd = "%s -rfakeroot -sa" % self.buildCmd
        if 'source' in targets and 'binary' in targets:
            opts = ""
        elif 'source' in targets:
            opts = "-S"
        elif 'binary' in targets:
            opts = "-B"

        if Tools.command("%s %s" % (cmd, opts), sourcedir):
            Logger().info("Packages successfully generated")
        else:
            Logger().error("Packages generation failed")
            raise SystemExit(1)
Ejemplo n.º 60
0
    def antPage(self):
        try:
            # channel
            c = Channel()
            _val = (
                'http://www.ly.com/scenery/scenerysearchlist_22_0__0_0_0_0_0_0_0.html',
                1)
            c.antChannelList(_val)
            channels = c.channel_list
            if channels and len(channels) > 0:
                Common.log('# add channels num: %d' % len(channels))
                self.mysqlAccess.insertTCChannel(channels)
            else:
                Common.log('# no channels fetched...')

        except Exception as e:
            Common.log('# TCSpotChannel antpage error: %s' % e)
            Common.traceback_log()


if __name__ == '__main__':
    loggername = 'channel'
    filename = 'add_channel_%s' % (time.strftime("%Y%m%d%H", time.localtime()))
    Logger.config_logging(loggername, filename)
    j = TCSpotChannel()
    Common.log('TCSpotChannel start')
    j.antPage()
    time.sleep(1)
    Common.log('TCSpotChannel end')