Esempio n. 1
0
    def __getstate__(self):
        """Return the picklable state of this object.

        Only attributes listed in self._pickle are eligible, and of
        those only the ones whose names don't start with "_" (the
        rest either live in the *_properties DB table or are
        transient).  self._pickle is set up by save(); if it is
        missing, we were pickled without going through save(), which
        is a bug at the call site.
        """
        if not hasattr(self, '_pickle'):
            log.error("Failure in pickle for " + self.ob_type() + " " +
                      str(self._id))
            # Panic! (See docstring above). The usual cause of this
            # exception is storing a SerObject in foo.bar, where it
            # then gets pickled as foo is pickled, but without having
            # been passed through foo.bar.save(). Instead, use either
            # foo._bar or foo.bar = bar._id.
            raise AssertionError(
                "You have probably stored a reference to a SerObject in an attribute that doesn't start with '_'. Check the logs for which attribute it is."
            )

        # Shallow-collect the savable properties: skip anything
        # starting with "_" and anything already stored in the
        # *_properties table.
        obj = dict((k, getattr(self, k))
                   for k in self._pickle if not k.startswith('_'))
        # _pickle is one-shot state set up by save(); discard it so a
        # stray later pickle without save() is caught by the check above.
        del self._pickle
        return obj
Esempio n. 2
0
 def get_next_after(self, page):
     """Return the wizard page that follows *page* in the flow.

     Two pages branch on user choices held in DataStorage(); all
     other pages have a fixed successor.  An unrecognised page is
     logged and returned unchanged.
     """
     # Pages whose successor is fixed.
     fixed_transitions = [
         (self.Start, self.SelectFile),
         (self.SelectFile, self.MemoryType),
         (self.WithoutECC, self.DestinationFolder),
         (self.EccAdress, self.BanksType),
         (self.JointBanks, self.DestinationFolder),
         (self.SeparatedBanks, self.DestinationFolder),
         (self.DestinationFolder, self.Result),
         (self.Result, self.SelectFile),
     ]
     for current, successor in fixed_transitions:
         if page == current:
             return successor
     # Pages whose successor depends on the stored user selection.
     if page == self.MemoryType:
         if DataStorage().memory_type == MemoryType.WithoutEcc:
             return self.WithoutECC
         return self.EccAdress
     if page == self.BanksType:
         if DataStorage().banks_type == BanksType.Separated:
             return self.SeparatedBanks
         return self.JointBanks
     log.error('Unknown page: {}'.format(page))
     return page
Esempio n. 3
0
 def open_page(self, page):
     """Make *page* the current page of the stacked widget.

     The page is given a chance to prepare itself first; any failure
     (during preparation or the widget switch) is logged rather than
     propagated to the caller.
     """
     try:
         page.prepare_to_open()
         page_index = self.pages.index(page)
         self.ui.stackedWidget.setCurrentIndex(page_index)
         self.current_page = page
     except Exception as exc:
         log.error(exc, show_caller=True)
Esempio n. 4
0
 def _setup(self):
     """Called after __init__ or after load().

     Resets the change/delete bookkeeping flags and registers this
     instance in the per-class identity cache, complaining loudly if
     another object with the same ID is already cached.
     """
     self._changed = False
     self._deleted = False
     self._changed_props = set()
     self._failed_props = set()
     cache = self.__class__.cache_by_id
     if self._id in cache:
         # Two live objects with one ID means identity is broken
         # somewhere upstream; log both parties for debugging.
         log.error("Attempting to recache object with ID " + str(self._id) +
                   ": cache holds " +
                   str(cache[self._id]) +
                   " and we are " + str(self))
     else:
         cache[self._id] = self
Esempio n. 5
0
    def _cache_object(cls, obj):
        """Hook to allow classes to define their own caching scheme.

        Called after an object has been loaded and cached in
        cls.cache_by_id.  In addition to the base class behaviour,
        Location objects are indexed by position: cache_by_pos maps
        repr(obj.pos) to a "stack" dict keyed by overlay.
        """
        super(Location, cls)._cache_object(obj)
        rpos = repr(obj.pos)
        # All locations sharing a position form a stack, keyed by overlay.
        stack = cls.cache_by_pos.setdefault(rpos, {})
        if obj.overlay in stack:
            # FIXME: Raise an exception here: we've just loaded a
            # duplicate object.  For now we log and keep the object
            # that was cached first.
            log.error(
                "Trying to cache a Location overlay that already exists, at (%d, %d, %s, %d)"
                % (obj.pos.x, obj.pos.y, obj.pos.layer, obj.overlay))
            return
        stack[obj.overlay] = obj
Esempio n. 6
0
def retry_process(process):
    """Run *process* inside a DB transaction, retrying on failure.

    Commits on success.  On any exception the full traceback is
    logged, the transaction is rolled back, and after a 5 second
    pause the call is retried, up to 5 attempts in total.

    Returns whatever *process* returned on success, or False if
    every attempt failed.
    """
    complete = False
    attempts = 5
    ret = False
    while not complete and attempts > 0:
        # NOTE(review): the cursor is never used directly; presumably
        # creating it opens the transaction -- confirm before removing.
        xact = DB.cursor()

        try:
            ret = process()
            DB.commit()
            complete = True
        except Exception as ex:
            log.debug("Exception!: " + str(type(ex)))
            msg = traceback.format_exception(*(sys.exc_info()))
            log.error(''.join(msg))
            attempts -= 1
            DB.rollback()
            time.sleep(5)
    # Bug fix: the result of process() was previously computed but
    # never returned, so callers always saw None.
    return ret
Esempio n. 7
0
 def get_previous_before(self, page):
     """Return the wizard page shown immediately before *page*.

     The first (Start) and last (Result) pages map to themselves;
     an unknown page is logged and returned unchanged.
     """
     transitions = [
         (self.Start, page),
         (self.SelectFile, self.Start),
         (self.MemoryType, self.SelectFile),
         (self.WithoutECC, self.MemoryType),
         (self.EccAdress, self.MemoryType),
         (self.BanksType, self.EccAdress),
         (self.JointBanks, self.BanksType),
         (self.SeparatedBanks, self.BanksType),
         (self.Result, page),
     ]
     for current, previous in transitions:
         if page == current:
             return previous
     log.error('Unknown page: {}'.format(page))
     return page
Esempio n. 8
0
    def run(self):
        """Worker loop: execute queued SQL statements against the DB.

        Each queue item is a (sql, params, result) triple; *result*
        is either None (fire-and-forget) or a queue onto which every
        fetched row is put, terminated by the "__END__" sentinel.
        """
        self._create_db()
        while True:
            if not self._queue.empty():
                # Bug fix: reset per-item state so a failure in get()
                # or execute() can't reuse a stale cursor/result from
                # a previous iteration.
                cursor = None
                result = None
                try:
                    sql, params, result = self._queue.get()
                    log.info("[DBWrapper] QUERY: %s" % sql)
                    log.info("[DBWrapper] PARAMS: %s" % str(params))
                    log.info("[DBWrapper] RESULT: " + str(result))
                    cursor = self._db.cursor()
                    if params:
                        cursor.execute(sql, params)
                    else:
                        cursor.execute(sql)
                except sqlite3.OperationalError as e:
                    log.error("[DBWrapper] OperationalError : %s" % e)
                    # Bug fix: there are no rows to fetch from a failed
                    # statement; drop the cursor but still fall through
                    # so the consumer receives the end sentinel instead
                    # of blocking forever.
                    cursor = None

                if result:
                    log.info("[DBWrapper] Putting Results")
                    if cursor is not None:
                        for row in cursor.fetchall():
                            result.put(row)
                    result.put("__END__")

                self._db.commit()
Esempio n. 9
0
    def run(self):
        """Worker loop: execute queued SQL statements against the DB.

        Each queue item is a (sql, params, result) triple; *result*
        is either None (fire-and-forget) or a queue onto which every
        fetched row is put, terminated by the "__END__" sentinel.
        """
        self._create_db()
        while True:
            if not self._queue.empty():
                # Bug fix: reset per-item state so a failure in get()
                # or execute() can't reuse a stale cursor/result from
                # a previous iteration.
                cursor = None
                result = None
                try:
                    sql, params, result = self._queue.get()
                    log.info("[DBWrapper] QUERY: %s" % sql)
                    log.info("[DBWrapper] PARAMS: %s" % str(params))
                    log.info("[DBWrapper] RESULT: " + str(result))
                    cursor = self._db.cursor()
                    if params:
                        cursor.execute(sql, params)
                    else:
                        cursor.execute(sql)
                except sqlite3.OperationalError as e:
                    log.error("[DBWrapper] OperationalError : %s" % e)
                    # Bug fix: there are no rows to fetch from a failed
                    # statement; drop the cursor but still fall through
                    # so the consumer receives the end sentinel instead
                    # of blocking forever.
                    cursor = None

                if result:
                    log.info("[DBWrapper] Putting Results")
                    if cursor is not None:
                        for row in cursor.fetchall():
                            result.put(row)
                    result.put("__END__")

                self._db.commit()
Esempio n. 10
0
def getInstance():
	"""Return the SeriesPlugin singleton, creating it on first call.

	First-time initialisation re-inits the log, dumps best-effort
	diagnostic information about the box (hardware, image, enigma
	version, plugin config), compiles the user's "replace chars"
	regex, warns if a deprecated AutoTimer is installed, and checks
	that all required python modules can be found.  If a dependency
	is missing, no instance is created and None is returned.
	"""
	global instance
	
	if instance is None:
		
		log.reinit()
		
		from plugin import VERSION
		
		log.debug(" SERIESPLUGIN NEW INSTANCE " + VERSION)
		log.debug( " ", strftime("%a, %d %b %Y %H:%M:%S", localtime()) )
		
		# Best-effort diagnostics: each probe may fail on some images,
		# so failures are swallowed deliberately (sys.exc_clear keeps
		# the py2 exception state tidy).
		try:
			from Tools.HardwareInfo import HardwareInfo
			log.debug( " DeviceName " + HardwareInfo().get_device_name().strip() )
		except:
			sys.exc_clear()
		
		try:
			from Components.About import about
			log.debug( " EnigmaVersion " + about.getEnigmaVersionString().strip() )
			log.debug( " ImageVersion " + about.getVersionString().strip() )
		except:
			sys.exc_clear()
		
		try:
			#http://stackoverflow.com/questions/1904394/python-selecting-to-read-the-first-line-only
			log.debug( " dreamboxmodel " + open("/proc/stb/info/model").readline().strip() )
			log.debug( " imageversion " + open("/etc/image-version").readline().strip() )
			log.debug( " imageissue " + open("/etc/issue.net").readline().strip() )
		except:
			sys.exc_clear()
		
		# Dump every plugin config value for debugging.
		try:
			for key, value in config.plugins.seriesplugin.dict().iteritems():
				log.debug( " config..%s = %s" % (key, str(value.value)) )
		except Exception as e:
			sys.exc_clear()
		
		global CompiledRegexpReplaceChars
		try:
			if config.plugins.seriesplugin.replace_chars.value:
				CompiledRegexpReplaceChars = re.compile('['+config.plugins.seriesplugin.replace_chars.value.replace("\\", "\\\\\\\\")+']')
		except:
			# Fall back to a sane default character class if the user
			# supplied an invalid pattern.
			log.exception( " Config option 'Replace Chars' is no valid regular expression" )
			CompiledRegexpReplaceChars = re.compile("[:\!/\\,\(\)'\?]")
		
		# Check autotimer
		try:
			from Plugins.Extensions.AutoTimer.plugin import autotimer
			deprecated = False
			try:
				from Plugins.Extensions.AutoTimer.plugin import AUTOTIMER_VERSION
				if int(AUTOTIMER_VERSION[0]) < 4:
					deprecated = True
			except ImportError:
				# Very old AutoTimer builds don't export a version at all.
				AUTOTIMER_VERSION = "deprecated"
				deprecated = True
			log.debug( " AutoTimer: " + AUTOTIMER_VERSION )
			if deprecated:
				log.warning( _("Your autotimer is deprecated")  + "\n" +_("Please update it") )
		except ImportError:
			log.debug( " AutoTimer: Not found" )
		
		# Check dependencies
		start = True
		from imp import find_module
		dependencies = ["difflib", "json", "re", "xml", "xmlrpclib"]
		for dependency in dependencies:
			try:
				find_module(dependency)
			except ImportError:
				start = False
				log.error( _("Error missing dependency")  + "\n" + "python-"+dependency + "\n\n" +_("Please install missing python paket manually") )
		if start:
			instance = SeriesPlugin()
		
	return instance
Esempio n. 11
0
	def getEpisode(self, callback, name, begin, end=None, service=None, future=False, today=False, elapsed=False, block=False, rename=False):
		"""Resolve series/episode information for *name*.

		callback -- callable invoked with the result or an error/skip
		            message
		name     -- event/recording title; a regexp match may extract
		            the series name or cause the lookup to be skipped
		begin/end -- event times forwarded to the identifier
		service  -- channel service reference (object or string)
		future/today/elapsed -- pick the matching time-based identifier;
		            if none is set, the first configured module is used
		block    -- False queues the lookup on the worker thread;
		            True performs it synchronously and returns the
		            normalized result
		rename   -- lookup is for renaming (disables pattern skipping)

		Returns a message string when skipped or on error, the
		normalized data in blocking mode, or None after queueing.
		"""
		
		# Optionally refuse to do lookups while a recording is running.
		if config.plugins.seriesplugin.skip_during_records.value:
			try:
				import NavigationInstance
				if NavigationInstance.instance.RecordTimer.isRecording():
					msg = _("Skip check during running records") + "\n\n" + _("Can be configured within the setup")
					log.warning( msg)
					if callable(callback):
						callback(msg)
					return msg
			except:
				pass
		
		# Check for episode information in title
		match = self.compiledRegexpSeries.match(name)
		if match:
			#log.debug(match.group(0))     # Entire match
			#log.debug(match.group(1))     # First parenthesized subgroup
			if not rename and config.plugins.seriesplugin.skip_pattern_match.value:
				msg = _("Skip check because of pattern match") + "\n" + name + "\n\n" + _("Can be configured within the setup")
				log.warning(msg)
				if callable(callback):
					callback(msg)
				return msg
			if match.group(1):
				name = match.group(1)
		
		# Select the identifier implementation for the requested
		# time frame; the default path instantiates the first module.
		if elapsed:
			identifier = self.identifier_elapsed
		elif today:
			identifier = self.identifier_today
		elif future:
			identifier = self.identifier_future
		else:
			identifier = self.modules and self.instantiateModule( self.modules.itervalues().next() )
		
		if not identifier:
			msg = _("No identifier available") + "\n\n" + _("Please check Your installation")
			log.error(msg)
			if callable(callback):
				callback(msg)
			return msg
		
		elif self.channelsEmpty():
			msg = _("Channels are not matched") + "\n\n" + _("Please open the channel editor (setup) and match them")
			log.error(msg)
			if callable(callback):
				callback(msg)
			return msg
			
		else:
			# Reset title search depth on every new request
			identifier.search_depth = 0;
			
			# Reset the knownids on every new request
			identifier.knownids = []
			
			# Normalise the service reference to a plain string;
			# service may already be a string or a service object.
			try:
				serviceref = service.toString()
			except:
				sys.exc_clear()
				serviceref = str(service)
			serviceref = re.sub('::.*', ':', serviceref)

			if block == False:
				
				# Asynchronous: hand the work to the lookup thread.
				self.thread.add( ThreadItem(identifier, callback, name, begin, end, serviceref) )
				
			else:
				
				# Synchronous: run the lookup inline.
				result = None
				
				try:
					result = identifier.getEpisode( name, begin, end, serviceref )
				except Exception, e:
					log.exception("Worker:", str(e))
					
					# Exception finish job with error
					result = str(e)
				
				config.plugins.seriesplugin.lookup_counter.value += 1
				
				data = normalizeResult(result)
				
				if callable(callback):
					callback(data)
				
				return data
0
    def save(self, force=False):
        """Save to the database the parts of the object that have changed.

        force -- write even if no change has been flagged.

        Deleted objects are simply removed from their table.  For live
        objects, every attribute not starting with "_" is either
        written to the <table>_properties table (atomic types: int,
        float, str/unicode, bool) or added to self._pickle so that
        __getstate__ serialises it when the object itself is pickled.
        SerObject-valued attributes are an error and are logged, not
        stored.
        """
        if not force and not self._changed:
            return
        if self._deleted:
            cur = DB.cursor()
            cur.execute('DELETE FROM ' + self._table + ' WHERE id=%(id)s',
                        {'id': self._id})
            self._changed = False
            return

        #log.debug("save " + self.ob_type() + str(self._id) + ": changed is " + str(self._changed_props))
        #log.debug("Full dump: " + str(self.__dict__))

        # The only time pickle() gets called is during save. We set up
        # for that event by constructing a set of property names that
        # we should pickle (rather than dump to the database table)
        self._pickle = set()

        # First, we iterate through the elements of the object that
        # have changed and save them. Anything which is not an atomic
        # type is punted for later
        params = {'id': self._id}

        cur = DB.cursor()

        for key in self.__dict__.iterkeys():
            params['key'] = key
            params['value'] = self.__dict__[key]

            # Skip properties which begin with _
            if key[0] == '_':
                continue

            # Ignore/delete properties which are None
            if self.__dict__[key] is None:
                cur.execute(
                    'DELETE FROM ' + self._table + '_properties' + ' WHERE ' +
                    self._table + '_id=%(id)s' + '   AND key=%(key)s', params)
                continue

            # Work out the type (and hence the DB serialisation) of
            # the object we're looking at
            typ = type(self.__dict__[key])
            if typ is int:
                params['type'] = 'i'
                value_field = 'ivalue'
            elif typ is float:
                params['type'] = 'f'
                value_field = 'fvalue'
            elif typ is str or typ is unicode:
                params['type'] = 't'
                value_field = 'tvalue'
            elif typ is bool:
                # Booleans share the integer column, distinguished by
                # the 'b' type tag.
                params['type'] = 'b'
                value_field = 'ivalue'
            elif issubclass(typ, SerObject):
                # This is a Bad Thing: complain about it lots
                log.error(
                    "The attribute '%s' contains a SerObject (%s). This combination cannot be pickled. Your attribute has not been stored at all. You should fix this as soon as possible."
                    % (key, str(self.__dict__[key])))
            elif hasattr(self.__dict__[key], 'save'):
                # If the object has its own save() method, call that
                # as well, but still pickle it
                self.__dict__[key].save()
                self._pickle.add(key)
                continue
            else:
                # It's not an atomic type that we know about, or which
                # wants to handle itself, so we're going to pickle this
                # property into the central store, not write to the DB
                self._pickle.add(key)
                continue

            # If the key wasn't changed, we don't need to do anything
            if key not in self._changed_props:
                continue

            # At this point, we've got an atomic type we understand
            # and can put into *_properties, so we do so. The
            # following code is idiomatic for an insert-or-update in
            # postgres (as per
            # http://www.postgresql.org/docs/8.3/static/sql-update.html).
            cur.execute('SAVEPOINT update1')
            try:
                # Try an insert first.
                sql = 'INSERT INTO ' + self._table + '_properties'
                sql += ' (' + self._table + '_id, key, '
                sql += 'type, ' + value_field + ')'
                sql += 'VALUES (%(id)s, %(key)s, %(type)s, %(value)s)'
                cur.execute(sql, params)
            except psycopg2.Error, ex:
                # If the insert failed (due to a primary key
                # uniqueness violation), skip back and try an update
                # instead
                cur.execute('ROLLBACK TO SAVEPOINT update1')
                cur.execute(
                    'UPDATE ' + self._table + '_properties' + ' SET ' +
                    value_field + ' = %(value)s,' + '	 type = %(type)s' +
                    ' WHERE ' + self._table + '_id = %(id)s' +
                    '   AND key = %(key)s', params)
            else:
                # If the insert succeeded, we need to tie off the
                # savepoint. If it failed, we don't have to do this,
                # as it was rolled back in the except: section above.
                cur.execute('RELEASE SAVEPOINT update1')
Esempio n. 13
0
            return res
        return fn2
    return deco    

            
if __name__ == "__main__":
    
    print "# TEST 1"
    log.info("test")
    
    print "# TEST 2"
    msgMap["errId_1"]="Test error with the var '{var}'"
    err = createError(key="errId_1", 
                      args={"var":"alpha"}, 
                      exception=EnvironmentError())
    log.error(err)
    
    print "# TEST 3"
    log.warn("Current version is %s "%configParser.get("DEFAULT","version"))
    
    print "# TEST 4"
    @checkTypeParams(int,lola = int)
    @checkTypeReturned(str,int,dict)
    def testMethod(nb,lola= 1):
        res = lola+nb
        return "%f"%res,res,{"res":res}
    
    print testMethod(9,lola=3)
    try:
        testMethod(lola="5")
    except TypeError as e:
Esempio n. 14
0
 def handle(self, err):
     """Record *err* in the collected error list and log it."""
     self.__errors += [err]
     log.error(err)
Esempio n. 15
0
 def handle(self, err):
     """Store *err* with the other collected errors and report it."""
     self.__errors.extend([err])
     log.error(err)