def save(self):
    """
    Save caches to specified file

    Expires stale items, puts container objects into id-sleep and
    pickles all class caches into the gzipped cache file. Uses a
    temporary file plus rename so the real cache file is never left
    half-written. No-op when persistent caching is disabled.
    """
    # Nothing to do when persistent caching is off
    if not self._filename or get_cache_level() < config.CACHE_PERSISTENT:
        return
    # Clear expired items and gather all caches into a single object
    self.expire()
    log.cache("Cache dump stats:\n" + self.stats().strip())
    data = {}
    for current_class in self._classes:
        # Put container classes into id-sleep
        if issubclass(current_class, containers.Container):
            # Fixed: values() instead of Python-2-only itervalues()
            # (matches the other save() implementation in this file)
            for container in current_class._cache.values():
                container._sleep()
        data[current_class.__name__] = current_class._cache
    # Dump the cache object into file
    try:
        # Use temporary file to minimize the time during which
        # the real cache is inconsistent
        output_file = tempfile.NamedTemporaryFile(
            mode="wb", delete=False, prefix="nitrate-cache.",
            dir=os.path.dirname(self._filename))
        log.cache("Temporary cache file: {0}".format(output_file.name))
        output_file = gzip.open(output_file.name, "wb")
        log.debug("Saving persistent cache into {0}".format(
            self._filename))
        pickle.dump(data, output_file)
        output_file.close()
        os.rename(output_file.name, self._filename)
        log.debug("Persistent cache successfully saved")
    except IOError as error:
        log.error("Failed to save persistent cache ({0})".format(error))
def save(self):
    """
    Save caches to specified file

    Collects every class cache into one dictionary (container objects
    are first put into id-sleep), then pickles it gzipped into the
    cache file via a temporary file. Does nothing when persistent
    caching is turned off.
    """
    # Persistent caching disabled ---> nothing to save
    if not self._filename or get_cache_level() < config.CACHE_PERSISTENT:
        return
    # Drop expired items, then gather all caches into one dictionary
    self.expire()
    log.cache("Cache dump stats:\n" + self.stats().strip())
    dump = {}
    for klass in self._classes:
        # Container classes go into id-sleep before pickling
        if issubclass(klass, containers.Container):
            for container in klass._cache.values():
                container._sleep()
        dump[klass.__name__] = klass._cache
    # Serialize the gathered caches into the file
    try:
        # Write into a temporary file first so that the real cache
        # file is inconsistent for as short a time as possible
        handle = tempfile.NamedTemporaryFile(
            mode="wb", delete=False, prefix="nitrate-cache.",
            dir=os.path.dirname(self._filename))
        log.cache("Temporary cache file: {0}".format(handle.name))
        handle = gzip.open(handle.name, "wb")
        log.debug("Saving persistent cache into {0}".format(
            self._filename))
        pickle.dump(dump, handle)
        handle.close()
        os.rename(handle.name, self._filename)
        log.debug("Persistent cache successfully saved")
    except IOError as error:
        log.error("Failed to save persistent cache ({0})".format(error))
def load(self):
    """
    Load caches from specified file

    Reads the gzipped pickle dump. An empty or missing cache file is
    not an error (it will be created on exit); any other load failure
    downgrades the cache level to CACHE_OBJECTS and releases the lock.
    No-op when persistent caching is disabled.
    """
    # Nothing to do when persistent caching is off
    if not self._filename or get_cache_level() < config.CACHE_PERSISTENT:
        return
    # Load the saved cache from file
    try:
        log.debug("Loading persistent cache from {0}".format(
            self._filename))
        input_file = gzip.open(self._filename, 'rb')
        data = pickle.load(input_file)
        input_file.close()
    except EOFError:
        log.cache("Cache file empty, will fill it upon exit")
        return
    # Fixed: Python 3 requires 'as' to bind the exception
    # (the old "except ..., error" comma syntax is a SyntaxError)
    except (IOError, zlib.error) as error:
        # errno 2 == ENOENT --- missing file is expected on first run
        if getattr(error, "errno", None) == 2:
            log.warn("Cache file not found, will create one on exit")
            return
        else:
            log.error("Failed to load the cache ({0})".format(error))
            log.warn("Going on but switching to the CACHE_OBJECTS level")
            set_cache_level(config.CACHE_OBJECTS)
            self.unlock()
            return
def _fetch(self, inset=None):
    """
    Fetch case runs from the server

    Tries the Teiid database first and falls back to the xmlrpc call
    when Teiid is not configured or the query fails. When possible the
    TestRun.testcases container is seeded from already-cached test
    cases to avoid an extra fetch.
    """
    # Initial object set provided ---> nothing more to do
    if Container._fetch(self, inset):
        return
    # Pull the case run data, preferring the Teiid shortcut
    log.info("Fetching {0}'s case runs".format(self._identifier))
    try:
        injects = self._teiid.run_case_runs(self.id)
    except teiid.TeiidNotConfigured:
        injects = self._server.TestRun.get_test_case_runs(self.id)
    except psycopg2.DatabaseError as error:
        log.debug("Failed to fetch data from Teiid: {0}".format(error))
        injects = self._server.TestRun.get_test_case_runs(self.id)
    # Seed the TestRun.testcases container with the initial object
    # set when every referenced test case is already cached
    caseids = [inject["case_id"] for inject in injects]
    if (not RunCases._is_cached(self._object.testcases)
            and TestCase._is_cached(caseids)):
        self._object.testcases._fetch([TestCase(id) for id in caseids])
    # Build the initial object set by pairing injects with test cases
    self._current = set(
        CaseRun(inject, testcaseinject=testcase)
        for inject in injects
        for testcase in self._object.testcases._items
        if int(inject["case_id"]) == testcase.id)
    self._original = set(self._current)
def __init__(self):
    """
    Initialize the connection if Teiid configured

    Reads database, user, password, host and port from the config's
    teiid section. Missing configuration leaves self.connection as
    None; a failed connect raises TeiidError.
    """
    # Pull connection details from the config; bail out when absent
    config = Config()
    try:
        database = config.teiid.database
        user = config.teiid.user
        password = config.teiid.password
        host = config.teiid.host
        port = config.teiid.port
    except AttributeError:
        log.debug("Teiid not configured, skipping db connection")
        self.connection = None
        return
    # Open the database connection
    log.debug("Connecting as {0} to database {1} at {2}:{3}".format(
        user, database, host, port))
    try:
        self.connection = psycopg2.connect(
            database=database, user=user, password=password,
            host=host, port=port)
    except psycopg2.DatabaseError as error:
        log.error("Teiid connect error: {0}".format(error))
        raise TeiidError("Failed to connect to the Teiid instance")
    # Isolation level 0 is psycopg2's autocommit mode
    self.connection.set_isolation_level(0)
def _fetch(self, inset=None):
    """
    Fetch case runs from the server

    Gathers case run data via Teiid when available (falling back to
    xmlrpc otherwise), optionally primes the TestRun.testcases
    container from the cache, and pairs every inject with its test
    case object to form the initial set.
    """
    # Inset supplied ---> the container is already initialized
    if Container._fetch(self, inset):
        return
    # Gather the case run data, trying Teiid before xmlrpc
    log.info("Fetching {0}'s case runs".format(self._identifier))
    try:
        injects = self._teiid.run_case_runs(self.id)
    except teiid.TeiidNotConfigured:
        injects = self._server.TestRun.get_test_case_runs(self.id)
    except psycopg2.DatabaseError as error:
        log.debug("Failed to fetch data from Teiid: {0}".format(error))
        injects = self._server.TestRun.get_test_case_runs(self.id)
    # Prime the TestRun.testcases container from the cache when all
    # referenced cases are cached (saves an unnecessary fetch)
    testcaseids = [inject["case_id"] for inject in injects]
    if (not RunCases._is_cached(self._object.testcases)
            and TestCase._is_cached(testcaseids)):
        self._object.testcases._fetch(
            [TestCase(id) for id in testcaseids])
    # Pair each inject with its matching test case object
    caseruns = []
    for inject in injects:
        for testcase in self._object.testcases._items:
            if int(inject["case_id"]) == testcase.id:
                caseruns.append(CaseRun(inject, testcaseinject=testcase))
    self._current = set(caseruns)
    self._original = set(self._current)
def _fetch(self, inset=None):
    """
    Fetch currently attached tags from the server

    Skips the server round trip entirely when the container was
    already initialized from the given inset.
    """
    # Data already initialized from the inset ---> finished
    if Container._fetch(self, inset):
        return
    log.info("Fetching tags for {0}".format(self._identifier))
    injects = self._server.TestCase.get_tags(self.id)
    log.debug(pretty(injects))
    # Build the current set and keep an independent original copy
    self._current = {Tag(inject) for inject in injects}
    self._original = set(self._current)
def load(self):
    """
    Load caches from specified file

    Unpickles the gzipped cache dump and restores class caches:
    immutable & mutable classes first, containers last (prevents
    object duplicates), waking containers from id-sleep. Missing or
    empty cache files are tolerated; other failures downgrade the
    cache level to CACHE_OBJECTS and release the lock.
    """
    # Nothing to do when persistent caching is off
    if not self._filename or get_cache_level() < config.CACHE_PERSISTENT:
        return
    # Load the saved cache from file
    try:
        log.debug("Loading persistent cache from {0}".format(
            self._filename))
        input_file = gzip.open(self._filename, 'rb')
        data = pickle.load(input_file)
        input_file.close()
    except EOFError:
        log.cache("Cache file empty, will fill it upon exit")
        return
    except (IOError, zlib.error) as error:
        # errno 2 == ENOENT --- missing file is expected on first run
        if getattr(error, "errno", None) == 2:
            log.warn("Cache file not found, will create one on exit")
            return
        else:
            log.error("Failed to load the cache ({0})".format(error))
            log.warn("Going on but switching to the CACHE_OBJECTS level")
            set_cache_level(config.CACHE_OBJECTS)
            self.unlock()
            return
    # Restore cache for immutable & mutable classes first
    for current_class in self._immutable + self._mutable:
        try:
            log.cache("Loading cache for {0}".format(
                current_class.__name__))
            current_class._cache = data[current_class.__name__]
        except KeyError:
            log.cache("Failed to load cache for {0}, starting "
                      "with empty".format(current_class.__name__))
            current_class._cache = {}
    # Containers to be loaded last (to prevent object duplicates)
    for current_class in self._containers:
        try:
            log.cache("Loading cache for {0}".format(
                current_class.__name__))
            current_class._cache = data[current_class.__name__]
        except KeyError:
            log.cache("Failed to load cache for {0}, starting "
                      "with empty".format(current_class.__name__))
            current_class._cache = {}
        # Wake up container objects from the id-sleep
        # Fixed: values() instead of Python-2-only itervalues()
        # (matches the other load() implementation in this file)
        for container in current_class._cache.values():
            container._wake()
    # Clear expired items and give a short summary for debugging
    self.expire()
    log.cache("Cache restore stats:\n" + self.stats().strip())
def load(self):
    """
    Load caches from specified file

    Reads the gzipped pickle dump and restores each class cache.
    Immutable & mutable classes are restored before containers (so
    that no duplicate objects appear), container objects are then
    woken up from their id-sleep. Missing or empty cache files are
    fine; any other failure switches to the CACHE_OBJECTS level.
    """
    # Persistent caching disabled ---> nothing to restore
    if not self._filename or get_cache_level() < config.CACHE_PERSISTENT:
        return
    # Read the pickled caches from the gzipped file
    try:
        log.debug("Loading persistent cache from {0}".format(
            self._filename))
        input_file = gzip.open(self._filename, 'rb')
        data = pickle.load(input_file)
        input_file.close()
    except EOFError:
        log.cache("Cache file empty, will fill it upon exit")
        return
    except (IOError, zlib.error) as error:
        if getattr(error, "errno", None) == 2:
            log.warn("Cache file not found, will create one on exit")
            return
        else:
            log.error("Failed to load the cache ({0})".format(error))
            log.warn("Going on but switching to the CACHE_OBJECTS level")
            set_cache_level(config.CACHE_OBJECTS)
            self.unlock()
            return

    def restore(klass):
        """ Restore one class cache (empty when absent from the dump) """
        try:
            log.cache("Loading cache for {0}".format(klass.__name__))
            klass._cache = data[klass.__name__]
        except KeyError:
            log.cache("Failed to load cache for {0}, starting "
                      "with empty".format(klass.__name__))
            klass._cache = {}

    # Restore cache for immutable & mutable classes first
    for klass in self._immutable + self._mutable:
        restore(klass)
    # Containers go last (prevents object duplicates); wake their
    # objects up from the id-sleep right after restoring
    for klass in self._containers:
        restore(klass)
        for container in klass._cache.values():
            container._wake()
    # Drop expired items and log a short summary for debugging
    self.expire()
    log.cache("Cache restore stats:\n" + self.stats().strip())
def _fetch(self, inject=None):
    """
    Fetch user data from the server

    When no inject is given, searches by id, then login, then email;
    with none of those set, fetches the currently logged-in user.
    Raises NitrateError when no matching user is found.
    """
    Nitrate._fetch(self, inject)
    if inject is None:
        # Search by id
        if self._id is not NitrateNone:
            try:
                log.info("Fetching user " + self.identifier)
                inject = self._server.User.filter({"id": self.id})[0]
            except IndexError:
                # Empty filter result ---> no such user
                raise NitrateError(
                    "Cannot find user for " + self.identifier)
        # Search by login
        elif self._login is not NitrateNone:
            try:
                log.info(
                    "Fetching user for login '{0}'".format(self.login))
                inject = self._server.User.filter(
                    {"username": self.login})[0]
            except IndexError:
                raise NitrateError("No user found for login '{0}'".format(
                    self.login))
        # Search by email
        elif self._email is not NitrateNone:
            try:
                log.info("Fetching user for email '{0}'".format(
                    self.email))
                inject = self._server.User.filter({"email": self.email})[0]
            except IndexError:
                raise NitrateError("No user found for email '{0}'".format(
                    self.email))
        # Otherwise initialize to the current user
        else:
            log.info("Fetching the current user")
            inject = self._server.User.get_me()
            # Extra index key so the current user can be looked up
            # without knowing the id in advance
            self._index("i-am-current-user")
    # Initialize data from the inject and index into cache
    log.debug("Initializing user UID#{0}".format(inject["id"]))
    log.data(pretty(inject))
    self._inject = inject
    self._id = inject["id"]
    self._login = inject["username"]
    self._email = inject["email"]
    # Full name is set only when both name parts are present
    if inject["first_name"] and inject["last_name"]:
        self._name = inject["first_name"] + " " + inject["last_name"]
    else:
        self._name = None
    self._index(self.login, self.email)
def _fetch(self, inset=None):
    """
    Save cache timestamp and initialize from inset if given

    Stores two independent copies of the inset and caches the
    container itself when object caching is enabled. Returns True
    when the data came pre-initialized from the inset.
    """
    Nitrate._fetch(self)
    initialized = inset is not None
    if initialized:
        # Keep two independent copies of the initial set
        log.debug("Initializing {0} for {1} from the inset".format(
            self.__class__.__name__, self._identifier))
        log.debug(pretty(inset))
        self._current = set(inset)
        self._original = set(inset)
    # Cache the container itself when object caching is enabled
    if config.get_cache_level() >= config.CACHE_OBJECTS:
        self.__class__._cache[self._id] = self
    # True ---> data were already initialized from the inset
    return initialized
def _fetch(self, inset=None):
    """
    Fetch case runs from the server

    Tries the Teiid database first, falling back to the xmlrpc call
    when Teiid is not configured or the query fails. Returns early
    when the container was initialized from the inset.
    """
    # If data initialized from the inset ---> we're done
    if Container._fetch(self, inset):
        return
    # Fetch test case runs from the server
    log.info("Fetching {0}'s case runs".format(self._identifier))
    try:
        injects = self._teiid.run_case_runs(self.id)
    except teiid.TeiidNotConfigured:
        injects = self._server.TestRun.get_test_case_runs(self.id)
    # Fixed: Python 3 requires 'as' to bind the exception
    # (the old "except ..., error" comma syntax is a SyntaxError)
    except psycopg2.DatabaseError as error:
        log.debug("Failed to fetch data from Teiid: {0}".format(error))
        injects = self._server.TestRun.get_test_case_runs(self.id)
def _fetch(self, inset=None):
    """
    Fetch test run cases from the server

    Prefers the Teiid database and falls back to the xmlrpc call
    when Teiid is not configured or the query fails.
    """
    # Already initialized from the inset ---> done
    if Container._fetch(self, inset):
        return
    # Gather attached test cases, trying the Teiid shortcut first
    log.info("Fetching {0}'s test cases".format(self._identifier))
    try:
        injects = self._teiid.run_cases(self.id)
    except teiid.TeiidNotConfigured:
        injects = self._server.TestRun.get_test_cases(self.id)
    except psycopg2.DatabaseError as error:
        log.debug("Failed to fetch data from Teiid: {0}".format(error))
        injects = self._server.TestRun.get_test_cases(self.id)
    # Build the current set plus an independent original copy
    self._current = {TestCase(inject) for inject in injects}
    self._original = set(self._current)
def _fetch(self, inset=None):
    """
    Save cache timestamp and initialize from inset if given

    When an inset is supplied, two separate copies of it are stored.
    The container itself is indexed into the class-level cache when
    object caching is enabled. Returns True if data came from inset.
    """
    Nitrate._fetch(self)
    # Initial set supplied ---> store two separate copies of it
    if inset is not None:
        log.debug("Initializing {0} for {1} from the inset".format(
            self.__class__.__name__, self._identifier))
        log.debug(pretty(inset))
        self._current, self._original = set(inset), set(inset)
    # Index this container into the class-level cache
    if config.get_cache_level() >= config.CACHE_OBJECTS:
        self.__class__._cache[self._id] = self
    # Report whether the data were initialized from the inset
    return inset is not None
def _server(self):
    """
    Connection to the server

    Lazily opens one shared xmlrpc connection. Plain authentication
    is used when username & password are configured; an AttributeError
    (missing credentials in the config) switches to Kerberos.
    """
    # Establish the connection on first use only
    if Nitrate._connection is None:
        log.debug(u"Contacting server {0}".format(
            Config().nitrate.url))
        try:
            # Username & password present ---> plain authentication
            Nitrate._connection = xmlrpc_driver.NitrateXmlrpc(
                Config().nitrate.username,
                Config().nitrate.password,
                Config().nitrate.url).server
        except AttributeError:
            # Credentials missing from the config ---> Kerberos
            Nitrate._connection = xmlrpc_driver.NitrateKerbXmlrpc(
                Config().nitrate.url).server
    # Count the request and hand out the shared connection
    Nitrate._requests += 1
    return Nitrate._connection
def _server(self):
    """
    Connection to the server

    Creates the shared xmlrpc connection on first access. Credentials
    in the config select plain authentication; when they are missing
    (AttributeError) Kerberos is used instead.
    """
    # Lazily create the shared connection on first access
    if Nitrate._connection is None:
        log.debug(u"Contacting server {0}".format(
            Config().nitrate.url))
        try:
            # Plain authentication when username & password are set
            Nitrate._connection = xmlrpc.NitrateXmlrpc(
                Config().nitrate.username,
                Config().nitrate.password,
                Config().nitrate.url).server
        except AttributeError:
            # No credentials configured ---> Kerberos
            Nitrate._connection = xmlrpc.NitrateKerbXmlrpc(
                Config().nitrate.url).server
    # Bump the request counter and return the cached connection
    Nitrate._requests += 1
    return Nitrate._connection
def setup(self, filename=None):
    """
    Set cache filename and initialize expiration times

    The cache file comes from the argument or, when not given, from
    the config; a missing config entry just logs a warning. Per-class
    expiration times are then read from the config's expiration
    section (integers mean seconds, the strings NEVER_EXPIRE and
    NEVER_CACHE select the corresponding special values).
    """
    # Persistent caching off ---> no setup needed
    if get_cache_level() < config.CACHE_PERSISTENT:
        return
    # Cache filename: explicit argument wins over the config value
    if filename is not None:
        self._filename = filename
    else:
        try:
            self._filename = config.Config().cache.file
        except AttributeError:
            log.warn("Persistent caching off "
                     "(cache filename not found in the config)")
    self._lock = "{0}.lock".format(self._filename)
    # Pick up user-defined expiration times from the config
    classes = self._classes + [
        Nitrate, mutable.Mutable, containers.Container]
    for klass in classes:
        try:
            expiration = getattr(
                config.Config().expiration, klass.__name__.lower())
        except AttributeError:
            continue
        # Plain integers mean seconds; handle the two special strings
        if isinstance(expiration, int):
            expiration = datetime.timedelta(seconds=expiration)
        elif expiration == "NEVER_EXPIRE":
            expiration = config.NEVER_EXPIRE
        elif expiration == "NEVER_CACHE":
            expiration = config.NEVER_CACHE
        # Apply valid values, warn about anything else
        if isinstance(expiration, datetime.timedelta):
            klass._expiration = expiration
            log.debug("User defined expiration for {0}: {1}".format(
                klass.__name__, expiration))
        else:
            log.warn("Invalid expiration time '{0}'".format(expiration))
def _fetch(self, inject=None):
    """
    Get the missing test plan type data

    Uses the inject when given, otherwise looks the plan type up by
    id or by name. Raises NitrateError when nothing is found.
    """
    Nitrate._fetch(self, inject)
    if inject is not None:
        # Initial object dict provided directly
        log.info("Processing PlanType ID#{0} inject".format(inject["id"]))
    elif self._id is not NitrateNone:
        # Look up by the test plan type id
        try:
            log.info("Fetching test plan type " + self.identifier)
            inject = self._server.TestPlan.get_plan_type(self.id)
        except xmlrpclib.Fault as error:
            log.debug(error)
            raise NitrateError(
                "Cannot find test plan type for " + self.identifier)
    else:
        # Look up by the test plan type name
        try:
            log.info(u"Fetching test plan type '{0}'".format(self.name))
            inject = self._server.TestPlan.check_plan_type(self.name)
        except xmlrpclib.Fault as error:
            log.debug(error)
            raise NitrateError("PlanType '{0}' not found".format(
                self.name))
    # Store the fetched data and index the object into the cache
    log.debug("Initializing PlanType ID#{0}".format(inject["id"]))
    log.data(pretty(inject))
    self._inject = inject
    self._id = inject["id"]
    self._name = inject["name"]
    self._index(self.name)
def _fetch(self, inject=None):
    """
    Fetch version data from the server

    Uses the inject when given, otherwise searches by version id or
    by product & version name. Raises NitrateError when not found.
    """
    Nitrate._fetch(self, inject)
    if inject is not None:
        # Data supplied directly in the initial object dict
        log.debug("Processing Version ID#{0} inject".format(inject["id"]))
    elif self._id is not NitrateNone:
        # Look up the version by its id
        try:
            log.info("Fetching version {0}".format(self.identifier))
            inject = self._server.Product.filter_versions(
                {'id': self.id})[0]
        except IndexError:
            raise NitrateError(
                "Cannot find version for {0}".format(self.identifier))
    else:
        # Look up the version by product and name
        try:
            log.info(u"Fetching version '{0}' of '{1}'".format(
                self.name, self.product.name))
            inject = self._server.Product.filter_versions(
                {'product': self.product.id, 'value': self.name})[0]
        except IndexError:
            raise NitrateError(
                "Cannot find version for '{0}'".format(self.name))
    # Store the fetched data
    log.debug("Initializing Version ID#{0}".format(inject["id"]))
    log.data(pretty(inject))
    self._inject = inject
    self._id = inject["id"]
    self._name = inject["value"]
    self._product = Product(inject["product_id"])
    # Index by product & version name when the product name is cached,
    # otherwise by id only
    if self.product._name is not NitrateNone:
        self._index("{0}---in---{1}".format(self.name, self.product.name))
    else:
        self._index()
def setup(self, filename=None):
    """
    Set cache filename and initialize expiration times

    Determines the cache file (argument over config; missing config
    entry only warns) and applies user-defined expiration times from
    the config. Integer values are interpreted as seconds, the
    strings NEVER_EXPIRE / NEVER_CACHE map to the special constants.
    """
    # No setup needed unless persistent caching is enabled
    if get_cache_level() < config.CACHE_PERSISTENT:
        return
    # Determine the cache file: argument has priority over the config
    if filename is not None:
        self._filename = filename
    else:
        try:
            self._filename = config.Config().cache.file
        except AttributeError:
            log.warn("Persistent caching off "
                     "(cache filename not found in the config)")
    self._lock = "{0}.lock".format(self._filename)
    # Apply user-defined expiration times from the config section
    for current_class in self._classes + [
            Nitrate, mutable.Mutable, containers.Container]:
        try:
            expiration = getattr(
                config.Config().expiration, current_class.__name__.lower())
        except AttributeError:
            # No custom expiration configured for this class
            continue
        # Integers are seconds; translate the special string values
        if isinstance(expiration, int):
            expiration = datetime.timedelta(seconds=expiration)
        elif expiration == "NEVER_EXPIRE":
            expiration = config.NEVER_EXPIRE
        elif expiration == "NEVER_CACHE":
            expiration = config.NEVER_CACHE
        # Only timedelta values are accepted in the end
        if isinstance(expiration, datetime.timedelta):
            current_class._expiration = expiration
            log.debug("User defined expiration for {0}: {1}".format(
                current_class.__name__, expiration))
        else:
            log.warn("Invalid expiration time '{0}'".format(expiration))
def __init__(self):
    """
    Initialize the connection if Teiid configured

    Reads database, user, password, host and port from the config's
    teiid section. Missing configuration leaves self.connection as
    None; a failed connect raises TeiidError.
    """
    # Fetch connection data from the config, bail out if missing
    config = Config()
    try:
        database = config.teiid.database
        user = config.teiid.user
        password = config.teiid.password
        host = config.teiid.host
        port = config.teiid.port
    except AttributeError:
        log.debug("Teiid not configured, skipping db connection")
        self.connection = None
        return
    # Initialize the connection
    log.debug("Connecting as {0} to database {1} at {2}:{3}".format(
        user, database, host, port))
    try:
        self.connection = psycopg2.connect(
            database=database, user=user, password=password,
            host=host, port=port)
    # Fixed: Python 3 requires 'as' to bind the exception
    # (the old "except ..., error" comma syntax is a SyntaxError)
    except psycopg2.DatabaseError as error:
        log.error("Teiid connect error: {0}".format(error))
        raise TeiidError("Failed to connect to the Teiid instance")
def _fetch(self, inject=None):
    """
    Get the missing build data

    Uses the inject when given, otherwise searches by build id or by
    build name & product. Raises NitrateError when no build matches.
    """
    Nitrate._fetch(self, inject)
    # Directly fetch from the initial object dict
    if inject is not None:
        log.info("Processing build ID#{0} inject".format(
            inject["build_id"]))
    # Search by build id
    elif self._id is not NitrateNone:
        try:
            log.info("Fetching build " + self.identifier)
            inject = self._server.Build.get(self.id)
        except xmlrpclib.Fault as error:
            log.debug(error)
            raise NitrateError(
                "Cannot find build for " + self.identifier)
    # Search by build name and product
    else:
        try:
            log.info(u"Fetching build '{0}' of '{1}'".format(
                self.name, self.product.name))
            inject = self._server.Build.check_build(
                self.name, self.product.id)
            self._id = inject["build_id"]
        except xmlrpclib.Fault as error:
            log.debug(error)
            raise NitrateError("Build '{0}' not found in '{1}'".format(
                self.name, self.product.name))
        except KeyError:
            # check_build() returned a dict without "build_id" ---
            # presumably an error response; any details seem to come
            # in the "args" key (TODO confirm against the server API)
            if "args" in inject:
                log.debug(inject["args"])
            raise NitrateError("Build '{0}' not found in '{1}'".format(
                self.name, self.product.name))
    # Initialize data from the inject and index into cache
    log.debug("Initializing Build ID#{0}".format(inject["build_id"]))
    log.data(pretty(inject))
    self._inject = inject
    self._id = inject["build_id"]
    self._name = inject["name"]
    self._product = Product(
        {"id": inject["product_id"], "name": inject["product"]})
    self._index("{0}---in---{1}".format(self.name, self.product.name))
def _fetch(self, inject=None):
    """
    Get the missing category data

    Uses the inject when given, otherwise searches by category id or
    by category name & product. Raises NitrateError when not found.
    """
    Nitrate._fetch(self, inject)
    if inject is not None:
        # Initial object dict supplied directly
        log.info("Processing category ID#{0} inject".format(inject["id"]))
    elif self._id is not NitrateNone:
        # Look up by category id
        try:
            log.info("Fetching category {0}".format(self.identifier))
            inject = self._server.Product.get_category(self.id)
        except xmlrpclib.Fault as error:
            log.debug(error)
            raise NitrateError(
                "Cannot find category for " + self.identifier)
    else:
        # Look up by category name and product
        try:
            log.info(u"Fetching category '{0}' of '{1}'".format(
                self.name, self.product.name))
            inject = self._server.Product.check_category(
                self.name, self.product.id)
        except xmlrpclib.Fault as error:
            log.debug(error)
            raise NitrateError("Category '{0}' not found in"
                               " '{1}'".format(self.name, self.product.name))
    # Store the fetched data and index the object into the cache
    log.debug("Initializing category ID#{0}".format(inject["id"]))
    log.data(pretty(inject))
    self._inject = inject
    self._id = inject["id"]
    self._name = inject["name"]
    self._product = Product(
        {"id": inject["product_id"], "name": inject["product"]})
    self._index("{0}---in---{1}".format(self.name, self.product.name))
def _fetch(self, inject=None):
    """
    Fetch product data from the server

    Uses the inject when given, otherwise searches by product id or
    by product name. Raises NitrateError when no product matches.
    """
    Nitrate._fetch(self, inject)
    if inject is not None:
        # Data came in the initial object dict
        log.debug("Initializing Product ID#{0}".format(inject["id"]))
        log.data(pretty(inject))
        self._id = inject["id"]
        self._name = inject["name"]
    elif self._id is not NitrateNone:
        # Look up the product by its id
        try:
            log.info("Fetching product " + self.identifier)
            inject = self._server.Product.filter({'id': self.id})[0]
            log.debug("Initializing product " + self.identifier)
            log.data(pretty(inject))
            self._inject = inject
            self._name = inject["name"]
        except IndexError:
            # Empty filter result ---> no such product
            raise NitrateError(
                "Cannot find product for " + self.identifier)
    else:
        # Look up the product by its name
        try:
            log.info(u"Fetching product '{0}'".format(self.name))
            inject = self._server.Product.filter({'name': self.name})[0]
            log.debug(u"Initializing product '{0}'".format(self.name))
            log.data(pretty(inject))
            self._inject = inject
            self._id = inject["id"]
        except IndexError:
            raise NitrateError(
                "Cannot find product for '{0}'".format(self.name))
    # Index the fetched object into cache
    self._index(self.name)
def _fetch(self, inject=None):
    """
    Fetch tag data from the server

    Uses the inject when given, otherwise searches by tag id or tag
    name. Raises NitrateError when the tag cannot be found.
    """
    Nitrate._fetch(self, inject)
    if inject is not None:
        # Initial object dict provided directly
        log.debug("Initializing Tag ID#{0}".format(inject["id"]))
        log.data(pretty(inject))
        self._id = inject["id"]
        self._name = inject["name"]
    elif self._id is not NitrateNone:
        # Look up the tag by its id
        try:
            log.info("Fetching tag " + self.identifier)
            inject = self._server.Tag.get_tags({'ids': [self.id]})
            log.debug("Initializing tag " + self.identifier)
            log.data(pretty(inject))
            self._inject = inject
            self._name = inject[0]["name"]
        except IndexError:
            # Empty result list ---> no such tag
            raise NitrateError(
                "Cannot find tag for {0}".format(self.identifier))
    else:
        # Look up the tag by its name
        try:
            log.info(u"Fetching tag '{0}'".format(self.name))
            inject = self._server.Tag.get_tags({'names': [self.name]})
            log.debug(u"Initializing tag '{0}'".format(self.name))
            log.data(pretty(inject))
            self._inject = inject
            self._id = inject[0]["id"]
        except IndexError:
            raise NitrateError(
                "Cannot find tag '{0}'".format(self.name))
    # Index the fetched object into cache
    self._index(self.name)