Example 1 (score: 0)
    def invalidate_db_cache(self,
                            name=None,
                            schema=None,
                            cascade=True,
                            drop=True):
        """
        Helper function for store; drops this query's cached table and
        (by default) any that depend on it, as well as removing them
        from the cache metadata table.

        Parameters
        ----------
        name : str, optional
            Name of an additional table to drop. If None, only this
            query's own cache table is affected.
        schema : str, optional
            Schema of `name`.
        cascade : bool, default True
            Set to False to remove only this table from cache.
        drop : bool, default True
            Set to False to remove the cache record without dropping
            the table.
        """
        with rlock(self.redis, self.md5):
            con = self.connection.engine
            try:
                # Collect dependents before deleting this query's
                # record, so we can cascade afterwards.
                deps = self.connection.fetch(
                    """SELECT obj FROM cache.cached LEFT JOIN cache.dependencies
                    ON cache.cached.query_id=cache.dependencies.query_id
                    WHERE depends_on='{}'""".format(self.md5))
                with con.begin():
                    con.execute("DELETE FROM cache.cached WHERE query_id=%s",
                                (self.md5, ))
                    logger.debug("Deleted cache record for {}.".format(
                        self.table_name))
                    if drop:
                        # Identifiers cannot be bound parameters;
                        # table_name is derived from the query hash,
                        # not user input.
                        con.execute("DROP TABLE IF EXISTS {}".format(
                            self.table_name))
                        logger.debug("Dropped cache for {}.".format(
                            self.table_name))

                if cascade:
                    for rec in deps:
                        dep = pickle.loads(rec[0])
                        logger.debug(
                            "Cascading to {} from cache record for {}.".format(
                                dep.table_name, self.table_name))
                        dep.invalidate_db_cache()
                else:
                    logger.debug("Not cascading to dependents.")
            except NotImplementedError:
                logger.info("Table has no standard name.")
            # Only drop the explicitly named table when one was given;
            # previously the default name=None produced
            # "DROP TABLE IF EXISTS None".
            if name is not None:
                if schema is not None:
                    full_name = "{}.{}".format(schema, name)
                else:
                    full_name = name
                logger.debug("Dropping {}".format(full_name))
                with con.begin():
                    con.execute("DROP TABLE IF EXISTS {}".format(full_name))
Example 2 (score: 0)
 def do_query():
     # Serialise cache writes for this query on its hash.
     logger.debug("Getting storage lock.")
     with rlock(self.redis, self.md5):
         logger.debug("Obtained storage lock.")
         engine = self.connection.engine
         if force and not as_view:
             # Clear any stale cache entry before re-storing.
             self.invalidate_db_cache(name, schema=schema)
         try:
             with engine.begin():
                 logger.debug("Using pandas to store.")
                 self._df.to_sql(name, engine, schema=schema, index=False)
                 if schema == "cache" and not as_view:
                     self._db_store_cache_metadata()
         except AttributeError:
             # No _df on objects that were reconstructed from the db.
             logger.debug(
                 "No dataframe to store, presumably because this"
                 " was retrieved from the db."
             )
     logger.debug("Released storage lock.")
     return self
Example 3 (score: 0)
    def get_query(self):
        """
        Return a string representing an SQL query.

        The string points to the database cache of this query when the
        cached table exists; otherwise the query is built afresh.

        Returns
        -------
        str
            SQL query string.

        """
        try:
            cached_table = self.table_name
            schema, name = cached_table.split(".")
            with rlock(self.redis, self.md5):
                table_exists = self.connection.has_table(schema=schema,
                                                         name=name)
                if table_exists:
                    return "SELECT * FROM {}".format(cached_table)
        except NotImplementedError:
            # Some query types have no standard table name; fall back
            # to constructing the query.
            pass
        return self._make_query()
Example 4 (score: 0)
 def do_query():
     # Serialise writes to this query's cache table.
     logger.debug("Getting storage lock.")
     with rlock(self.redis, self.md5):
         logger.debug("Obtained storage lock.")
         statements = self._make_sql(name,
                                     schema=schema,
                                     as_view=as_view,
                                     force=force)
         logger.debug("Made SQL.")
         engine = self.connection.engine
         if force and not as_view:
             # Drop any stale cache before re-creating it.
             self.invalidate_db_cache(name, schema=schema)
         with engine.begin():
             engine.execute(
                 """CREATE SCHEMA IF NOT EXISTS {}""".format(schema))
             for statement in statements:
                 engine.execute(statement)
             logger.debug("Executed queries.")
             if schema == "cache" and not as_view:
                 self._db_store_cache_metadata()
     logger.debug("Released storage lock.")
     return self
Example 5 (score: 0)
 def unlock(timer):
     # Append the integers 0..100 to `timer` while holding the lock.
     with rlock(dl.redis, dl.md5):
         timer.extend(range(101))