def update_collects(self, collect_id, error_list, status, status_only=False):
    """Update the status (and optionally the status message) of a collects row.

    :param collect_id: Collect id of entry in collects table
    :param error_list: List of errors, if any, that occurred while processing
    :param status: status message after processing the collect
    :param status_only: flag to determine only status needs to be updated
    :return: True if collects is updated, else False
    """
    self.__logger.info("Executing update_collects function")
    try:
        # Choose the query and its parameters up front so one execute
        # call covers both branches.
        if status_only:
            update_query = "update collects set status=%s where id=%s"
            self.__logger.debug('Running query: %s' % update_query % (status, collect_id))
            params = (status, collect_id)
        else:
            update_query = "update collects set status=%s, status_message=%s where id=%s"
            self.__logger.debug('Running query: %s' % update_query % (status, error_list, collect_id))
            params = (status, json.dumps({'errors': error_list}), collect_id)
        with DBUtils((self.__config['application'])) as db_conn:
            db_conn.execute_nonquery(update_query, params)
        return True
    except Exception as ex:
        self.__logger.error("Could not update collects table {0}".format(str(ex)))
        return False
class SharedManager:
    """Facade used to export snippet rows into a brand-new database file."""

    def __init__(self):
        # helper that knows how to create database files
        self.__bdmanager = DBUtils()

    def exportSnippet(self, path, snippets):
        u'''Receives a db as a list of lists and inserts it into a new DB

        >>> exportSnippet(bd,"ale.db")
        True
        '''
        # create the target database file, then open it for inserts
        self.__bdmanager.newDataBase(path)
        self.__sql = sqlite(path)
        # column name -> position of that value inside each snippet row
        column_map = {
            "title": 0,
            "language": 1,
            "contens": 2,
            "tags": 3,
            "description": 4,
            "creation": 5,
            "starred": 6,
            "reference": 7,
            "modified": 8,
            "uploader": 9,
        }
        self.__sql.realizarAltas('snippet', column_map, snippets)
        return True
def __init__(self, config, query_path, sql_subs, db_connection_string):
    """Sets up a new category of BNA destinations and retrieves data
    from the given db table

    config -- dictionary of config settings (usually from yaml passed to
        parent BNA object)
    query_path -- path to the SQL file for destination calculations
    sql_subs -- dictionary of SQL substitutes from the main BNA

    return: None
    """
    DBUtils.__init__(self, db_connection_string)
    self.config = config
    self.query_path = query_path
    self.category = self.config["name"]

    # split the configured table into schema/table; when no schema was
    # given, look it up in the database
    self.schema, self.table = self.parse_table_name(self.config["table"])
    if self.schema is None:
        self.schema = self.get_schema(self.table)

    self.method = self.config["method"]

    # derived column names for high-stress / low-stress / score results
    self.hs_column_name = "{}_hs".format(self.category)
    self.ls_column_name = "{}_ls".format(self.category)
    self.score_column_name = "{}_score".format(self.category)

    # self.set_destinations()
    self.query = self._choose_query(config, sql_subs)
def __init__(self, config=None, verbose=False, debug=False,
             host=None, db_name=None, user=None, password=None):
    """
    Reads the config file and sets up a connection to the database

    args
    config -- path to the config file
    verbose -- output useful messages
    debug -- set to debug mode
    host -- hostname or address (overrides the config file if given)
    db_name -- name of database on server (overrides the config file if given)
    user -- username to connect to database (overrides the config file if given)
    password -- password to connect to database (overrides the config file if given)
    """
    Conf.__init__(self)
    self.verbose = verbose
    self.debug = debug
    self.module_dir = os.path.dirname(os.path.abspath(__file__))
    if config is None:
        config = os.path.join(self.module_dir, "config.yaml")
    # FIX: use a context manager so the config file handle is closed
    # instead of leaking (was a bare open() inline)
    with open(config) as config_file:
        self.config = self.parse_config(yaml.safe_load(config_file))
    print("Connecting to database")
    # explicit arguments win over values from the config file
    if host is None:
        host = self.config.db.host
    if db_name is None:
        db_name = self.config.db.dbname
    if user is None:
        user = self.config.db.user
    if password is None:
        password = self.config.db.password
    # NOTE(review): the credential parts of this join were redacted in the
    # original source ('"user="******"host="' is not valid Python);
    # reconstructed from the surrounding key=value pattern -- confirm
    # against version control.
    db_connection_string = " ".join([
        "dbname=" + db_name,
        "user=" + user,
        "host=" + host,
        "password=" + password,
    ])
    if self.debug:
        print("DB connection: %s" % db_connection_string)
    DBUtils.__init__(self, db_connection_string, self.verbose, self.debug)
    self.sql_subs = self.make_bna_substitutions(self.config)
    # mi/km
    if "units" in self.config:
        if self.config.units == "mi":
            self.km = False
        elif self.config.units == "km":
            self.km = True
        else:
            raise ValueError("Invalid units \"{}\" in config".format(
                self.config.units))
    else:
        self.km = False
def __init__(self, shell, entry_point, sc, sqlContext, displayHTML):
    """Wrap Databricks DBUtils, supporting both the old (2-arg) and the
    new (5-arg) constructor signatures.

    shell -- the IPython shell
    entry_point -- py4j gateway entry point
    sc, sqlContext, displayHTML -- extra arguments DBUtils requires on
        newer runtimes (>= 8.3)
    """
    # ugly, but not possible to differentiate <= 8.2 from >= 8.3:
    # try the old signature and fall back to the new one.
    # FIX: narrowed from a bare "except:" so KeyboardInterrupt/SystemExit
    # are no longer swallowed.
    try:
        self._dbutils = DBUtils(shell, entry_point)
    except Exception:
        self._dbutils = DBUtils(shell, entry_point, sc, sqlContext, displayHTML)
    self.fs = self._dbutils.fs
    self.secrets = self._dbutils.secrets
    self.notebook = Notebook()
def __init__(self):
    """Initialize the DBUtils base with an empty connection string; all
    remaining attributes are filled in later by the connecting caller."""
    DBUtils.__init__(self, "")
    # declare every attribute up front so the instance shape is stable
    for attr in ("config", "net_config", "verbose", "debug", "srid",
                 "net_blocks", "module_dir", "db_connectivity_table",
                 "db_connection_string"):
        setattr(self, attr, None)
def __init__(self):
    """Build the manager with its helper objects and load the list of
    known snippet databases."""
    # helper class instances
    self._DBUtils = DBUtils()
    self._PT = PathTools()
    self._Configs = Configurations()

    # private state
    self._BD = None
    # dictionary holding every Snippet object instance
    self._Snippets = None
    # snippet currently shown in the GUI
    self._SnippetActual = None  # Snippet

    # tells whether this snippetmanager instance was created correctly
    # with a given database, or created empty -- in which case no
    # operation can be performed on the application
    self._estado = False

    # paths of every database, both those in the default path and those
    # referenced by the cfg file
    self._AllPathDBs = []
    self.loadAllPathDBs()

    # default database name to load, if one is configured
    defaultBdName = self._Configs.defaultBdName
def get_global_settings(self, organization_id):
    """
    Function to get default settings from configurations table.

    :param organization_id: String
    :return: (True, settings) on success, else (False, errors dict)
    """
    q_resource_settings = """select settings from resource_settings rs, organizations org where rs.resource_id = org.id and resource_type = 'ORGANIZATION' and setting_group = 'global_settings' and org.orgid = %s """
    self.__logger.info("Getting global_settings")
    with DBUtils(self.__config['application']) as db_conn:
        self.__logger.debug('Connected to database')
        self.__logger.debug("Running Query: %s" % q_resource_settings % (organization_id,))
        result = db_conn.execute_query(q_resource_settings, (organization_id,))
        self.__logger.debug("Result received: %s" % result)
    # Checking global_settings for the given organization
    if result is None or len(result) == 0:
        self.__logger.warning("No global_settings set for organization : %s." % organization_id)
        self.__logger.debug("Using default settings")
        default_query = """select * from get_config_json( %s, %s)"""
        try:
            with DBUtils(self.__config['application']) as db_conn:
                self.__logger.debug('Connected to database')
                self.__logger.debug("Running Query: %s" % default_query % (organization_id, "DefaultSettings"))
                default_settings = db_conn.execute_query(default_query, (organization_id, "DefaultSettings"))
                self.__logger.debug("Result received: %s" % default_settings)
            # Sanity Check
            if default_settings is None or len(default_settings) == 0:
                error = "No global settings found"
                errors = {"error_msg": [error], "identifiers": ['']}
                self.__logger.error(error)
                return False, errors
            settings = default_settings[0].get('get_config_json')
            self.__logger.debug("Result global_settings: {0}".format(str(settings)))
            return True, settings
        except Exception as ex:
            error = "Could not get default settings"
            errors = {"error_msg": [error], "identifiers": ['']}
            # FIX: the original log string contained a raw newline that
            # split the literal across two source lines; collapsed into
            # one literal.
            self.__logger.error("Could not get default settings. Exception: {0}".format(ex))
            return False, errors
    else:
        settings = result[0].get('settings')
        self.__logger.debug("Result global_settings: {0}".format(str(settings)))
        return True, settings
def test_connect_success(self):
    """Test to check validate successful connection"""
    expected = ('PostgreSQL 9.5.4 on x86_64-pc-linux-gnu, '
                'compiled by gcc (GCC) 4.8.2 20140120 '
                '(Red Hat 4.8.2-16), 64-bit')
    with DBUtils(json.loads(DBCONFIG)) as dbobj:
        rows = dbobj.execute_query('select version();')
    self.assertEqual(rows[0].get('version'), expected)
def show_stage_complete_screen(self):
    """Render the stage-complete banner, record the score, and show the
    top-3 high-score table for a fixed number of frames."""
    screen_w, screen_h = GameSpecs.SCREEN_SIZE
    center_x = screen_w/2
    y = screen_h/2
    PermanentText("Stage Complete!", (center_x, 270), self)
    PermanentText("Score\t" + str(self.score).zfill(6), (center_x, 300), self)
    PermanentText("TOP SCORES", (center_x, y), self)
    # persist this run's score, then fetch the leaderboard
    DBUtils.record_new_score(self.score, "Prime User")
    for score, name in DBUtils.get_top_3_scores():
        y += 30
        PermanentText(name.ljust(15) + "\t" + str(score).zfill(6), (center_x, y), self)
    # redraw for a fixed number of frames while still honoring
    # restart/quit input
    for _ in range(3000):
        self.screen.fill((0, 0, 0))
        for text in self.text_sprites:
            text.draw(self.screen, text.get_position())
        pygame.display.flip()
        self.check_restart_quit()
def execute_query(self, built_query, parameters=None):
    """
    Function to execute query with parameters

    :param built_query: Query to execute
    :param parameters: parameters to be used
    :return: result of query, or empty list
    """
    with DBUtils((self.__config['application'])) as db_conn:
        if parameters:
            rows = db_conn.execute_query(built_query, parameters)
        else:
            rows = db_conn.execute_query(built_query)
        self.__logger.debug("Ran query: {0}".format(db_conn.query))
    return rows
def get_config_details(self, collect_id):
    """Fetch the collects-table row for the given collect id.

    :param collect_id: collect id that was used in collects table
    :return: the configuration row (dict) on success, else None
    """
    self.__logger.debug("Executing function get_config_details for collect_id: {0}".format(collect_id))
    with DBUtils((self.__config['application'])) as db_conn:
        rows = db_conn.execute_query(self.__collect_details_query, (collect_id,))
        self.__logger.debug("Ran query: {0}".format(db_conn.query))
    if not rows:
        self.__logger.error("No data found in collects table for collect_id: {0}".format(collect_id))
        self.__logger.info('Exiting script.')
        return None
    # list of dicts -- only the first row is relevant
    first_row = rows[0]
    self.__logger.debug("Result: {0}".format(str(first_row)))
    return first_row
def get_default_configurations(self, organization_id, config_name):
    """Look up a named configuration for an organization.

    :param organization_id: Organization id against which configuration is required
    :param config_name: Name of configuration
    :return: True and config dict if success, else False and error message
    """
    with DBUtils((self.__config['application'])) as db_conn:
        rows = db_conn.execute_query('select get_config_json as default_config from '
                                     'get_config_json(%s, %s)',
                                     (organization_id, config_name))
        self.__logger.debug("Ran query: {0}".format(db_conn.query))
    if not rows:
        error_msg = "No default configuration found in configurations table for config type: 'FilterFields'"
        errors = {"error_msg": [error_msg], "identifiers":['']}
        self.__logger.error(error_msg)
        return False, errors
    default_config = rows[0]
    self.__logger.debug("Result FilterFields: {0}".format(str(default_config)))
    return True, default_config
def get_import_configuration(self, config_id, file_type):
    """
    Function to get import configuration

    :param config_id: config id that was used in collects
    :param file_type: Machine, Storage, Physical Machine or Virtual Machine
    :return: (True, configuration dict) on success, else (False, errors dict)
    """
    self.__logger.debug("Executing function get_import_configuration")
    # The default (config_id == 0) queries only differ by the JSON key and
    # entity type, so build them from a single template; resulting SQL is
    # identical to the three hand-written variants it replaces.
    default_types = {
        'Virtual Machine': ('vm', 'virtual_machine'),
        'Physical Machine': ('physical', 'physical_machine'),
        'License': ('license', 'license'),
    }
    if config_id == 0 and file_type in default_types:
        json_key, entity_type = default_types[file_type]
        query = ("select get_config_json->>'%s' as mapped_headers,'t' as is_first_row_header,"
                 " '%s' as import_entity_type, 0 as headers_count, 'comma' as "
                 "column_separator from get_config_json('default','MappedHeaders')"
                 % (json_key, entity_type))
    else:
        query = "select is_first_row_header, import_entity_type, mapped_headers, headers_count, " \
                "column_separator, update_only from import_configurations where config_id=%s"
    with DBUtils((self.__config['application'])) as dbConn:
        if config_id:
            import_configuration_result = dbConn.execute_query(query, (config_id,))
        else:
            import_configuration_result = dbConn.execute_query(query)
        self.__logger.debug("Ran query: {0}".format(dbConn.query))
    if not import_configuration_result:
        error = "Configuration not found"
        self.__logger.error("Configuration not found for config id: {}".format(config_id))
        errors = {"error_msg": [error], "identifiers": ['']}
        return False, errors
    import_configuration = import_configuration_result[0]
    # FIX: the original debug string contained a raw newline that split the
    # literal across two source lines; collapsed into one literal.
    self.__logger.debug("Result import_configuration: {0}".format(str(import_configuration)))
    return True, import_configuration
def __init__(self, shell, entry_point):
    """Wrap the Databricks DBUtils entry point and expose its fs,
    secrets and notebook helpers as attributes."""
    dbutils = DBUtils(shell, entry_point)
    self._dbutils = dbutils
    self.fs = dbutils.fs
    self.secrets = dbutils.secrets
    self.notebook = Notebook()
def __init__(self):
    # Initialize the DBUtils base with an empty connection string; the
    # real connection is configured later by the caller.
    DBUtils.__init__(self, "")
    # SQL substitution dictionary for segments; populated later.
    self.segment_subs = None
def __init__(self, config=None, force_net_build=False, verbose=False,
             debug=False, host=None, db_name=None, user=None, password=None):
    """Connects to the BNA database

    kwargs:
    config -- path to the config file, if not given use the default config.yaml
    force_net_build -- force a rebuild of the network even if an existing one is found
    verbose -- output useful messages
    debug -- set to debug mode
    host -- hostname or address (overrides the config file if given)
    db_name -- name of database on server (overrides the config file if given)
    user -- username to connect to database (overrides the config file if given)
    password -- password to connect to database (overrides the config file if given)

    return: pyBNA object
    """
    Destinations.__init__(self)
    Connectivity.__init__(self)
    Core.__init__(self)
    Conf.__init__(self)
    self.verbose = verbose
    self.debug = debug
    self.module_dir = os.path.dirname(os.path.abspath(__file__))
    if config is None:
        config = os.path.join(self.module_dir, "config.yaml")
    # FIX: use a context manager so the config file handle is closed
    # instead of leaking (was a bare open() inline)
    with open(config) as config_file:
        self.config = self.parse_config(yaml.safe_load(config_file))
    # express max_detour as a multiplicative factor (e.g. 25 -> 1.25)
    self.config["bna"]["connectivity"]["max_detour"] = float(
        100 + self.config["bna"]["connectivity"]["max_detour"]) / 100
    self.db_connectivity_table = self.config["bna"]["connectivity"]["table"]
    self.net_config = self.config["bna"]["network"]
    # km/mi
    if "units" in self.config:
        if self.config.units == "mi":
            self.km = False
        elif self.config.units == "km":
            self.km = True
        else:
            raise ValueError("Invalid units \"{}\" in config".format(
                self.config.units))
    else:
        self.km = False
    if self.verbose:
        print("")
        print("---------------pyBNA---------------")
        print(" Create and test BNA scenarios")
        print("-----------------------------------")
        print("")
    # set up db connection
    print("Connecting to database")
    if host is None:
        host = self.config["db"]["host"]
    if db_name is None:
        db_name = self.config["db"]["dbname"]
    if user is None:
        user = self.config["db"]["user"]
    if password is None:
        password = self.config["db"]["password"]
    # NOTE(review): the credential parts of this join were redacted in the
    # original source ('"user="******"host="' is not valid Python);
    # reconstructed from the key=value pattern used elsewhere in this
    # project -- confirm against version control.
    db_connection_string = " ".join([
        "dbname=" + db_name,
        "user=" + user,
        "host=" + host,
        "password=" + password,
    ])
    if self.debug:
        print("DB connection: %s" % db_connection_string)
    DBUtils.__init__(self, db_connection_string, self.verbose, self.debug)
    # srid
    if "srid" in self.config:
        self.srid = self.config["srid"]
    elif not self.debug:
        self.srid = self.get_srid(self.config.bna.blocks.table)
    # destinations
    self.destinations = dict()
    self.destination_blocks = set()
    if not self.debug:
        pass
        # self.set_destinations()
    self.sql_subs = self.make_bna_substitutions(self.config)
    if force_net_build:
        print("Building network tables in database")
        self.build_network()
    elif self.debug:
        pass
    elif not self.check_network():
        print("Network tables not found in database...building")
        self.build_network()
    elif self.verbose:
        print("Network tables found in database")
def dbcontext(progressbar=True):
    """Create a databricks context
    The following objects will be created
    - Spark Session
    - Spark Context
    - Spark Hive Context
    - DBUtils (fs module only)

    Args:
        progressbar (bool, optional): If True the spark progressbar will be
            installed. Defaults to True.
    """

    def get_sparkui_url(host, organisation, clusterId):
        # Workspaces with an organisation id need the ?o= query argument.
        if organisation is None:
            sparkUi = "%s#/setting/clusters/%s/sparkUi" % (host, clusterId)
        else:
            sparkUi = "%s/?o=%s#/setting/clusters/%s/sparkUi" % (
                host, organisation, clusterId)
        return sparkUi

    def show_status(spark, sparkUi):
        # Render a small HTML summary of the Spark connection in the notebook.
        output = """
        <div>
        <dl>
        <dt>Spark Version</dt><dd>{sc.version}</dd>
        <dt>Spark Application</dt><dd>{sc.appName}</dd>
        <dt>Spark UI</dt><dd><a href="{sparkUi}">go to ...</a></dd>
        </dl>
        </div>
        """.format(
            sc=spark.sparkContext,
            sparkUi=get_sparkui_url(host, organisation, clusterId),
            num_executors=len(spark.sparkContext._jsc.sc().statusTracker().
                              getExecutorInfos()),
        )
        display(HTML(output))

    # Get the configuration injected by the client
    #
    profile = os.environ.get("DBJL_PROFILE", None)
    host = os.environ.get("DBJL_HOST", None)
    clusterId = os.environ.get("DBJL_CLUSTER", None)
    organisation = os.environ.get("DBJL_ORG", None)

    sparkUi = get_sparkui_url(host, organisation, clusterId)

    if not is_remote():
        return "This is not a remote Databricks kernel"

    ip = get_ipython()
    spark = ip.user_ns.get("spark")
    if spark is not None:
        # A context already exists in this kernel; just refresh the status UI.
        print("Spark context already exists")
        load_css()
        show_status(spark, sparkUi)
        return None

    # Create a Databricks virtual python environment and start thew py4j gateway
    #
    token = getpass.getpass(
        "Creating a Spark execution context:\nEnter personal access token for profile '%s'"
        % profile)

    try:
        command = Command(url=host, cluster_id=clusterId, token=token)
    except DatabricksApiException as ex:
        print(ex)
        return None
    print("Gateway created for cluster '%s' " % (clusterId), end="", flush=True)

    # Fetch auth_token and gateway port ...
    #
    cmd = 'c=sc._gateway.client.gateway_client; print(c.gateway_parameters.auth_token + "|" + str(c.port))'
    result = command.execute(cmd)
    if result[0] != 0:
        print(result[1])
        return None

    auth_token, port = result[1].split("|")
    port = int(port)

    interpreter = "/databricks/python/bin/python"
    # Ensure that driver and executors use the same python
    #
    os.environ["PYSPARK_PYTHON"] = interpreter
    os.environ["PYSPARK_DRIVER_PYTHON"] = interpreter

    # ... and connect to this gateway
    #
    gateway = get_existing_gateway(port, True, auth_token)
    print(". connected")
    # print("Python interpreter: %s" % interpreter)

    # Retrieve spark session, sqlContext and sparkContext
    #
    conf = SparkConf(_jconf=gateway.entry_point.getSparkConf())
    sqlContext = RemoteContext(gateway=gateway, conf=conf)
    sqlContext = HiveContext(sqlContext, gateway.entry_point.getSQLContext())
    spark = sqlContext.sparkSession
    sc = spark.sparkContext

    # Enable pretty printing of dataframes
    #
    spark.conf.set("spark.sql.repl.eagerEval.enabled", "true")

    # Define a separate pool for the fair scheduler
    # Todo: Find a better way to store pool_id instead of this hack
    #
    job_info = JobInfo(str(random.getrandbits(64)))

    # Patch the remote spark UI into the _repr_html_ call
    #
    def repr_html(uiWebUrl):
        def sc_repr_html():
            return """
            <div>
            <p><b>SparkContext</b></p>
            <p><a href="{uiWebUrl}">Spark UI</a></p>
            <dl>
            <dt>Version</dt><dd><code>v{sc.version}</code></dd>
            <dt>AppName</dt><dd><code>{sc.appName}</code></dd>
            <dt>Master</dt><dd><code>{sc.master}</code></dd>
            </dl>
            </div>
            """.format(sc=spark.sparkContext, uiWebUrl=uiWebUrl)
        return sc_repr_html

    sc_repr_html = repr_html(sparkUi)
    sc._repr_html_ = sc_repr_html

    # Monkey patch Databricks Cli to allow mlflow tracking with the credentials provided
    # by this routine
    # Only necessary when mlflow is installed
    #
    try:
        from databricks_cli.configure.provider import ProfileConfigProvider, DatabricksConfig

        def get_config(self):
            config = DatabricksConfig(host, None, None, token, False)
            if config.is_valid:
                return config
            return None

        # NOTE(review): bare except below silently skips the patch when
        # databricks_cli is absent; deliberate best-effort behavior.
        ProfileConfigProvider.get_config = get_config
    except:
        pass

    # Initialize the ipython shell with spark context
    #
    shell = get_ipython()
    shell.sc = sc
    shell.sqlContext = sqlContext
    shell.displayHTML = lambda html: display(HTML(html))

    # Retrieve the py4j gateway entrypoint
    #
    entry_point = spark.sparkContext._gateway.entry_point

    # Initialize dbutils
    #
    dbutils = DBUtils(shell, entry_point)

    # Setting up Spark progress bar
    #
    if progressbar:
        # print("Set up Spark progress bar")
        load_progressbar(ip, sc, job_info)
        load_css()

    # Register sql magic
    #
    ip.register_magic_function(sql, magic_kind="line_cell")

    # Ensure that the virtual python environment and py4j gateway gets shut down
    # when the python interpreter shuts down
    #
    def shutdown_kernel(command):
        def handler():
            from IPython import get_ipython
            ip = get_ipython()
            ip = get_ipython()
            if ip.user_ns.get("spark", None) is not None:
                del ip.user_ns["spark"]
            if ip.user_ns.get("sc", None) is not None:
                del ip.user_ns["sc"]
            if ip.user_ns.get("sqlContext", None) is not None:
                del ip.user_ns["sqlContext"]
            if ip.user_ns.get("dbutils", None) is not None:
                del ip.user_ns["dbutils"]
            # Context is a singleton
            command.close()
        return handler

    atexit.register(shutdown_kernel(command))

    # Forward spark variables to the user namespace
    #
    ip.user_ns["spark"] = spark
    ip.user_ns["sc"] = sc
    ip.user_ns["sqlContext"] = sqlContext
    ip.user_ns["dbutils"] = dbutils
    ip.user_ns["dbbrowser"] = DatabricksBrowser(spark, dbutils)

    print("The following global variables have been created:")
    print("- spark Spark session")
    print("- sc Spark context")
    print("- sqlContext Hive Context")
    print("- dbutils Databricks utilities (filesystem access only)")
    print("- dbbrowser Allows to browse dbfs and databases:")
    print(" - dbbrowser.dbfs()")
    print(" - dbbrowser.databases()\n")

    show_status(spark, sparkUi)
    return None
class SnippetManagerBase:
    ''' Wrapper class between the program's logic classes and the
    Fragmentos class.'''

    def __init__(self):
        # helper class instances
        self._DBUtils = DBUtils()
        self._PT = PathTools()
        self._Configs = Configurations()
        # private instances
        self._BD = None
        # dictionary holding every Snippet object instance
        self._Snippets = None
        # snippet currently shown in the GUI
        self._SnippetActual = None  # Snippet
        # tells whether this snippetmanager instance was created correctly
        # with a given database, or created empty -- meaning no operation
        # can be performed on the application
        self._estado = False
        # paths of every database, both those in the default path and
        # those referenced by the cfg file
        self._AllPathDBs = []
        self.loadAllPathDBs()
        # default database name to load, if one is configured
        defaultBdName = self._Configs.defaultBdName

    ##########################
    ##  Instance methods    ##
    ##########################

    def agregarSnippet(self, datosSnippet):
        ''' Receives a dictionary with the data of what will become a new
        snippet and adds it to the BD.'''
        # call the db method that adds a snippet; it returns the result of
        # the operation as a boolean plus an error message on failure
        resultado, mensaje = self._BD.agregarSnippet(datosSnippet)
        if resultado:
            # create an instance for the new snippet
            newSnippet = Snippet(datosSnippet, self._BD)
            # add the new snippet to the existing ones
            self._addNewSnippetToCollection(newSnippet)
            # operation succeeded; no error message
            return True, None
        else:
            # operation failed; propagate the error message from the db
            return False,mensaje

    def eliminarSnippet(self, unSnippet):
        ''' Asks the Bd's eliminarSnippet to delete the snippet matching
        its title and language.'''
        # calls the db delete method and returns a boolean with the
        # result of the operation
        if self._BD.eliminarSnippet(
        unSnippet.titulo, unSnippet.lenguaje):
            # remove the snippet from the dictionary
            self._Snippets.pop((unSnippet.lenguaje, unSnippet.titulo))
            # set the current snippet to None
            self._SnippetActual = None
            return True
        else:
            return False

    def modificarSnippet(self, clave_spviejo, snippet_nuevo):
        ''' Refreshes the snippet loaded in memory.'''
        del self._Snippets[clave_spviejo]
        self._Snippets[
            (snippet_nuevo.lenguaje,snippet_nuevo.titulo)
            ] = snippet_nuevo

    def newSnippet(self, tuplaSnippet):
        ''' Creates a Snippet instance from a tuple of db values. '''
        # build a Snippet instance from the values carried in the tuple
        nuevoSnippet = Snippet({
            'title':tuplaSnippet[0],
            'language':tuplaSnippet[1],
            'tags':tuplaSnippet[2],
            'contens':tuplaSnippet[3],
            'description':tuplaSnippet[4],
            'creation':tuplaSnippet[5],
            'reference':tuplaSnippet[6],
            'modified':tuplaSnippet[7],
            'uploader':tuplaSnippet[8],
            'starred':tuplaSnippet[9]},
            self._BD)
        # tuple used as the key in the snippets dictionary
        clave = (tuplaSnippet[1],tuplaSnippet[0])
        elemento_diccionario = (clave,nuevoSnippet)
        return elemento_diccionario

    def _addNewSnippetToCollection(self, newSnippet):
        ''' Adds the newly created snippet to the current snippet
        collection.
        '''
        self._Snippets.update(
            {(newSnippet.lenguaje, newSnippet.titulo):newSnippet})

    #################
    ## Get methods ##
    #################

    def getAllLenguajes(self):
        ''' Gets the list of languages from the bd.'''
        # fetch the existing languages from the current db instance
        all_lenguajes = self._BD.getLenguajes()
        lenguajes = []
        # unpack the tuples and load the obtained languages into the list
        #~ for lenguaje in all_lenguajes:
        #~ lenguajes.append(lenguaje[0])
        map(lambda lenguaje: lenguajes.append(lenguaje[0]), all_lenguajes)
        return lenguajes

    def getBDNames(self):
        ''' Gets a list with the names of the database files.'''
        databases_dir = self._DBUtils.getBDsNamesDatabasesDir()
        cfg_file = self._Configs.getDBsNamesCFGReferences()
        couch_dbs = [name + ' [CouchDB]'for name in self._Configs.getNamesCouch()]
        return databases_dir + cfg_file + couch_dbs

    def getDB(self):
        # current database instance (may be None when created empty)
        return self._BD

    def getAllSnippets(self):
        ''' Gets the snippets from the bd and loads them into a
        dictionary as Snippet() objects.'''
        # field order as returned by the db:
        # 1-title,2-language,3-tags,4-contens,5-description
        # 6-creation,7-reference,8-modified,9-uploader,10-starred
        all_snippets = self._BD.getAllSnippets()
        # map builds, for each tuple obtained from the db, a tuple of
        # tuples whose resulting format is:
        # (claveSnippet : instanciaSnippet)
        todos_los_snippets = map(self.newSnippet,all_snippets)
        # dict() converts the tuple of tuples into a dictionary
        return dict(todos_los_snippets)

    def getInstanceState(self):
        ''' Returns whether this instance was created with a usable db. '''
        return self._estado

    def getIndexBdName(self, bdName):
        ''' Searches the db list for the first db matching the name given
        in <bdName>, returning the position where it is found.
        NOTE(review): the original doc claimed -1 when missing, but
        list.index raises ValueError instead -- confirm callers handle it.
        '''
        return self._AllPathDBs.index(bdName)

    def getLengsAndTitles(self,consulta=None, favorito = None):
        ''' Gets the snippets grouped by language from the bd.'''
        #~ tagsPresicion = bool(self._DBUtils.configs.searchPresitionTags)
        tagsPresicion = False
        return self._BD.getLengAndTitles(consulta, favorito, tagsPresicion)

    def getSnippet(self, lenguaje, titulo):
        ''' Gets a snippet by its language and title.
        '''
        try:
            # from the dictionary holding every snippet instance, recover
            # the one stored under the (language, title) key
            snippet = self._Snippets[(lenguaje,titulo)]
            # set the obtained instance as the one currently in use
            self.setSnippetActual(snippet)
        except Exception, msg:
            # if the snippet is not in the dictionary, return None
            snippet = None
            self.setSnippetActual(snippet)
            print 'getSnippet Error: ',msg
        return snippet
import os from dotenv import load_dotenv, find_dotenv from odoa import ODOA from telegram.ext import Updater, CommandHandler from dbutils import DBUtils odoa = ODOA() load_dotenv(find_dotenv()) logging.basicConfig( format='%(asctime)s - %(name)s - %(levelname)s - %(message)s', level=logging.INFO, ) logger = logging.getLogger(__name__) db = DBUtils(db_name=os.getenv('DB')) def get_surah(): surah = odoa.get_random_surah() ayah = surah.ayah.decode('utf8') description = surah.desc translate = surah.translate message = f'{description}\n\n{ayah}\n\n{translate}' return message def start_handler(bot, update): username = update.message.from_user.username message = (f'Hi {username},\n\n' f'SaHaDa akan mengirimkan 2 surat beserta terjemahan setiap '
def __init__(self):
    # Create the database helper this object delegates all db work to.
    self.__bdmanager = DBUtils()
def __init__(self):
    """Initialize the DBUtils base with an empty connection string; the
    remaining attributes are supplied later by the caller."""
    DBUtils.__init__(self, "")
    # declare the attribute set up front so the instance shape is stable
    for attr in ("config", "verbose", "debug", "srid"):
        setattr(self, attr, None)
def test_connect_incorrect_creds(self):
    """Test to validate incorrect credentials"""
    # connecting with bad credentials must raise an OperationalError
    bad_config = json.loads(DBCONFIG_WRONGCRED)
    with self.assertRaises(psycopg2.OperationalError):
        with DBUtils(bad_config) as dbobj:
            pass
create index site_index on filtered_data_index(site_type); create index keyword_index on filtered_data_index(keyword); create index timestamp_index on filtered_data_index(timestamp); create index site_sum_index on sumdata_byday(site_type); create index keyword_sum_index on sumdata_byday(keyword); create index timestamp_sum_index on sumdata_byday(timestamp); """ import os, sys from datetime import datetime from dbutils import DBUtils from strutils import get_timestamp print datetime.now() if len(sys.argv) < 2: timestamp = get_timestamp(day_delta=1) else: timestamp = sys.argv[1] db = DBUtils() c = db.execute_sql( "select count(*),site_type,keyword,timestamp where timestamp=%s group by keyword,site_type" % timestamp ) for one in c.fetchall(): count, site_type, timestamp = one values = {"timestamp": timestamp, "count": count, "keyword": keyword, "site_type": site_type} db.insert("sumdata_byday", values) db.close()
blogurl varchar(255) NULL, blogt varchar(255) NULL, date DATE NULL, time TIME NULL, url varchar(255) NULL, keyword varchar(255) NULL, title varchar(255) NULL, article TEXT NULL );\n create index site_index_%(timestamp)s on %(table_name)s(site);\n create index date_index_%(timestamp)s on %(table_name)s(date);\n """ DATA_ROOT = "/data1/dspider/data/bak/%s/" % timestamp db = DBUtils() db.execute_sql(CREATE_SQL % {"table_name": table_name, "timestamp": timestamp}) db.close() db = DBUtils() for pathname in os.listdir(DATA_ROOT): path = os.path.join(DATA_ROOT, pathname) for filename in os.listdir(path): data_file = os.path.join(path, filename) try: f = open(data_file, "r") column = {} while 1: line = f.readline() if not line: f.close()
for word in keywords: if title.find(word) != -1 or article.find(word) != -1: values = {'keywords':word,'site_type':site_type,'timestamp':timestamp,'index_table':table_name,'index_id':data_id} print values.items() db.insert(table='filtered_data_index',values=values) start_time = datetime.now() if len(sys.argv) < 2: timestamp = get_timestamp(day_delta=1) else: timestamp = sys.argv[1] table_name = "raw_data_%s" % timestamp db = DBUtils() keywords = [] c = db.select(table='brand') for one in c.fetchall(): id,name,buss = one keywords.append(name) site_types = {} c = db.select(table='sites') for one in c.fetchall(): id,name,url,site_type = one site_types[url] = site_type print site_types.items() cursor = db.select(table=table_name,columns=["count(*)"])
def __init__(self, config=None, create_lookups=True, verbose=False):
    """
    Reads the config file, sets up a connection

    args
    config -- path to the config file, if not given use the default config.yaml
    create_lookups -- creates lookup tables in the db if none are found
    verbose -- output useful messages
    """
    Conf.__init__(self)
    self.verbose = verbose
    self.module_dir = os.path.dirname(os.path.abspath(__file__))
    if config is None:
        config = os.path.join(self.module_dir, "config.yaml")
    # FIX: use a context manager so the config file handle is closed
    # instead of leaking (was a bare open() inline)
    with open(config) as config_file:
        self.config = self.parse_config(yaml.safe_load(config_file))
    print("Connecting to database")
    host = self.config.db.host
    db_name = self.config.db.dbname
    user = self.config.db.user
    password = self.config.db.password
    # NOTE(review): this section was redacted in the original source
    # ('"user="******"host="' is not valid Python); the connection-string
    # join and the DBUtils base-class init are reconstructed from the
    # sibling __init__ in this project -- confirm against version control.
    db_connection_string = " ".join([
        "dbname=" + db_name,
        "user=" + user,
        "host=" + host,
        "password=" + password,
    ])
    DBUtils.__init__(self, db_connection_string)
    if self.verbose:
        print("Checking lookup tables")
    missing = self._missing_lookup_tables()
    if create_lookups and len(missing) > 0:
        for t in missing:
            self._create_lookup_table(*t)
    # add functions to db
    self._run_sql_script("bna_CompareAzimuths.sql", dict(),
                         dirs=["sql", "stress", "db_functions"])
    self._run_sql_script("bna_IsCorridor.sql", dict(),
                         dirs=["sql", "stress", "db_functions"])
    self._run_sql_script("bna_MultiEndPoint.sql", dict(),
                         dirs=["sql", "stress", "db_functions"])
    self._run_sql_script("bna_MultiStartPoint.sql", dict(),
                         dirs=["sql", "stress", "db_functions"])
    # build SQL substitutions for each travel direction
    self.segment_subs = dict()
    for direction in [FORWARD_DIRECTION, BACKWARD_DIRECTION]:
        self.segment_subs[
            direction] = self._build_segment_sql_substitutions(direction)
    self.crossing_subs = dict()
    for direction in [FORWARD_DIRECTION, BACKWARD_DIRECTION]:
        self.crossing_subs[
            direction] = self._build_crossing_sql_substitutions(direction)