class db_connection:
    """Tunnelled connection to either MongoDB ('mongo') or PostgreSQL ('psql').

    The remote port is looked up in the module-level ``name_to_port`` map and
    reached through an SSH tunnel whose flavour depends on ``_config.SERVER``.
    ``self.engine`` is the raw driver connection, ``self.client`` the working
    handle (a Mongo collection or a psycopg2 cursor).
    """

    def __init__(self, service, collection='binance'):
        """Start the tunnel for *service* and open the database connection.

        :param service: 'mongo' or 'psql' (case-insensitive for port lookup).
        :param collection: Mongo collection name; ignored for psql.
        """
        self.service = service
        self.collection = collection
        self.port = name_to_port[service.lower()]
        # On the server itself no real tunnel is needed — use the null stub.
        if _config.SERVER:
            self.tunnel = server_null_tunnel(self.port)
        else:
            self.tunnel = tunnel_config(self.port)
        self.tunnel.start()
        self._connect()

    def _connect(self):
        """(Re)create ``self.engine`` and ``self.client`` over the live tunnel.

        Shared by ``__init__`` and ``reset_db_con`` (the original duplicated
        this logic verbatim in both places).
        """
        if self.service == 'mongo':
            # NOTE(review): DB_USER doubles as both the db name and authSource
            # in the URI — looks intentional but worth confirming.
            self.engine = MongoClient(
                "mongodb://%s:%[email protected]:%i/%s?authSource=%s" % (
                    _config.DB_USER, _config.DB_PW,
                    self.tunnel.local_bind_port,
                    _config.DB_USER, _config.DB_USER))
            self.client = self.engine['ehens86'][self.collection]
        elif self.service == 'psql':
            params = {
                'database': _config.DB_USER,
                'user': _config.DB_USER,
                'password': _config.DB_PW,
                'host': 'localhost',
                'port': self.tunnel.local_bind_port
            }
            self.engine = psycopg2.connect(**params)
            self.client = self.engine.cursor()

    def disconnect(self):
        """Close the database connection, then tear the tunnel down."""
        self.engine.close()
        self.tunnel.stop()

    def reset_db_con(self):
        """Rebuild engine/client over the existing tunnel after a drop."""
        self._connect()
# For each promo row in `idx`, resolve its packet's airline id via Mongo and
# collect the matching airline descriptions from MySQL.
query = ("SELECT tpl.id, tpl.description FROM template_facility_airlines tpl "
         "INNER JOIN airlines a ON tpl.template_airline_id = a.template_airline_id "
         "WHERE a.id = %s")

airline_list = defaultdict(list)
t = time.time()
with open('maskapai.csv', 'wb') as csvfile:
    spamwriter = csv.writer(csvfile, delimiter=' ', quotechar='"',
                            quoting=csv.QUOTE_MINIMAL)
    # Reuse one buffered cursor for the whole loop: the original opened a new
    # cursor on every iteration and only ever closed the last one.
    cursor = conn.cursor(buffered=True)
    try:
        for ridx in idx:
            air_id = db.umrahs.find_one(
                {'packet_id': ridx.get('packet_id')}).get('airline_id')
            cursor.execute(query, (air_id, ))
            for (tpl_id, name) in cursor:
                airline_list[str(ridx)].append(str(name))
    finally:
        cursor.close()
print(time.time())
print((time.time()) - t)
conn.close()
print(airline_list)
class Connector(object):
    """Two-way converter between a SQLite file (*.db) and a MongoDB database
    (fed from a *.json dump).  Python 2 code (uses `print` statements).

    `conn`/`cursor` point at the SOURCE store, `to_conn`/`to_cursor` at the
    TARGET store; which driver each pair holds depends on `db_type`.
    """

    def __init__(self, filepath):
        # db_name / db_type come from the file name: "<name>.<ext>"
        self.db_path = filepath
        self.db_name, self.db_type = self.db_path.split("/")[-1].split(".")
        #print self.db_type
        #self.filter = set(["data", "id"])
        #self.convert()
        self.__connect__()
        # Expose the converted "data"/"schema" collections of the TARGET
        # (for .db sources) or the SOURCE mongo db (for .json sources).
        if self.db_type == "db":
            self.data = self.to_cursor["data"]
            self.schema = self.to_cursor["schema"]
        else:
            self.data = self.cursor["data"]
            self.schema = self.cursor["schema"]

    def db_exists(self):
        """True if the backing store exists (mongo db name or sqlite file)."""
        if self.db_type == "json":
            return bool(self.db_name in self.conn.database_names())
        else:
            return bool(os.path.isfile(self.db_path))

    def format_data(self):
        '''Return the data collection as a cursor, projecting out _id.'''
        # NOTE(review): find({"_id": False}) FILTERS on _id == False; a
        # projection would be find({}, {"_id": False}) — confirm intent.
        return self.data.find({"_id":False})

    def __connect__(self):
        '''Open source and target connections according to db_type.'''
        if self.db_type == "json":
            # json ==> sqlite: source is mongo, target is a fresh sqlite file.
            self.conn = MongoClient('mongodb://localhost,localhost:27017')
            self.cursor = self.conn[str(self.db_name)]
            if self.db_exists():
                # start from a clean mongo database of the same name
                self.conn.drop_database(self.db_name)
                self.cursor = self.conn[str(self.db_name)]
            self.db_path = self.db_path.replace(".json", ".db")
            self.to_conn = sqlite3.connect(self.db_path)
            self.to_cursor = self.to_conn.cursor()
        elif self.db_type == "db":
            # db ==> mongo: source is sqlite, target is mongo.
            self.conn = sqlite3.connect(self.db_path)
            self.cursor = self.conn.cursor()
            self.to_conn = MongoClient('mongodb://localhost,localhost:27017')
            self.to_cursor = self.to_conn[str(self.db_name)]
        return (self.conn, self.cursor, self.to_conn, self.to_cursor)

    def import_json(self):
        """Load the JSON dump into self.schema / self.data."""
        with open(self.db_path, "r") as f:
            data = json.load(f)
        try:
            # NOTE(review): "shema" (sic) — the dump files presumably use this
            # misspelled key; do not "fix" without checking the data.
            self.schema = data["shema"]
        except KeyError:
            self.schema = None
        try:
            self.data = [n for n in data["data"]]
        except KeyError:
            # no "data" envelope: treat the whole document as the data
            self.data = data
        return self

    def store_schema(self):
        '''Dump the SQLite CREATE TABLE statements into a *_schema.txt file.'''
        if self.db_type == "db":
            logging.info("building db schema from sqlite to JSON")
            self.schema = defaultdict(dict)
            cmd = "SELECT sql from sqlite_master WHERE type = 'table'"
            txt = []
            for table in self.tables:
                cmd = "SELECT sql from sqlite_master WHERE type = 'table' and name = '%s';" %table
                txt.extend([line[0] for line in self.cursor.execute(cmd)])
            txt = "\n".join(txt)
            schema_f = "/".join(self.db_path.split(".")[:-1])+"_schema.txt"
            with open(schema_f, "w") as f:
                f.write(txt)
            self.__close__()

    def build_schema(self):
        '''Parse CREATE TABLE statements into a list of field descriptors and
        store them in the target mongo "schema" collection.'''
        if self.db_type == "db":
            logging.info("building db schema from sqlite to JSON")
            self.schema = []
            for tbl_name in self.tables:
                cmd = "SELECT sql from sqlite_master WHERE type = 'table' and name = '%s';" %tbl_name
                key = [line[0] for line in self.cursor.execute(cmd)][0]
                # normalise whitespace/backticks in the CREATE statement
                key = re.sub(r"\t|\n|\`", " ", key.decode("utf-8"))
                data = {tbl_name:[]}
                # column definitions live between the outer parentheses
                values = re.split(", ", re.split("\(|\)",key)[1])
                for item in values:
                    try:
                        k,v = item.split(" ") #fieldname, fieldtype
                        data[tbl_name].append({"fieldname":k, "fieldtype":v})
                    except:
                        # three tokens: fieldname, fieldtype, default value
                        k, v, default_value = item.split(" ")
                        data[tbl_name].append({"fieldname":k, "fieldtype":v, "default_value":default_value})
                self.schema.append(data)
            self.to_cursor["schema"].insert_many(self.schema)
        else:
            # retrieve from mongo (JSON -> sqlite direction not implemented)
            self.schema = self.to_cursor["schema"]
        return self.schema

    def select_tables(self):
        '''Collect the table names (sqlite) or document keys (mongo).'''
        logging.info("select tables")
        self.tables = []
        if self.db_type == "db":
            self.conn = sqlite3.connect(self.db_path)
            self.cursor = self.conn.cursor()
            # sqlite_master row layout: (type, name, tbl_name, ...) -> t[2]
            for t in self.cursor.execute("SELECT * FROM sqlite_master WHERE type='table'"):
                self.tables.append(t[2])
            #self.filter_tables()
        else:
            # NOTE(review): to verify — uses the keys of one sample document.
            self.tables = self.cursor["data"].find_one().keys()
            schema = []
            print self.cursor["schema"].find_one()
        return self.tables

    def filter_tables(self):
        '''Drop tables named "*cluster*" or whose schema has data+id keys.'''
        logging.info("filter tables")
        self.build_schema()
        # NOTE(review): pops from self.tables while enumerating it — indices
        # shift after each pop, so entries can be skipped; confirm/fix upstream.
        for xtable, tbl_name in enumerate(self.tables):
            if "cluster" in tbl_name:
                self.tables.pop(xtable)
            if set(["data", "id"]) <= set(self.schema[tbl_name].keys()):
                self.tables.pop(xtable)
        return self.tables

    def convert2mongo(self):
        """Convert the sqlite rows to dicts and bulk-insert them into mongo."""
        self.convert2json()
        try:
            self.conn = MongoClient('mongodb://localhost,localhost:27017')
            self.cursor = self.conn[str(self.db_name)]
        except:
            # NOTE(review): bare except; MongoClient rarely raises here since
            # connection is lazy — failures may surface later instead.
            logging.warning("Failed to connect to dbfile %s. No such a file" %self.db_name)
            print "Not connected to", self.db_name
            return {"data":[]}
        self.cursor["data"].insert_many(self.data)
        logging.info("Inserted %i rows into data collection of db %s" %(self.cursor["data"].count(), self.db_name))
        return self

    def convert2json(self):
        '''Convert tabular data into a list of dicts keyed by row id.'''
        logging.info("building db values to JSON")
        self.data = defaultdict(dict)
        for tbl_name in self.tables:
            # "id" is assumed to be the shared row identifier across tables
            ids = "SELECT id, data FROM %s" %tbl_name
            self.conn = sqlite3.connect(self.db_path)
            self.cursor = self.conn.cursor()
            for tid,data in self.cursor.execute(ids):
                if type(data) == int:
                    self.data[tid][tbl_name] = str(data)
                else:
                    self.data[tid][tbl_name] = data
        # flatten {id: row} into a plain list of row dicts for insert_many
        final_dt = []
        for rid, row in self.data.items():
            final_dt.append(row)
        self.data = final_dt
        self.__close__()
        return self

    def convert2sqlite(self):
        """JSON -> sqlite direction: not implemented yet."""
        for n in self.data["schema"].find():
            print n
        raise NotImplementedError

    def convert(self):
        """Run the full conversion pipeline for the detected direction."""
        print ("Converting")
        if self.db_exists():
            self.__connect__()
            self.select_tables()
            self.build_schema()
            #self.store_schema()
            if self.db_type == "db":
                self.convert2mongo()
                self.data = self.to_cursor["data"]
                self.schema = self.to_cursor["schema"]
            else:
                self.convert2sqlite()
                self.data = self.cursor["data"]
            self.__close__()
            return self
        else:
            raise Exception("Database not found")

    def __close__(self):
        '''Close the sqlite connection (mongo clients are left open).'''
        if self.db_type == "db":
            logging.info("Closing connection to db %s" %self.db_path)
            return self.conn.close()
requestResultObj = requests.get(siteDict['siteURL']) #search for searchStr searchPos = requestResultObj.text.find(siteDict['searchStr']) #log result status = "Down" msg = "" if searchPos > -1: status = "Up" else: msg = "Unable to find " + siteDict['searchStr'] + " for " + siteDict['siteURL'] with dbConn: if dbType == "MySQL": dbCur = dbConn.cursor() dbCur.execute("INSERT INTO weblog (siteName, status, msg) VALUES(%s, %s, %s)", (sectionName,status,msg)) if dbType == "MongoDB": weblog = dbCur.weblog weblogEntryObj = { "siteName": sectionName, "status": status, "msg": msg, "created": datetime.datetime.utcnow() } weblog.insert(weblogEntryObj) if status == "Down":
class DatabaseHulk:
    """Thin facade over either a MongoDB or a MySQL backend (Python 2 code:
    uses `print` statements and the `long` type).

    SECURITY NOTE(review): every MySQL query below is built by string
    concatenation of table/column names and values — SQL injection risk if
    any argument comes from untrusted input; should use parameterized
    queries. Several methods also use mutable default args (`constraints={}`).
    The Italian exit() messages are runtime strings and are kept verbatim.
    """

    def __init__(self, dbCredentials):
        # dbCredentials keys used: dbtype, dbname, and for mysql also
        # host / user / password.
        self.dbType = dbCredentials['dbtype']
        if self.dbType != 'mongo' and self.dbType != 'mysql':
            # NOTE(review): exit() with two args prints them as a tuple.
            exit('Database type not supported: ', self.dbType)
        if (self.dbType == 'mongo'):
            self.db = MongoClient()
            self.db = self.db[dbCredentials['dbname']]
        if (self.dbType == 'mysql'):
            self.db = MySQLdb.connect(
                host=dbCredentials['host'],
                user=dbCredentials['user'],
                passwd=dbCredentials['password'],
                db=dbCredentials['dbname'],
                cursorclass=MySQLdb.cursors.SSCursor # this cursor allows one by one fetching
                # TODO: implement fetching one by one - http://kushaldas.in/posts/fetching-row-by-row-from-mysql-in-python.html
            )

    def createTable( self, newTableName, vars ):
        """Create newTableName with columns from `vars` (name -> {'type': ...},
        defaulting each missing type to 'text'). Drops any existing table."""
        # drop table if already exists
        self.dropTable(newTableName)
        if (self.dbType == 'mongo'):
            # TODO: mongo create table
            exit('NON HAI SCRITTO IL CODICE PER CREARE LA TABELLA IN MONGO')
        if (self.dbType == 'mysql'):
            query = 'CREATE TABLE ' + newTableName + ' ( '
            for name in vars:
                if not 'type' in vars[name]:
                    vars[name]['type'] = 'text'
                query += str(name) + ' ' + vars[name]['type'] + ', '
            query = query[:-2] #take away last comma
            query += ');'
            # execute creation
            self.db.cursor().execute( query )
            # save changes
            self.db.commit()

    def cloneTable( self, originTableName, newTableName, empty=False ):
        """Clone a table's structure (mysql CREATE TABLE ... LIKE) and empty it.
        NOTE(review): `empty` parameter is never read — the clone is always
        truncated."""
        # drop table if already exists
        self.dropTable(newTableName)
        if (self.dbType == 'mongo'):
            # TODO: mongo clone table
            exit('NON HAI SCRITTO IL CODICE PER CLONARE LA TABELLA IN MONGO')
        if (self.dbType == 'mysql'):
            query = 'CREATE TABLE ' + newTableName + ' LIKE ' + originTableName
            # execute creation
            self.db.cursor().execute( query )
            self.emptyTable(newTableName)
            # save changes
            self.db.commit()

    def dropTable(self, table):
        """DROP TABLE IF EXISTS (mysql only)."""
        if (self.dbType == 'mongo'):
            # TODO: mongo drop table
            exit('NON HAI SCRITTO IL CODICE PER CANCELLARE LA TABELLA IN MONGO')
        if (self.dbType == 'mysql'):
            # delete data from table
            query = 'DROP TABLE IF EXISTS ' + table
            self.db.cursor().execute(query)
            self.db.commit()

    def emptyTable(self, table):
        """TRUNCATE the table (mysql only)."""
        if (self.dbType == 'mongo'):
            # TODO: mongo clean table
            exit('NON HAI SCRITTO IL CODICE PER SVUOTARE LA TABELLA IN MONGO')
        if (self.dbType == 'mysql'):
            # delete data from table
            query = 'TRUNCATE TABLE ' + table
            self.db.cursor().execute(query)
            self.db.commit()

    def insertRecord( self, table, record, commit=True ):
        """Insert one dict `record` as a row; values are stringified via
        str(tuple(...)), so quoting relies on Python's repr (fragile)."""
        # extract keys and values
        keys = str( tuple( record.keys() ) )
        values = str( tuple( record.values() ) )
        # take away ' from columns names
        keys = keys.replace("'", '')
        if (self.dbType == 'mysql'):
            # TODO: mysql gives error when there is only one key and value.
            # (a 1-tuple reprs as "(x,)" — trailing comma breaks the SQL)
            # build and execute query
            query = 'INSERT into ' + table + ' ' + keys + ' VALUES ' + values
            self.db.cursor().execute( query )
            # commit=False is used for fast insertion in mysql.
            # in that case calling code should invoke DatabaseHulk.commit() by hand.
            if( commit ):
                self.db.commit()
        if (self.dbType == 'mongo'):
            # TODO: mongo insert record
            exit('NON HAI SCRITTO IL CODICE PER INSERIRE IL RECORD IN MONGO')

    def updateRecord( self, table, newValues, identifiers, commit=True ):
        """UPDATE rows matching all `identifiers` with `newValues` (mysql)."""
        if (self.dbType == 'mysql'):
            # build the query
            query = 'UPDATE ' + table + ' SET '
            for columnName in newValues:
                query += '`' + columnName + '`="' + str(newValues[columnName]) + '", '
            # take away last ', '
            query = query[:-2]
            query += ' WHERE '
            for columnName in identifiers:
                query += '`' + columnName + '`="' + str(identifiers[columnName]) + '" AND '
            # take away last 'AND '
            query = query[:-4]
            self.db.cursor().execute( query )
            # commit=False is used for fast insertion in mysql.
            # in that case calling code should invoke DatabaseHulk.commit() by hand.
            if( commit ):
                self.db.commit()
        if (self.dbType == 'mongo'):
            # TODO: mongo update record
            exit('NON HAI SCRITTO IL CODICE PER AGGIORNARE IL RECORD IN MONGO')

    def commit(self):
        """Flush pending writes."""
        # TODO: is it necessary for mongo?
        self.db.commit()

    def getFromSQL(self, query, getAsDictionary=False):
        """Run a SELECT and return cleaned rows (lists, or dicts when
        getAsDictionary=True; single-column rows collapse to scalars)."""
        # get data from db
        cursor = self.db.cursor()
        cursor.execute(query)
        data = cursor.fetchall()
        # get also column names
        columnNames = []
        for col in cursor.description:
            columnNames.append(col[0])
        cursor.close()
        # clean data (take them away from tuples)
        return self.cleanSQLdata(data, getAsDictionary, columnNames)

    def cleanSQLdata(self, data, getAsDictionary, columnNames):
        """Unpack fetchall() tuples; longs become ints. Single-column rows
        are returned as bare values regardless of getAsDictionary."""
        cleandata = []
        for row in data:
            if len(row) > 1:
                cleanrow = []
                if getAsDictionary:
                    columnCounter = 0
                    cleanrow = {} # we need a dict
                for element in row:
                    if getAsDictionary:
                        columnName = columnNames[columnCounter]
                        cleanrow[columnName] = self.convertToIntIfItIsLong(element)
                        columnCounter += 1
                    else:
                        cleanrow.append(self.convertToIntIfItIsLong(element))
            else:
                cleanrow = self.convertToIntIfItIsLong(row[0])
            cleandata.append(cleanrow)
        return cleandata

    def convertToIntIfItIsLong(self, n):
        """Coerce Python 2 `long` to `int` (cosmetic: drops the trailing L)."""
        # TODO: this method will cause issues if the number is actually long
        if type(n) is long:
            return int(n)
        return n

    def getDistinctValues(self, table, variable):
        """SELECT DISTINCT `variable` (mysql) or .distinct() (mongo)."""
        if (self.dbType == 'mongo'):
            return self.db[table].distinct(variable)
        if (self.dbType == 'mysql'):
            query = 'SELECT DISTINCT `' + variable + '` FROM ' + table
            return self.getFromSQL(query)

    def count(self, table, constraints={}):
        """Count rows matching the equality `constraints` dict."""
        if (self.dbType == 'mongo'):
            return self.db[table].find(constraints).count()
        if (self.dbType == 'mysql'):
            query = 'SELECT COUNT(*) FROM ' + table + ' WHERE'
            for k, v in constraints.items():
                query += ' `' + str(k) + '`="' + str(v) + '" AND'
            # take away last 'AND' — or the dangling ' WHERE' when empty
            if constraints != {}:
                query = query[:-4]
            else:
                query = query[:-5]
            print query
            return self.getFromSQL(query)[0]

    def getRecords(self, table, constraints={}):
        """Fetch rows matching the equality `constraints` dict.
        NOTE(review): unlike count(), this does NOT handle empty constraints —
        with {} the trailing slice leaves a malformed 'SELECT * FROM t WH'."""
        if (self.dbType == 'mongo'):
            return self.db[table].find(constraints, timeout=False)
            # TODO: we should close the cursor manually after deactivating the timeout
        if (self.dbType == 'mysql'):
            query = 'SELECT * FROM ' + table + ' WHERE'
            for k, v in constraints.items():
                query += ' `' + str(k) + '`="' + str(v) + '" AND'
            # take away last 'AND'
            query = query[:-4]
            return self.getFromSQL(query, getAsDictionary=True)
# Derive the combined marker column, dump the frame to CSV, then bulk-load
# it into the "apple_sensor" table on RDS as one multi-row INSERT.
df_test_copy['combined_marker'] = df_test_copy.apply(f, axis=1)
df_test_copy.to_csv("data.csv", index=False)

#---------------------------connection to postgres-----------------------------------
try:
    conn = psycopg2.connect(
        "dbname='SensorFusionDatabase' user='******' host='sensorfusiondbinstance.cjz4wrixhzjb.us-west-2.rds.amazonaws.com' password='******'"
    )
    print("connected")
except psycopg2.Error:
    # The original bare except printed a message and fell through, then
    # crashed on the undefined `conn` below — fail fast instead.
    print("I am unable to connect to the database")
    raise

cur = conn.cursor()
# mogrify safely quotes each of the 27 CSV fields; the file handle is now
# closed deterministically via the with-block (original leaked it).
# NOTE(review): under Python 3, mogrify returns bytes — the join would need
# a .decode(); left as-is to match the original runtime.
with open('data.csv', 'r') as csvfile:
    reader = csv.reader(csvfile)
    dataText = ','.join(
        cur.mogrify(
            "(%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s,%s)",
            tuple(row))
        for row in reader)
cur.execute('INSERT INTO "apple_sensor" VALUES ' + dataText)
conn.commit()
cur.close()
conn.close()
print("Data loaded successfully ... ")