def create_song(path, duration, playlist_name='unknown', db_name=''):
    """Read ID3 tags from the MP3 at *path* and store the song, plus its
    playlist membership, in the database.

    Args:
        path: Filesystem path to the MP3 file.
        duration: Song length to store (units as expected by the schema).
        playlist_name: Playlist to attach the new song to.
        db_name: Optional database name forwarded to ``create_db``.

    Raises:
        FileNotFoundError: If *path* does not exist.  (Subclass of the
            ``Exception`` previously raised, so existing handlers still work.)
    """
    engine = create_db(db_name) if db_name else create_db()

    # Guard clause instead of wrapping the whole body in `if exists(path)`.
    if not exists(path):
        raise FileNotFoundError('There is no such file or directory! Try another one')

    original_song = EasyID3(path)
    # EasyID3 values are lists; take the first entry, defaulting to ''.
    db_song = {
        'name': original_song.get('title', [''])[0],
        'path': path,
        'artist': original_song.get('artist', [''])[0],
        'length': duration,
        'album': original_song.get('album', [''])[0],
    }
    created_song = insert_song_into_db(engine, db_song)
    playlist_info = {
        'name': playlist_name,
        'song_id': created_song.id,
    }
    insert_playlist_into_db(engine, playlist_info)
    def __init__(self, jsonData, jobData):
        """Set up the data-faking job thread.

        Seeds Faker/Random deterministically, registers Faker providers,
        stores job configuration, and opens two MySQL connections: one for
        the job's target database and one for the faker source database.

        Args:
            jsonData: Job definition; also stored as ``self.fakeData``.
            jobData: Job metadata; must contain ``'database_name'``.
        """
        #super().__init__()
        threading.Thread.__init__(self)
        # Deterministic fake data: seed both the Random instance and Faker
        # with the same constant seed.
        self.fake = Faker()
        self.seed = 0
        self.random = Random(self.seed)
        Faker.seed(self.seed)
        self.fake.add_provider(internet)
        self.fake.add_provider(phone_number)
        self.fake.add_provider(company)
        self.jsonData = jsonData
        # NOTE(review): fakeData aliases jsonData (same object, not a copy).
        self.fakeData = jsonData
        self.jobData = jobData
        self.ws = None  # websocket; opened later by the job run
        self.table_count = 0
        self.row_count = 0
        self.fake_string = ""
        self.words_engineering = []
        self.row_data = {}
        #self.jobData = None
        self.error = False
        self.current_table = None
        self.last_table = None
        # Connection/cursor for the job's target database.
        self.jobDB = connect_db()
        self.jobCursor = self.jobDB.cursor(dictionary=True)
        try:
            # NOTE(review): database name interpolated directly into SQL —
            # assumes jobData['database_name'] is trusted; confirm upstream.
            self.jobCursor.execute("use %s"%(self.jobData['database_name']))
        except Exception as e:
            #Create database
            # The target database did not exist yet; create it, then retry.
            create_db(self.fakeData, True)
            self.jobCursor.execute("use %s"%(self.jobData['database_name']))
        # Separate connection/cursor for the faker source database
        # (name taken from the FAKER_DATABASE environment variable).
        self.fakerDB = connect_db()
        self.fakerCursor = self.fakerDB.cursor(dictionary=True)
        self.fakerCursor.execute("use %s"%(os.getenv("FAKER_DATABASE")))
        self.tabledata = {}
        # Accumulators for batched INSERT ... VALUES statements.
        self.sqlValues = ""
        self.valuesCount = 0
        self.sql_insert = ""
        self.newTable = True
        self.table_each_row_count_source = 0 #Each record is source table
        self.table_each_row_count_destination = 0 #Each record in destination table. There can be multiples of desination records for each source record
        # NOTE(review): this OrderedDict compatibility shim looks misplaced
        # inside __init__ (it only binds local names here) — it likely
        # belongs at module level; confirm against the original file.
        try:
            from collections import OrderedDict
        except ImportError:
            OrderedDict = dict
import db_output # for getting db_info in nice format def tolist(db_info): list1 = [] for i in db_info: list1.append(i[0]) return (list1) features_file = "../db/derived_features_list.txt" connection = sqlite3.connect('../db/error_dist_simulation.db') cursor = connection.cursor() create_database.create_db(cursor, features_file=features_file, REMOVE_RECORDS=True) # for viewing features table sql_cmd = """CREATE VIEW IF NOT EXISTS features_short AS SELECT source_id,freq1_harmonics_freq_0,std,max,weighted_average FROM features""" cursor.execute(sql_cmd) ## given curve, cadence returns tfe def ComputeTfe(aCurve, aCadence): mag_min_this = 12 phase_this = scipy.stats.uniform(loc=0.0, scale=1.0).rvs() period_this = aCurve.period_this times = (aCadence.cadence_this - aCadence.cadence_this[0] + (period_this * phase_this)) errors = aCadence.error_this
def main():
    """Entry point: open the database, then rewrite one song's ID3 tags."""
    db_engine = create_db()
    update_song_id3(
        db_engine,
        'Bailando.mp3',
        new_artist='Enrique Iglesias',
        new_album='2003',
    )
## put results in a list
def tolist(db_info):
    """Return the first column of each row in *db_info* as a list."""
    # Comprehension replaces the manual append loop.
    return [row[0] for row in db_info]


## make and test connection to the database
features_file = "../db/derived_features_list.txt"
connection = sqlite3.connect('../db/hip_three_class.db')
cursor = connection.cursor()
create_database.create_db(cursor, features_file=features_file, REMOVE_RECORDS=True)

# Ingest the Debosscher/Hipparcos light curves into the new database.
folder = "../data/debosscher"
connection.commit()
create_database.ingest_many_xml(folder, cursor, connection, survey="hipparcos", number_processors=2)

## make a nice view of the features table
sql_cmd = """CREATE VIEW IF NOT EXISTS features_short AS SELECT source_id,freq1_harmonics_freq_0,std,max,weighted_average FROM features"""
cursor.execute(sql_cmd)

## make a nice view of the sources table
sql_cmd = """CREATE VIEW IF NOT EXISTS sources_short AS SELECT source_id,original_source_id,classification,noisification,noise_args,true_period FROM sources"""
cursor.execute(sql_cmd)
    def jobRun(self):
        """Run the fake-data population job.

        Creates the target database, then for each configured table
        generates rows (batching INSERT values and flushing the previous
        table's pending batch when a new table starts), processes exports,
        and reports progress/errors over the websocket.
        """
        #Create database
        create_db(self.fakeData, True)
        self.openWebsocket()
        try:
            self.wsMessage("Created database %s"%(self.fakeData['database_name']), "running")
            #Use database
            self.jobCursor.execute("USE %s"%(self.fakeData['database_name']))
            for table in self.fakeData['tables']:
                # Flush any values batched for the PREVIOUS table before
                # starting this one.
                if len(self.sqlValues) > 0:
                    # Build "INSERT INTO <table>(col1,col2,...) values "
                    # from the previous table's field definitions.
                    sql = "INSERT INTO %s"%(self.last_table['table_name'])
                    sql = sql + "("
                    for field in self.last_table['fields']:
                        field_def = field
                        # Skip fields with no fake-data spec (None, non-list,
                        # or empty list) — they are not inserted.
                        if(field['fake'] == None or type(field['fake']) is not list or len(field['fake']) == 0):
                            continue
                        sql = sql + field['name'] + ","
                    sql = sql[0:-1]  # drop trailing comma
                    sql = sql + ") values "
                    self.sql_insert = sql
                    # self.sqlValues has a trailing comma; strip it.
                    sqlStatement = sql + self.sqlValues[:-1]
                    self.jobCursor.execute(sqlStatement)
                    self.jobDB.commit()
                    self.sqlValues = ""
                    self.valuesCount = 0
                self.jobDB.commit()
                self.current_table = table['table_name']
                self.newTable = True
                # An earlier generator step flagged an error: report and stop.
                if self.error == True:
                    self.wsMessage("Ending job run due to error, see logs above for more details.", "error")
                    self.ws.close()
                    return
                self.wsMessage("Processing table %s"%(table['table_name']), "running")
                self.table_count = self.table_count + 1
                # fake_qty dispatch: plain int -> fixed row count;
                # "table|each..." -> one batch per source-table row;
                # "BOM..." -> bill-of-materials style generation.
                fake_qty = table['fake_qty']
                if(type(fake_qty) == int):
                    self.generateData(table, fake_qty)
                elif(fake_qty[0:10] == "table|each"):
                    self.generateTableEach(table)
                elif(fake_qty[0:3] == "BOM"):
                    self.generateBOM(table, fake_qty)
            #Run the final table
            # The loop only flushes on table change, so the last table's
            # batch is still pending here.
            if len(self.sqlValues) > 0:
                sqlStatement = self.sql_insert + self.sqlValues[:-1]
                self.jobCursor.execute(sqlStatement)
                self.jobDB.commit()
            #Clear up
            self.tabledata = {}
            #Process exports
            self.processExports()
            self.wsMessage("Database population complete created %d tables and %d records"%(self.table_count, self.row_count), "complete")
            self.ws.close()
        except Exception as e:
            # Top-level boundary: report the failure over the websocket.
            self.wsMessage(e, "error")
            self.ws.close()
# NOTE(review): this chunk begins mid-statement — the opening
# parser.add_argument( of the first option lies outside the visible range.
                    default=1.24, type=float, help='ver dist threshold')
parser.add_argument('--code', default='SPT100', type=str, help='The course code of the lecture')
args = parser.parse_args()

# load model
model = face_model.FaceModel(args)
dirListing = os.listdir('student_profiles')
# get the number of sample photos

# create the database
# if os.path.isfile(db):
create_db()
add_course("my_course")

# Load the student roster and enroll each student in the course.
with open("students.csv", 'r', newline='') as file:
    reader = csv.reader(file)
    for row in reader:
        try:
            student = Student(row[0], row[1], row[2], float(row[3]))
            Student.add_student(student)
            Student.add_to_class(student, "my_course")
        # NOTE(review): bare except swallows every error, not just blank
        # rows — catching (IndexError, ValueError) would be safer; confirm.
        except:
            print("Error reading csv file, probably a blank row")

# student profile embeddings
conn = sqlite3.connect('engage.db')
c = conn.cursor()
import json
import mysql.connector
import time
from mysql.connector import errorcode
from dataset_grabber import download_dataset
import create_database
from operator import itemgetter

create_database.create_db()  # Create database

# Connect to the local MySQL server, reporting the common failure modes
# (bad credentials vs. missing database) with friendlier messages.
try:
    mydb = mysql.connector.connect(host="localhost", user="******", passwd="user1", database="maindatabase")
except mysql.connector.Error as err:
    if err.errno == errorcode.ER_ACCESS_DENIED_ERROR:
        print("Something is wrong with your user name or password")
    elif err.errno == errorcode.ER_BAD_DB_ERROR:
        print("Database does not exist")
    else:
        print(err)

if mydb.is_connected():
    print('Connection to database established!')
mycursor = mydb.cursor()


def start_database():
    """Create the CountriesPerDay table (if absent) and fill it."""
    # Create table and fill it
    # NOTE(review): statement appears truncated in this chunk — the closing
    # parenthesis of execute(...), and presumably more of the function,
    # lie outside the visible range.
    mycursor.execute(
        "CREATE TABLE IF NOT EXISTS CountriesPerDay (year VARCHAR(4), month VARCHAR(2), day VARCHAR(2), fulldate DATE, cases INT, deaths INT, country VARCHAR(100), geoId VARCHAR(15), popData2018 BIGINT, continent VARCHAR(50), PRIMARY KEY (fulldate, country))"
# -*- coding: utf-8 -*- """ Script to determine when a college football program from a Power-5 conference (ACC, SEC, Big Ten, Big 12, Pac-12) plays a non-Power 5 team on the road. This a semi-rare occurance, Requirements: Python with the SQLAlchemy and Beautiful Soup modules Project page: https://github.com/inkjet/FBS_Power5 Author: Scott Rodkey, [email protected] """ from create_database import create_db, db_location from populate_db import populate_db from calc_matchups import calc_matchups # Create a blank database -- edit create_database.py to specify a specific DB location create_db() # Scrape all FBS schools and their conference from Wikipedia and place them in the database populate_db(db_location) # Look at the 2015 schedule and print a week-by-week report to see if a Power-5 school # is playing a non-Power 5 school on the road calc_matchups(db_location)