def setup(self, new_experiment_uuid, frame_uuid_map, track_uuid_map):
    """Open a database connection and transaction and remember the UUID
    maps used while copying data into the new experiment.

    Args:
        new_experiment_uuid: UUID of the experiment being created.
        frame_uuid_map: mapping of source frame UUIDs to new frame UUIDs.
        track_uuid_map: mapping of source track UUIDs to new track UUIDs.
    """
    self.db = Database()
    # BUG FIX: ``self. async (...)`` is a syntax error on Python >= 3.7,
    # where ``async`` became a reserved keyword.  The helper attribute
    # named "async" can still be reached through getattr().
    self.tx, self.transaction = getattr(self, "async")(self.db.transaction())
    self.new_experiment_uuid = new_experiment_uuid
    self.frame_uuid_map = frame_uuid_map
    self.track_uuid_map = track_uuid_map
    self.verbose = False
async def show_frame(args):
    """Load one stored frame image and display it with matplotlib.

    args: experiment_uuid frame_number file
    """
    # Guard clause: bail out early on missing arguments.
    if len(args) < 3:
        print("Please supply \"experiment frame_number file\"")
        return
    db = Database()
    experiment_uuid = args[0]
    frame_number = int(args[1])
    file = args[2]
    experiment_dir = os.path.join(config.experiment_dir, experiment_uuid)
    async for record in db.query(
            """ SELECT frame
                FROM frame
                WHERE experiment = $1
                AND Frame.number = $2""",
            experiment_uuid, frame_number):
        print("Found frame", record['frame'])
        image = Image.open(
            os.path.join(experiment_dir, str(record['frame']), file))
        # im.save('show_frame_test.png')
        pixels = np.array(image)
        plt.gray()
        plt.imshow(pixels.squeeze())
        plt.show()
async def exportParticles(args):
    """Export random particle crop JPEGs for one (hard-coded) experiment.

    args: directory limit
    Copies up to ``limit`` track crops (area > 100, category 2 or 3)
    into ``directory``.
    """
    directory = args[0]
    limit = args[1]
    db = Database()
    # NOTE(review): the experiment UUID and filters are hard coded;
    # consider passing them in via ``args`` if reuse is needed.
    q = """
        SELECT e.experiment, f.frame, t.track
        FROM experiment e, frame f, track t, particle p
        WHERE e.experiment = f.experiment
        AND p.particle = t.particle
        AND f.frame = t.frame
        AND e.experiment = 'b6734bad-2dfc-4502-9260-a7d71e72f6a9'
        AND p.area > 100
        AND p.category in (2,3)
        ORDER BY RANDOM()
        LIMIT {limit}
        """
    s = q.format(limit=limit)
    # FIX: removed the unused local ``crops = []``.
    async for result in db.query(s):
        srcFile = os.path.join(
            config.experiment_dir,
            str(result["experiment"]),
            str(result["frame"]),
            str(result["track"]) + ".jpg",
        )
        dstFile = os.path.join(directory, str(result["track"]) + ".jpg")
        shutil.copyfile(srcFile, dstFile)
def info(self, message):
    """Build and return the JSON payload listing every book in the db.

    Args:
        message: status message stored on the instance (not embedded in
            the returned payload by this method).

    Returns:
        self.json_prepared with a 'books' list of per-book dicts.
    """
    self.book = {}
    self.books = []
    self.message = message
    db = Database()
    query = db.query("SELECT * FROM `book`", "").fetchall()
    # add items from db to dict for json output
    for item in query:
        # Book() is instantiated only to resolve the cover image path.
        book = Book(item['id'])
        self.book = {
            'id': item['id'],
            'title': item['title'],
            'author': item['author'],
            'date': item['date'],
            'ISBN': item['ISBN'],
            'ext': item['ext'],
            'language': item['language'],
            'image': book.image,
            'md5': item["md5"]
        }
        self.books.append(self.book)
    # BUG FIX: this assignment was duplicated; keep it once.
    self.json_prepared['books'] = self.books
    return self.json_prepared
async def exportDensityTest(experiment_uuid):
    """Test whether the particle "density" of each frame correlates with
    its number of "out-of-control" particles, then scatter-plot the two
    per-frame counts against each other.
    """
    db = Database()
    frames_sql = """
        SELECT number, frame from frame
        where experiment ='{experiment}'
        ORDER BY number; """
    total_sql = """
        SELECT count(*) as count
        FROM particle p, frame f, track t
        WHERE p.particle = t.particle
        AND f.frame = '{frame}'
        AND f.frame = t.frame """
    ooc_sql = """
        SELECT count(*) as count
        FROM particle p, frame f, track t
        WHERE p.particle = t.particle
        AND f.frame = '{frame}'
        AND f.frame = t.frame
        AND (2*p.radius < 194 OR 2*p.radius > 224) """
    all_counts = []
    ooc_counts = []
    async for frame_row in db.query(frames_sql.format(experiment=experiment_uuid)):
        async for row in db.query(total_sql.format(frame=frame_row["frame"])):
            all_counts.append(row["count"])
        async for row in db.query(ooc_sql.format(frame=frame_row["frame"])):
            ooc_counts.append(row["count"])
        print("Frame:", frame_row["number"], all_counts[-1], ooc_counts[-1])
    # DO what ya want :)
    import matplotlib.pyplot as plt
    plt.scatter(all_counts, ooc_counts)
    plt.show()
def __init__(self):
    """Initialise the Pocket interactive shell.

    Runs both base-class initialisers, builds the prompt string and
    hides the framework's stock commands from the command list.
    """
    super(Pocket, self).__init__()
    # Database is a second base/mixin whose __init__ is not reached by
    # the super() chain above, so it is invoked explicitly here.
    Database.__init__(self)
    self.prompt = self.console_prompt + self.console_prompt_end
    # Hide built-in shell commands (presumably from cmd2 - TODO confirm)
    # that are not part of this tool's user interface.
    self.hidden_commands.extend([
        'alias', 'edit', 'macro', 'py', 'pyscript', 'shell', 'shortcuts',
        'load'
    ])
    # Show the startup banner immediately.
    self.do_banner(None)
def __init__(self, debug=False):
    """Initialise the training-data provider.

    Args:
        debug (bool): debug flag stored on the instance; presumably
            enables extra logging elsewhere - TODO confirm usage.
    """
    self.numDataPoints = None    # total data points (filled in later)
    self.numTrainBatch = None    # number of training batches (filled in later)
    self.numTestBatch = None     # number of test batches (filled in later)
    self.splitPercent = 0.8      # train/test split ratio
    # identifier of the data-generation method used for tracking training
    self.method = "simulation2Corrupt_tracking"
    self.debug = debug
    self.db = Database()
    self.logger = logging.getLogger('dvTraining')
    self.processors = []         # worker processors registered elsewhere
def check(self, md5):
    """Return True when no book with this MD5 hash exists yet.

    Args:
        md5: MD5 hex digest to look up.

    Returns:
        bool: True if the hash is absent from the ``book`` table.
    """
    db = Database()
    data = db.query("SELECT * FROM book WHERE md5=:md5", {
        "md5": md5
    }).fetchall()
    db.commit()
    # Idiom fix: return the comparison directly instead of an
    # if/else returning True/False.
    return len(data) == 0
async def list_experiments():
    """Print one summary row (uuid, day, name, frame count, notes) per
    experiment in the database."""
    db = Database()
    sql = """
        SELECT experiment, day, name, count(frame) as frames, notes
        FROM Experiment LEFT JOIN Frame USING (experiment)
        GROUP BY experiment"""
    async for row in db.query(sql):
        print(row["experiment"], row["day"], row["name"],
              row["frames"], "frames", row["notes"])
class Ranking:
    """Thin facade over the Database ranking_* operations."""

    def __init__(self):
        self.db = Database()

    def get(self, cpe=None, regex=False):
        """Look up ranking entries, optionally matching *cpe* as a regex."""
        return self.db.ranking_find(cpe=cpe, regex=regex)

    def remove(self, cpe):
        """Delete the ranking for *cpe*; non-string or empty input is a no-op."""
        if not isinstance(cpe, str) or not cpe:
            return
        self.db.ranking_remove(cpe)

    def add(self, cpe, key, rank):
        """Store *rank* under (*cpe*, *key*)."""
        self.db.ranking_add(cpe, key, rank)
async def list_frames(args):
    """Print every frame UUID of the experiment in args[0], in frame order."""
    db = Database()
    experiment = args[0]
    sql = """
        SELECT frame
        FROM Frame LEFT JOIN Experiment USING (experiment)
        WHERE experiment = $1
        ORDER BY number ASC"""
    async for row in db.query(sql, experiment):
        print(row["frame"])
async def get_frame(args):
    """Print the frame UUID for experiment args[0] at frame number args[1]."""
    db = Database()
    experiment = args[0]
    frame_number = int(args[1])
    sql = """
        SELECT frame
        FROM Frame LEFT JOIN Experiment USING (experiment)
        WHERE experiment = $1
        AND number = $2"""
    async for row in db.query(sql, experiment, frame_number):
        print(row["frame"])
async def inner_loop(self):
    """Consume SQL queries from ``inputQueue``, run each one, and emit a
    per-row sample dict (two frame images, two latent vectors and two
    locations) on ``outputQueue`` until stopped.

    A ``None`` item on the input queue is the shutdown sentinel.
    """
    print("DVQueryProcessor ready.")
    db = Database()
    while True:
        if not self.stopped():
            # blocks until the next query (or the None sentinel) arrives
            sql_drop = self.inputQueue.get()
            if sql_drop is None:
                self.stop()
            else:
                query = sql_drop
                async for result in db.query(query):
                    # allow a stop request to interrupt a long result set
                    if self.stopped():
                        break
                    loc1 = (result["loc1"][0], result["loc1"][1])
                    loc2 = (result["loc2"][0], result["loc2"][1])
                    frameFile1 = os.path.join(config.experiment_dir,
                                              str(result["experiment1"]),
                                              str(result["frame1"]),
                                              '64x64.png')
                    frame1 = io.imread(frameFile1, as_grey=True)
                    frameFile2 = os.path.join(config.experiment_dir,
                                              str(result["experiment2"]),
                                              str(result["frame2"]),
                                              '64x64.png')
                    frame2 = io.imread(frameFile2, as_grey=True)
                    # frame1 = np.random.normal(0, 0.1, frame1.shape)
                    # frame2 = np.random.normal(0, 0.1, frame2.shape)
                    # latents arrive as "{f1,f2,...}" strings; strip the
                    # braces and parse into float lists
                    latent1_string = result["lat1"][1:-1].split(',')
                    latent1 = [float(i) for i in latent1_string]
                    latent2_string = result["lat2"][1:-1].split(',')
                    latent2 = [float(i) for i in latent2_string]
                    r = {"frame1": frame1, "frame2": frame2,
                         "lat1": latent1, "lat2": latent2,
                         "loc1": loc1, "loc2": loc2}
                    self.outputQueue.put(r)
        else:
            break
    print("DVQueryProcessor Exiting")
def __init__(self, hash_rounds=8000, salt_size=10, _db=None):
    """Aggregate entry point wiring up every collection wrapper.

    Args:
        hash_rounds: password-hashing round count.
        salt_size: salt length used for hashing.
        _db: optional database name/handle forwarded to Database(db=_db).
    """
    self.hash_rounds = hash_rounds
    self.salt_size = salt_size
    # FIX: replaced the fragile ``x and a or b`` idiom (which falls back
    # to Database() whenever Database(db=_db) evaluates falsy) with an
    # explicit conditional expression.
    self.db = Database(db=_db) if _db else Database()
    self.CVE = CVEs()
    self.CPE = CPEs()
    self.CWE = CWEs()
    self.CAPEC = CAPECs()
    self.VIA4 = VIA4s()
    self.Users = Users()
    self.Ranking = Ranking()
    self.Plugins = Plugins()
    self.Redis = Redis()
async def query(self, experiment_uuid):
    """Stream every segment of *experiment_uuid*, in segment-number
    order, into this processor's queue."""
    db = Database()
    sql = """
        SELECT segment, number
        FROM segment
        WHERE experiment = $1
        ORDER BY number ASC
        """
    async for row in db.query(sql, experiment_uuid):
        if self.verbose:
            print("Emitting segment", row["number"], "for processing...")
        self.put(row["segment"])
def __init__(self, id):
    """Load one book row by *id* and derive its file-system paths.

    Sets author/title/ext from the database row, plus:
      - self.dir: directory holding the book's files
      - self.image: cover image path, or None when no cover exists
      - self.full_path: full path of the book file itself
    """
    db = Database()
    query = db.query("SELECT * FROM book WHERE id=:id", {
        "id": id
    }).fetchone()
    self.author = query['author']
    self.title = query['title']
    self.ext = query['ext']
    self.real_name = self.author + " - " + self.title + "." + self.ext
    self.dir = 'books/' + self.author + "/" + self.title + "/"
    # BUG FIX: previously ``image`` was only assigned when a cover file
    # was found, so accessing book.image raised AttributeError for books
    # without a cover.  Default it to None.
    self.image = None
    for item in [".jpg", ".jpeg", ".png", ".gif"]:
        if os.path.isfile(self.dir + "cover" + item):
            self.image = self.dir + "cover" + item
    self.full_path = self.dir + self.real_name
def __init__(self, id):
    """Initialise the record: open its database cursor/query and derive
    the JSON and YAML file names from the upper-cased id."""
    self.id = id
    (self.cur, self.query) = Database(self.id).db_init()
    # "<ID>.json" / "<ID>.yaml" built by plain concatenation
    upper_id = self.id.upper()
    self.json_file = upper_id + '.' + 'json'
    self.yaml_file = upper_id + '.' + 'yaml'
def __init__(self, app):
    """
    (object) -> void
    Frontend constructor: wires up the Database, Sync and Prediction
    helpers, prepares empty graph data and stores the dash instance.
    """
    self.is_loading_prediction = False
    # helper services; Prediction shares the Database instance
    self.database = Database()
    self.sync = Sync()
    self.prediction = Prediction(self.database)
    # empty graph data so the UI can render before the first forecast
    self.forecast_graph = {}
    # dash application instance
    self.app = app
async def clean_experiments_dir():
    """Delete experiment directories that have no matching database row.

    Returns:
        str: human-readable summary of how many directories were removed.
    """
    exp_dirs = set([
        f for f in os.listdir(config.experiment_dir)
        if os.path.isdir(os.path.join(config.experiment_dir, f))
    ])
    print("Found", len(exp_dirs), "experiment directories")
    exp_db = set()
    async for exp in Database().query(
            "SELECT * FROM experiment ORDER BY experiment"):
        exp_db.add(str(exp["experiment"]))
    print("Found", len(exp_db), "experiment database entries")
    removed = 0
    # directories on disk with no database entry are stale
    for to_remove in exp_dirs - exp_db:
        path = os.path.join(config.experiment_dir, to_remove)
        try:
            shutil.rmtree(path)
            print("Removed ", to_remove)
        except Exception as e:
            print("Failed to remove ", to_remove)
            print(e)
        else:
            # BUG FIX: only count directories actually deleted; the old
            # code incremented the counter even after a failed rmtree.
            removed += 1
    return "Removed {} invalid directories".format(removed)
async def exportParticlesVelocitiesLatents(args):
    """Export crops plus per-track velocities and latent vectors.

    args: directory limit
    For up to ``limit`` random consecutive-frame track pairs of a
    hard-coded experiment, copies the crop JPEG into ``directory`` and
    appends one "track, dx, dy, latent" line to data.txt.
    """
    directory = args[0]
    limit = args[1]
    db = Database()
    q = """
        SELECT e.experiment, f1.frame, t1.track, t1.latent,
               t2.location-t1.location as delta
        FROM experiment e, frame f1, frame f2, track t1, track t2, particle p
        WHERE e.experiment = f1.experiment
        AND e.experiment = f2.experiment
        AND p.particle = t1.particle
        AND f1.frame = t1.frame
        AND f2.frame = t2.frame
        AND t1.particle = t2.particle
        AND f1.number = f2.number-1
        AND e.experiment = '3a24cfcf-bef5-40a1-a477-6e7007bcd7ae'
        AND p.area > 100
        AND f1.number > 200
        AND f1.number < 500
        AND p.category in (2,3)
        ORDER BY RANDOM()
        LIMIT {limit}
        """
    s = q.format(limit=limit)
    # FIX: removed the unused local ``crops = []``.
    line = "{track}, {dx}, {dy}, {latent}\n"
    outFile = os.path.join(directory, "data.txt")
    with open(outFile, "w+") as f:
        async for result in db.query(s):
            srcFile = os.path.join(
                config.experiment_dir,
                str(result["experiment"]),
                str(result["frame"]),
                str(result["track"]) + ".jpg",
            )
            dstFile = os.path.join(directory, str(result["track"]) + ".jpg")
            shutil.copyfile(srcFile, dstFile)
            # delta is the per-frame displacement vector (dx, dy)
            dx = result["delta"][0]
            dy = result["delta"][1]
            f.write(
                line.format(track=result["track"], dx=dx, dy=dy,
                            latent=result["latent"]))
def student_list(dept):
    """Return all students whose major department starts with *dept*,
    or None (with the error printed) on a database error."""
    connection = Database.getconnection()
    with connection.cursor() as cur:
        try:
            # SECURITY FIX: parameterized query instead of f-string SQL
            # (prevents SQL injection through *dept*).
            cur.execute(
                "SELECT * FROM student WHERE majordept LIKE %s",
                (dept + '%',))
            return cur.fetchall()
        except (Exception, dbl.DatabaseError) as e:
            print(e)
def enrollment_list(dept, term):
    """Return all enrollments in sections of department *dept* for *term*,
    or None (with the error printed) on a database error."""
    connection = Database.getconnection()
    with connection.cursor() as cur:
        try:
            # SECURITY FIX: parameterized query instead of f-string SQL
            # (prevents SQL injection through *dept*/*term*).
            cur.execute(
                "SELECT enroll.* FROM enroll "
                "INNER JOIN section ON(enroll.crn=section.crn) "
                "WHERE section.cprefix LIKE %s and section.term=%s",
                (dept + '%', term))
            return cur.fetchall()
        except (Exception, dbl.DatabaseError) as e:
            print(e)
def getcourses(dept):
    """Return (cno, ctitle, chours) for every course whose prefix starts
    with *dept*, or None (with the error printed) on a database error."""
    connection = Database.getconnection()
    with connection.cursor() as cur:
        try:
            # SECURITY FIX: parameterized query instead of f-string SQL
            # (prevents SQL injection through *dept*).
            cur.execute(
                "SELECT cno, ctitle, chours FROM course WHERE cprefix LIKE %s",
                (dept + '%',))
            return cur.fetchall()
        except (Exception, dbl.DatabaseError) as e:
            print(e)
def getdepts():
    """Return the distinct course prefixes (departments) that have
    sections, or None (with the error printed) on a database error."""
    connection = Database.getconnection()
    with connection.cursor() as cur:
        try:
            # FIX: the f-string had no placeholders; a plain literal
            # suffices and avoids the injection-prone pattern.
            cur.execute("SELECT DISTINCT cprefix FROM section")
            return cur.fetchall()
        except (Exception, dbl.DatabaseError) as e:
            print(e)
def main(): """Fonction principale de mise à jour des données GPS""" # Initialisation de la connexion à la base de données database = Database() # Authentification auprès de l'API (->récupération token) api = Api() # Récupération de la liste des capteur actif # et de leur date de dernière synchronisation capteurs = database.selectCapteurs() # On boucle sur les capteurs pour récupérer les données for capteur in capteurs: capt_id = capteur['capt_id'] deviceId = capteur['capt_id_constructeur'] dtStart = capteur['loc_date_utc'].strftime("%Y-%m-%dT%H:%M:%S") responses = api.getlocalisation(deviceId, dtStart) # On boucle sur les nouvelles localisations GPS for response in responses: loc_long = response['Longitude'] loc_lat = response['Latitude'] loc_dop = response['PDOP'] loc_altitude_capteur = response['Altitude'] loc_temperature_capteur = response['Temperature'] loc_date_capteur_utc = response['RecDateTime'] if loc_long == 0 and loc_lat == 0: # Ici on n'a pas de coordonnées pour ce capteur à cette date loc_commentaire = 'Erreur : Pas de coordonnées' loc_anomalie = True database.insertNoLocData(capt_id, loc_dop, loc_altitude_capteur, loc_temperature_capteur, loc_date_capteur_utc, loc_commentaire, loc_anomalie) else: # Ici il n'y a pas d'anomalie dans les coordonnées database.insertLocData(capt_id, loc_long, loc_lat, loc_dop, loc_altitude_capteur, loc_temperature_capteur, loc_date_capteur_utc) # Fermeture de la connexion à la base de données database.close()
def teardown(self):
    """Copy the staged data into the database inside a fresh transaction,
    rolling back on any error and committing otherwise."""
    # BUG FIX: ``self. async (...)`` is a syntax error on Python >= 3.7
    # (``async`` is a reserved keyword); reach the helper via getattr().
    run = getattr(self, "async")
    self.tx, self.transaction = run(Database().transaction())
    try:
        run(self.copy_to_database())
    except Exception:
        # the exception value was previously bound but unused
        print("rolling back database")
        run(self.transaction.rollback())
    else:
        run(self.transaction.commit())
def checkDatabase_py():
    """Smoke-test the Database class: instantiate it, add one song,
    list the song's directory, then destroy the instance."""
    print('Testing instantiation...', end='')
    db = Database()
    print('Instantiation OK.')
    print('Testing addToDb()...', end='')
    song = Song(askSongLoc())
    db.addToDb(song)
    print('addToDb() OK.')
    print('Testing listDbDir()...')
    # directory component just above the file name
    song_dir = song._file.rsplit('/')[-2]
    pprint.pprint(db.listDbDir(song_dir))
    print('listDbDir() OK.')
    print('Testing destruction...', end='')
    del db
    print('Destruction OK.')
def book(self, id):
    """Delete a book: remove its files from disk and its row from the db.

    Sets self.error to "Success" or to the file-system error message;
    the database row is removed in either case.
    """
    db = Database()
    book = Book(id)
    try:
        os.remove(book.full_path)
        os.remove(book.image)
        self.error = "Success"
    except FileNotFoundError as e:
        self.error = str(e) + " Deleting from db anyways"
    # FIX: the delete/commit pair was duplicated in both branches;
    # it always runs, so hoist it out of the try/except.
    db.query("DELETE FROM book WHERE id=:id", {"id": id}).fetchone()
    db.commit()
    try:
        os.rmdir(book.dir)
    except OSError:
        # FIX: narrowed the bare ``except:``; rmdir failing (directory
        # not empty or already gone) is best-effort cleanup only.
        pass
def get_home_data(term, year, dept):
    """Return section rows (crn, cno, days, times, room, cap, instructor)
    for the given term/year and department prefix, or None (with the
    error printed) on a database error."""
    connection = Database.getconnection()
    with connection.cursor() as cur:
        try:
            # SECURITY FIX: parameterized query instead of f-string SQL
            # (prevents SQL injection through term/year/dept).
            cur.execute(
                "SELECT crn, cno, days, starttime, endtime, room, cap, instructor "
                "FROM section where term = %s AND year = %s AND cprefix LIKE %s",
                (term, year, dept + '%'))
            return cur.fetchall()
        except (Exception, dbl.DatabaseError) as e:
            print(e)
async def frame_resize(args):
    '''
    Resize every frame of an experiment's extraction video to a square
    size x size PNG and store it alongside the frame's other files.

    args: experiment_uuid size
    '''
    experiment_uuid = args[0]
    experiment_dir = os.path.join(config.experiment_dir, experiment_uuid)
    # video_file = args[1]
    video_file = os.path.join(experiment_dir, "extraction.mp4")
    size = int(args[1])
    db = Database()
    print("Opening video file")
    gray = True
    video = Video(video_file, gray=gray)
    async for record in db.query(
            """ SELECT frame, number
                FROM frame
                WHERE experiment = $1
                ORDER BY number """, experiment_uuid):
        # get the video frame; log progress every 10th frame
        if record["number"] and not record["number"] % 10:
            print("Resizing frame", record["number"])
        # image = video.normal_frame(record["number"])
        image = video.frame(record["number"])
        # resize the image and rescale floats back to 0-255 uint8
        image = np.uint8(255 * resize(image, (size, size)))
        if gray:
            # drop the singleton channel axis for grayscale output
            image = np.squeeze(image)
        # store the image in the filesystem as "<size>x<size>.png"
        frameDir = os.path.join(config.experiment_dir, str(experiment_uuid),
                                str(record["frame"]))
        outfile = os.path.join(frameDir,
                               str(size) + 'x' + str(size) + '.png')
        if not os.path.exists(frameDir):
            os.mkdir(frameDir)
        imsave(outfile, image)