def __init__(self, dir, dsd, settings):
    """Initiate the data manager.

    Arguments
    dir -- directory containing the subject icons
    dsd -- data source description
    settings -- settings

    """
    DataManager.__init__(self)
    self._harvester.newestId = self._cache.newestId
    self._items = self._cache.items
    self._newestId = self._cache.newestId
    self._historylength = dsd.maxcount
    self.interval = dsd.interval
    self._itemids = dict()
    self._sorteditems = dict()
    self._allitems = dict()
    self._subjectobservers = []
    self.subjectindex = 0
    self.subjecticons = GetIconItems(dir, settings)
    self.subjects = []
    for i in settings.subjects:
        subject = i.strip(subjectstrip)
        self._itemids[subject] = dict()
        self._sorteditems[subject] = []
        self.subjects.append(subject)
    self._initialUpdate()
    self._setsubject()
class WordSniffer(controller.Master):

    def __init__(self, server):
        controller.Master.__init__(self, server)
        self.daumdicparser = DaumDicParser()
        self.datamanager = DataManager('127.0.0.1', 27017)

    def run(self):
        print 'proxy server is running on 8080'
        try:
            return controller.Master.run(self)
        except KeyboardInterrupt:
            self.shutdown()

    def handle_response(self, msg):
        host = msg.request.host
        path = msg.request.path
        msg.reply()
        if host == 'dic.daum.net' and '/word/view.do' in path:
            word = self.daumdicparser.parse(msg.content)
            word_dict = word.__dict__
            word_dict['inserted_time'] = datetime.now()
            self.datamanager.save(word_dict)
def dconvert_isq_file(isq_filename):
    dm = DataManager(isq_filename)
    stack_path = dm.spath('raw_stack')
    convert_large_isq_file(isq_filename, stack_path)
def start(self, pause_sec=2):
    wi = WebInterface(DownloadManager.base_url)
    wp_all = AllHeroesParser()
    wp_detail = HeroDetailParser()
    heroes_all = wp_all.parse(wi.load_html_content(DownloadManager.all_heroes_url))
    self.heroes = []
    for i, hero in enumerate(heroes_all):
        img_path = DataManager.convert_url_to_img_path(hero['url'], hero['img_url'])
        sleep(pause_sec)
        details = wp_detail.parse(wi.load_html_content(hero['url']))
        related_to = {}
        for related_hero in details:
            related_to[related_hero['name']] = related_hero['advantage']
        self.heroes.append(Hero(name=hero['name'], img_path=img_path, related_to=related_to))
        try:
            with open(img_path, 'rb') as fp:
                pass
        except IOError:
            # image file not found
            sleep(pause_sec)
            DataManager.save_image(wi.load_image(hero['img_url']), img_path)
        self.hero_loaded.emit(hero['name'], i + 1, len(heroes_all))
    self.job_done.emit()
def main():
    from optparse import OptionParser
    usage = "usage: %prog [[option] <Country regex> [attribute regex]] "
    version = APP_VERSION
    parser = OptionParser(usage=usage, version="%prog " + version)
    parser.add_option("-v", "--verbose",
                      action="store_true", dest="verbose", default=False,
                      help="Make lots of noise")
    parser.add_option("-f", "--file",
                      action="store", dest="filename",
                      help="Initialize store for country stats")
    (options, args) = parser.parse_args()

    if len(args) < 1 and options.filename == None:
        # interactive mode
        CommandDispatcher()
    else:
        dm = DataManager(options.filename, options.verbose, excludeList=['Country'])
        # command line query mode
        if len(args) >= 1 and dm.getSize() > 0:
            # Find countries using regular expression match
            result = dm.getCountryApprox(args[0])
            # if no arguments, then assume all country stats are requested
            propertyRE = args[1] if len(args) == 2 else ""
            for c in result:
                print(c.getPropertyString(propertyRE))
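# Hypothetical invocations of the CLI above, assuming the module is saved as
# country_stats.py; the data file name and regexes are illustrative only:
#   python country_stats.py                                  # interactive mode
#   python country_stats.py -f stats.dat "United.*" Population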
def generate(pdfname):
    datamanager = DataManager('127.0.0.1', 27017)
    pdf = FPDF()
    pdf.add_page()
    pdf.set_author('GRE Master')
    pdf.set_title('GRE Word')
    #pdf.set_font('Arial', 'B', 16)
    pdf.add_font('eunjin', '', 'Eunjin.ttf', uni=True)
    pdf.set_font('eunjin', '', 16)
    pdf.add_font('bangwool', '', 'Bangwool.ttf', uni=True)
    pdf.set_font('bangwool', '', 16)
    #pdf.add_font('DejaVu', '', 'DejaVuSansCondensed.ttf', uni=True)
    #pdf.set_font('DejaVu', '', 16)
    for row in datamanager.find():
        text = row['text']
        meanings = row['meaning']
        synonyms = row['synonyms']
        meaning = ','.join(meanings)
        synonym = u','.join(synonyms)
        line = '%s : %s \n synonyms : %s' % (text, meaning, synonym)
        #pdf.cell(20, 10, row['text'], 0, 0)# + ' ' + ','.join(row['meaning']))
        pdf.multi_cell(0, 8, line, 1, 'J')
    pdf.output(pdfname, 'F')
def Application():
    import interrupt
    e = interrupt.GetInterruptEvent()

    while not e.wait(0.5):
        managers = []
        mgr1 = DataManager()
        mgr2 = RequestManager()
        mgr1.connect_to(mgr2)
        mgr2.connect_to(mgr1)
        managers.append(mgr1)
        managers.append(mgr2)

        for mgr in managers:
            mgr.start()

        print 'In main run loops...'
        while not e.wait(0.5):
            for mgr in managers:
                if not mgr.is_alive():
                    print mgr.name, 'crashed. Restarting...'
                    pass

        print 'Stopping managers...'
        for mgr in managers:
            mgr.stop()
def pre_prosseccing(self):
    dm = DataManager()
    data = dm.select_all_db('eco_news_data')
    data = data['제목']  # '제목' is the title column
    with open(self.model_path + '/model.documents', 'wb') as f:
        pickle.dump(data, f)

    # # Get posting_id from the revised job_title
    # posting_ids = data['posting_id']
    # posting_list = posting_ids.to_list()
    #
    # # Build the description data set according to posting_id
    # des_data = [data['job_description'][id] for id in posting_ids]
    # title_data = [data['job_title'][id] for id in posting_ids]
    # id_list = [i for i in range(len(posting_list))]
    # df = pd.DataFrame({'id': posting_list, 'job_title': title_data, 'job_description': des_data, 'posting_id': posting_list})
    # df.to_csv('data/doc2vec_test_data/0702/merge_0629_adj.csv', mode='w', encoding='utf-8')

    # Load the revised description set and run the preprocessing steps
    # data = dm.load_csv(file='data/doc2vec_test_data/0702/merge_0629_adj.csv', encoding='utf-8')
    sentences = self.data_text_cleansing(data)
    data_words = list(self.sent_to_words(sentences))
    # data_words_nostops = self.remove_stopwords(data_words)
    # data_lemmatized = self.lematization(data_words)
    # print(data_lemmatized)
    # bigram = self.make_bigram(data_lemmatized)

    ## Morphological analysis has to run first, then the bigrams/trigrams are built
    data_lemmatized = self.lematization(data_words)
    trigram = self.make_bigram(data_lemmatized, trigram_check=1)

    with open(self.model_path + '/model.corpus', 'wb') as f:
        pickle.dump(trigram, f)
    return trigram
def set_library_window_values(force=False):
    log.debug("set_library_window_values Called forced={0}", force)
    home_window = HomeWindow()

    already_set = home_window.getProperty("view_item.0.name")
    if not force and already_set:
        return

    for index in range(0, 20):
        home_window.clearProperty("view_item.%i.name" % index)
        home_window.clearProperty("view_item.%i.id" % index)
        home_window.clearProperty("view_item.%i.type" % index)
        home_window.clearProperty("view_item.%i.thumb" % index)

    data_manager = DataManager()
    url = "{server}/emby/Users/{userid}/Views"
    result = data_manager.GetContent(url)
    if result is None:
        return

    result = result.get("Items")
    server = downloadUtils.getServer()

    index = 0
    for item in result:
        type = item.get("CollectionType")
        if type in ["movies", "boxsets", "music", "tvshows"]:
            name = item.get("Name")
            id = item.get("Id")

            # plugin.video.jellyfincon-
            prop_name = "view_item.%i.name" % index
            home_window.setProperty(prop_name, name)
            log.debug("set_library_window_values: plugin.video.jellyfincon-{0}={1}", prop_name, name)

            prop_name = "view_item.%i.id" % index
            home_window.setProperty(prop_name, id)
            log.debug("set_library_window_values: plugin.video.jellyfincon-{0}={1}", prop_name, id)

            prop_name = "view_item.%i.type" % index
            home_window.setProperty(prop_name, type)
            log.debug("set_library_window_values: plugin.video.jellyfincon-{0}={1}", prop_name, type)

            thumb = downloadUtils.getArtwork(item, "Primary", server=server)
            prop_name = "view_item.%i.thumb" % index
            home_window.setProperty(prop_name, thumb)
            log.debug("set_library_window_values: plugin.video.jellyfincon-{0}={1}", prop_name, thumb)

            index += 1
def __init__(self, width, height):
    self.width = width
    self.height = height
    self.world = World(100, 100)
    self.dm = DataManager()
    newObject = self.dm.createInstance("mobile rectangle", "sims")
    self.world.addToWorld(newObject, 20, 20, 10, 5)
def main():
    parser = argparse.ArgumentParser(__doc__)
    parser.add_argument('data_file', help="Path to data file")
    args = parser.parse_args()

    dm = DataManager(args.data_file)
    print dm.spath('some_stack')
def __init__(self, master):
    Frame.__init__(self, master)
    Frame.configure(self)
    db = DataManager(nom_fichier_bdd)
    self.liste_film = db.recup_films_reponses()
    self.question_freq_absolue = questions_maximisant_esperance(self.liste_film)
    self.liste_questions_posee = list()
    self.liste_question_repondue = list()
    self.liste_reponse = list()
    self.liste_questions_posee.append(10)
    self.liste_questions_posee.append(11)
    self.question_ideale = question_ideale(self.liste_film, self.liste_questions_posee)
    self.liste_filtre = self.liste_film

    message_jeu = Label(self, text="Jeu", font=("Courrier", 30, "bold"), foreground="purple")
    message_jeu.grid(row=1, column=1)

    self.question = Label(self, text=self.question_ideale[1], pady=10)
    self.question.grid(row=2, column=1, pady=20)

    self.bouton_oui = Button(self, text="Oui", foreground="black", background="green",
                             width=10, height=3, command=self.repondre_oui)
    self.bouton_oui.grid(row=3, column=1, padx=20)

    self.bouton_jsp = Button(self, text="Je ne sais pas", width=10, height=3,
                             command=self.repondre_jsp)
    self.bouton_jsp.grid(row=4, column=1, padx=20)

    self.bouton_non = Button(self, text="Non", foreground="black", background="red",
                             width=10, height=3, command=self.repondre_non)
    self.bouton_non.grid(row=5, column=1, padx=20)

    self.bouton_retour = Button(self, text="Retour", foreground="purple", background="pink",
                                command=lambda: master.switch_frame(StartPage))
    self.bouton_retour.grid(row=0, column=0, padx=1, pady=5)
def test_integracion(self):
    DM = DataManager(os.environ['DB_URI'], 'test', 'test_shops_integration')
    shop = {
        "nombre": "App Store",
        "descripcion": "Tienda de electronica",
        "direccion": "avd Barcelona",
        "info": "applesupport",
        "productos": []
    }
    product = {
        "nombre_p": "Iphone 6",
        "descripcion_p": "Iphone 6 64GB",
        "precio": 700,
        "cantidad": 5,
        "online": ""
    }
    self.assertEqual(DM.save_one(shop), True)
    self.assertEqual(DM.insert_product(shop['nombre'], product), True)
    self.assertEqual(DM.find_product(shop['nombre'], product['nombre_p']), product)

    product = {
        "nombre_p": "Iphone 6",
        "descripcion_p": "Iphone 6 64GB Plata"
    }
    self.assertEqual(DM.update_product(shop['nombre'], product), True)
    self.assertEqual(DM.delete_product(shop['nombre'], product['nombre_p']), True)
    DM.getCollection().drop()
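# A minimal sketch of how this test might be invoked, assuming the method lives
# in a unittest.TestCase subclass inside a module named test_shops_integration
# (both names are assumptions) and DB_URI points at a reachable MongoDB:
#   DB_URI="mongodb://localhost:27017" python -m unittest test_shops_integration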
class CaptureSession:
    """
    Interfaces between card, gui and data buffers
    """

    def __init__(self, globalSession, name):
        self.name = name
        self.globalSession = globalSession
        self.settings = self.globalSession.settings
        self.dataManager = DataManager(self.settings, self.globalSession)
        self.fitted = False
        self.acquisitionDone = False
        self.dataInMemory = False
        # initialize file manager
        self.fileManager = FileManager(self.settings)

    def run(self):
        # Guarantee latest settings before running
        self.dataManager.updateSettings(self.globalSession.settings)
        self.fileManager.updateSettings(self.globalSession.settings)
        # keep a reference to the Timer itself; Timer.start() returns None,
        # so assigning its result would leave no handle to cancel later
        self.storeThread = threading.Timer(0.005, self.dataManager.storeData)
        self.storeThread.start()
        time.sleep(0.25)
        self.saveData()

    def stopRun(self):
        self.acquisitionDone = True
        self.dataInMemory = True
        try:
            self.saveThread.cancel()
        except AttributeError:
            pass
        self.saveData()

    def readData(self):
        if self.acquisitionDone and not self.dataInMemory:
            allData = self.fileManager.readData(self.name)
            self.dataManager.setData(allData)
            self.dataInMemory = True

    def saveData(self):
        """
        Save session to current filename
        """
        self.fileManager.saveData(self.name, self.dataManager.getAllData())
        if self.globalSession.running and not self.globalSession.stopProgram:
            self.saveThread = threading.Timer(5, self.saveData)
            self.saveThread.start()
def __init__(self, package_path, parent=None):
    super(NoteModel, self).__init__(parent)
    self.notes = []
    dm = DataManager(package_path)
    data = dm.getData()
    # load notes from data file
    for line in data:
        title, description, color, rate = line.split('|')
        self.notes.append(NoteItem(title, description, COLORS[color], rate))
def dcc_centroids(isq_filename):
    dm = DataManager(isq_filename, working_base='/Users/hartleym/working/ct_analysis')
    input_path = dm.spath('threshdt')

    with open('centroids.txt', 'w') as f:
        for n, image in enumerate(yield_stack_from_path(input_path)):
            centroids = find_component_centroids(image)
            for c in centroids:
                f.write("{}\n".format(c + [n]))
def get_stop_words(self):
    print(' -> Getting stop word list...')
    file = 'stopwords_list.csv'
    stop_words_list = []
    if os.path.isfile(self.data_path + file):
        print(' -> Stop Words File is found')
        dm = DataManager()
        df = dm.load_csv(file=self.data_path + file, encoding='utf-8')
        stop_words_list = df['Stopwords'].tolist()
    else:
        print(' -> Stop Words File is not found')
    return stop_words_list
def get_stop_words(self, path):
    file = 'stopwords_list.csv'
    stop_words_list = []
    if os.path.isfile(path + '/' + file):
        print(' ..Stop Words File is found..')
        dm = DataManager()
        df = dm.load_csv(file='data/doc2vec_test_data/0702/stopwords_list.csv', encoding='utf-8')
        stop_words_list = df['Stopwords'].tolist()
    else:
        print(' ..Stop Words File is not found..')
    return stop_words_list
def get_including_words(self, path):
    file = 'including_words_list.csv'
    including_words_list = []
    if os.path.isfile(path + '/' + file):
        print(' ..Including Words File is found..')
        dm = DataManager()
        # load the same path that was checked above
        df = dm.load_csv(file=path + '/' + file, encoding='utf-8')
        including_words_list = df['Includingwords'].tolist()
    else:
        print(' ..Including Words File is not found..')
    print(including_words_list)
    return including_words_list
def get_including_words(self):
    print(' -> Getting including word list...')
    file = 'including_words_list.csv'
    including_words_list = []
    if os.path.isfile(self.data_path + file):
        print(' -> Including Words File is found')
        dm = DataManager()
        df = dm.load_csv(file=self.data_path + file, encoding='utf-8')
        including_words_list = df['Includingwords'].tolist()
    else:
        print(' -> Including Words File is not found')
    print(including_words_list)
    return including_words_list
def __init__(self, master, borne_inf, borne_sup):
    Frame.__init__(self, master)
    Frame.configure(self)
    self.db = DataManager(nom_fichier_bdd)
    self.borne_inf = borne_inf
    self.borne_sup = borne_sup
    self.nb_filmmax = borne_sup - borne_inf
    self.nb_filmfait = 0
    self.questions_non_repondue = self.db.recup_questions_non_repondues(borne_inf, borne_sup)
    self.cles_films = list()
    self.cles_questions = list()
    for (cle1, cle2) in self.questions_non_repondue.keys():
        self.cles_films.append(cle1)
        self.cles_questions.append(cle2)
    self.i_film = 0
    self.i_question = 0

    self.message_jeu = Label(self,
                             text=self.questions_non_repondue.get(
                                 (self.cles_films[0], self.cles_questions[0])),
                             font=("Courrier", 12, "bold"), foreground="black")
    self.message_jeu.grid(row=1, column=1)

    self.bouton_oui = Button(self, text="Oui", foreground="black", background="green",
                             width=10, height=3, command=self.repondre_oui)
    self.bouton_oui.grid(row=5, column=1, padx=20)

    self.bouton_non = Button(self, text="Non", foreground="black", background="red",
                             width=10, height=3, command=self.repondre_non)
    self.bouton_non.grid(row=6, column=1, padx=20)

    self.bouton_retour = Button(self, text="Retour", foreground="purple", background="pink",
                                command=lambda: master.switch_frame(StartPage))
    self.bouton_retour.grid(row=0, column=0, padx=1, pady=5)
def playListOfItems(id_list, monitor):
    log.debug("Loading all items in the list")
    data_manager = DataManager()
    items = []

    for id in id_list:
        url = "{server}/emby/Users/{userid}/Items/" + id + "?format=json"
        result = data_manager.GetContent(url)
        if result is None:
            log.debug("Playfile item was None, so can not play!")
            return
        items.append(result)

    return playAllFiles(items, monitor)
def dthresh_and_dt(isq_filename):
    dm = DataManager(isq_filename, working_base='/Users/hartleym/working/ct_analysis')
    input_path = dm.spath('raw_stack')
    output_path = dm.spath('threshdt')

    def dthresh(image):
        thresh = threshold_otsu(image)
        thresholded = image > thresh
        dt = distance_transform_cdt(thresholded)
        return dt > 18

    apply_stack_transform(input_path, output_path, dthresh)
def __init__(self, globalSession, name):
    self.name = name
    self.globalSession = globalSession
    self.settings = self.globalSession.settings
    self.dataManager = DataManager(self.settings, self.globalSession)
    self.fitted = False
    self.acquisitionDone = False
    self.dataInMemory = False
    # initialize file manager
    self.fileManager = FileManager(self.settings)
def __init__(self, argv):
    self.config = yaml.load(open(".peergovrc", "r").read())
    self.basedir = self.getConfig('basedir')
    self.datadir = self.getConfig('datadir')
    self.authdir = self.getConfig('authdir')
    self.user = self.getConfig('userfpr')  # TODO: confirm that fingerprint actually exists in key ring
    self.port = self.getConfig('port')
    self.authorizations = {}
    self.currentAuthorization = None  # authorization to vote on currentTopic
    self.voting = SchulzeVoting.SchulzeVoting()
    self.manager = DataManager()
    self.manager.datadir = self.datadir
    self.cctx = pyme.core.Context()  # crypto context

    self.ensureDirExists(self.basedir)
    self.ensureDirExists(self.datadir)
    self.ensureDirExists(self.authdir)

    for root, dirs, files in os.walk(self.datadir):
        for dir in dirs:
            self.loadTopic(root + "/" + dir)
        for file in files:
            self.loadData(root, file)
    for root, dirs, files in os.walk(self.authdir):
        for file in files:
            self.loadAuth(root, file)

    if hasGui:
        self.initGui()
    self.peermanager = PeerManager(argv, peergov=self)
def analisi_nazione(self, file_nazionale, output_base, show=None, store=None):
    print(f'Generazione grafici nazionali al {self.__time_str}')
    if self.__data_nazionale is None:
        self.__data_nazionale = DataManager.nazionale_data(file_nazionale)
    self.__nazionale_linear(self.__data_nazionale, output_base=output_base,
                            show=show, store=store)
    self.__nazionale_log(self.__data_nazionale, output_base=output_base,
                         show=show, store=store)
    self.__nazionale_increment(self.__data_nazionale, output_base=output_base,
                               use_percentage=False, show=show, store=store)
    self.__nazionale_increment(self.__data_nazionale, output_base=output_base,
                               use_percentage=True, show=show, store=store)
    self.__nazionale_dettaglio(self.__data_nazionale, output_base=output_base,
                               use_percentage=False, show=show, store=store)
def action_new_data(self):
    """ Handles a request to open a new set of CSV files """
    new_directory = ask_directory("Choose directory to process")

    if new_directory != '' and DataManager.directory_has_data_files(new_directory):
        get_module_logger().info("Parsing directory %s", new_directory)
        self.gui.reset_and_show_progress_bar(new_directory)
        self.msg_queue = queue.Queue()
        self.data_manager = DataManager(self.msg_queue, new_directory)
        self.data_manager.start()
        self.loading_timer = threading.Timer(0.1, self.check_data_manager_status)
        self.loading_timer.start()
def __init__(self, ground_ip):
    self.status_vector = dict()
    self.command_vector = dict()
    self.ground_ip = ground_ip

    self.info_logger = InfoLogger()
    self.data_logger = DataLogger()
    self.adcs_logger = AdcsLogger()
    # @TODO where antenna to start
    # self.adcs_logger.write_info(' {}, {}, {}, {}'.format(0, 0, 0, 0))

    self.elink = elinkmanager.ELinkManager(self, self.ground_ip)
    self.thread_elink = None

    self.data_manager = DataManager(self, self.info_logger, self.data_logger)
    self.thread_data_manager = None

    self.dmc = dmc.DMC(self)
    self.thread_dmc = None

    self.heat = heat.HEAT(self)
    self.thread_heat = None

    self.adc = adc.ADC(self)
    self.thread_adc = None

    self.tx = tx.TX(self)
    self.thread_tx = None

    self.counterdown = CounterDown(self)
    self.paths = paths.Paths()

    GPIO.setmode(GPIO.BOARD)

    Master.__instance = self
def load_data_locally(self):
    try:
        self.heroes = DataManager().load_hero_data()
    except IOError:
        info = QtGui.QMessageBox(
            'Datafile not found',
            'I couldn\'t load the data file from within the current directory, '
            'do you want me to download it from the website?',
            QtGui.QMessageBox.Information,
            QtGui.QMessageBox.Ok,
            QtGui.QMessageBox.Cancel,
            QtGui.QMessageBox.NoButton,
            self)
        if info.exec_() == QtGui.QMessageBox.Ok:
            self.download_data()
    else:
        # data loaded fine, so build the hero button grid
        self.buttons = []
        layout = QtGui.QGridLayout()
        layout.addWidget(self.create_searchfield(), 0, 13, 1, 3)
        for i, hero in enumerate(self.heroes):
            pb = HeroButton(hero, self)
            pb.left_click.connect(self.parentWidget().parentWidget().hero_selected)
            pb.right_click.connect(self.parentWidget().parentWidget().show_hero_info)
            layout.addWidget(pb, 1 + i / HeroPoolView.heroes_per_row,
                             i % HeroPoolView.heroes_per_row)
            self.buttons.append(pb)
        self.setLayout(layout)
def run(data_config, n_epochs, num_kernels, kernel_size,
        dense_hidden_size, use_maxpool, use_torch_conv_layer):
    """
    Runs a training regime for a CNN.
    """
    train_set = DataPartition(data_config, './data', 'train')
    test_set = DataPartition(data_config, './data', 'test')
    manager = DataManager(train_set, test_set)
    loss = nlog_softmax_loss
    learning_rate = .001
    image_width = 64
    net = create_cnn(num_kernels=num_kernels, kernel_size=kernel_size,
                     output_classes=2, image_width=image_width,
                     dense_hidden_size=dense_hidden_size,
                     use_maxpool=use_maxpool,
                     use_torch_conv_layer=use_torch_conv_layer)
    optimizer = optim.Adam(net.parameters(), lr=learning_rate)
    best_net, monitor = minibatch_training(net, manager, batch_size=32,
                                           n_epochs=n_epochs,
                                           optimizer=optimizer, loss=loss)
    classifier = Classifier(best_net, num_kernels, kernel_size,
                            dense_hidden_size, manager.categories, image_width)
    return classifier, monitor
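# A minimal usage sketch for run(); the data_config value and the hyperparameters
# below are illustrative assumptions, not the project's actual settings:
if __name__ == '__main__':
    classifier, monitor = run(data_config='data_config.json', n_epochs=10,
                              num_kernels=16, kernel_size=3,
                              dense_hidden_size=64, use_maxpool=True,
                              use_torch_conv_layer=True)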
def runAcquisition(self, T_val=None):
    packetIdx: int = 0
    command: int = 0  # 0 equals request a packet

    # set time value (TE/TI)
    if T_val is not None:
        self.setTval(T_val)

    # Get/construct package to be sent
    tmp_sequence_pack = ComMngr.constructSequencePacket(self.operation)  # uses self.operation.sequencebytestream
    tmp_scanparam_pack = ComMngr.constructScanParameterPacket(self.operation)  # uses self.operation.scanparameters.f_Ex
    tmp_package = {**tmp_sequence_pack, **tmp_scanparam_pack}

    fields = [command, packetIdx, 0, self.version, tmp_package]
    response = ComMngr.sendPacket(fields)
    if response is None:
        self.parent.OpMngr.setOutput("Console not connected. Nothing received.")
        self.haveResult = False
        return

    self.haveResult = True

    # get the actual data
    tmp_data = np.frombuffer(response[4]['acq'], np.complex64)
    print("Size of received data: {}".format(len(tmp_data)))

    self.dataobject: DataManager = DataManager(tmp_data, self.f_Ex, self.numSamples)
def main():
    parser = argparse.ArgumentParser()
    parser.add_argument('filename', help='Path to ISQ file')
    args = parser.parse_args()

    dm = DataManager(args.filename)
    stack = Image3D.from_path(dm.spath('stripped'))
    seed_labels = find_seed_centroids(stack)

    stage = {}
    stage['name'] = dm.name
    stage['seeds'] = seed_labels

    with open(dm.spath('seeds.yml'), 'w') as f:
        yaml.dump(stage, f)
def toggle_watched(params):
    log.debug("toggle_watched: {0}", params)
    item_id = params.get("item_id", None)
    if item_id is None:
        return

    url = "{server}/emby/Users/{userid}/Items/" + item_id + "?format=json"
    data_manager = DataManager()
    result = data_manager.GetContent(url)
    log.debug("toggle_watched item info: {0}", result)

    user_data = result.get("UserData", None)
    if user_data is None:
        return

    if user_data.get("Played", False) is False:
        markWatched(item_id)
    else:
        markUnwatched(item_id)
def populate_queue(queue, fn):
    dm = DataManager(fn)
    seeds_dir = dm.spath('seeds')
    isolated_dir = dm.spath('isolated_seeds')
    docker_seeds_dir = '/working/C0000230/seeds'
    docker_isolated_dir = '/working/C0000230/isolated_seeds'

    for seed_file in os.listdir(seeds_dir)[2:3]:
        input_file = os.path.join(docker_seeds_dir, seed_file)
        output_file = os.path.join(docker_isolated_dir, seed_file)
        print input_file, output_file
        command = make_command(input_file, output_file)
        subprocess.call(command)
def __init__(self, dsd, settings):
    """Initiate the data manager.

    Arguments
    dsd -- data source description
    settings -- settings

    """
    DataManager.__init__(self)
    itemarg = (dsd.cachedir,
               (settings.previewx, settings.previewy),
               (settings.smallpreviewx, settings.smallpreviewy))
    self._cache = Cache(dsd.cachedir, WordPressItem, itemarg)
    self._harvester = WordpressHarvester(dsd, self._addandcheck)
    self._harvester.newestId = self._cache.newestId
    self._harvester.itemarg = itemarg
    self._setup(dsd)
def __init__(self, dsd, settings):
    """Initiate the data manager.

    Arguments
    dsd -- data source description
    settings -- settings

    """
    DataManager.__init__(self)
    itemarg = (dsd.cachedir,
               (settings.previewx, settings.previewy),
               (settings.smallpreviewx, settings.smallpreviewy),
               settings.library)
    self._cache = Cache(dsd.cachedir, BlogspotItemWithImage, itemarg)
    self._harvester = BlogspotHarvester(dsd, self._addandcheck, BlogspotItemWithImage)
    self._harvester.newestId = self._cache.newestId
    self._harvester.itemarg = itemarg
    self._setup(dsd)
def get_next_episode(item):
    if item.get("Type", "na") != "Episode":
        log.debug("Not an episode, can not get next")
        return None

    parent_id = item.get("ParentId", "na")
    item_index = item.get("IndexNumber", -1)

    if parent_id == "na":
        log.debug("No parent id, can not get next")
        return None

    if item_index == -1:
        log.debug("No episode number, can not get next")
        return None

    url = ('{server}/emby/Users/{userid}/Items?' +
           'Recursive=true' +
           '&ParentId=' + parent_id +
           '&IsVirtualUnaired=false' +
           '&IsMissing=False' +
           '&IncludeItemTypes=Episode' +
           '&ImageTypeLimit=1' +
           '&format=json')

    data_manager = DataManager()
    items_result = data_manager.GetContent(url)
    log.debug("get_next_episode, sibling list: {0}", items_result)

    if items_result is None:
        log.debug("get_next_episode no results")
        return None

    item_list = items_result.get("Items", [])

    for item in item_list:
        index = item.get("IndexNumber", -1)
        # find the very next episode in the season
        if index == item_index + 1:
            log.debug("get_next_episode, found next episode: {0}", item)
            return item

    return None
def tabelle(self, file_nazionale, file_regioni, output_base, show=None, store=None):
    print(f'Generazione tabelle riepilogative al {self.__time_str}')
    if self.__data_nazionale is None:
        self.__data_nazionale = DataManager.nazionale_data(file_nazionale)
    if self.__data_regionale is None:
        self.__data_regionale, self.__codici_regione = DataManager.regioni_data(file_regioni)
    self.__table_rapporto_tamponi_contagi(data_nazionale=self.__data_nazionale,
                                          data_regionale=self.__data_regionale,
                                          output_base=output_base,
                                          show=show, store=store)
def pre_prosseccing(self):
    dm = DataManager()
    data = dm.load_csv(file=self.data_path + self.data_file_name + '.csv', encoding='utf-8')

    with open(self.data_path + self.data_file_name + '.documents', 'wb') as f:
        pickle.dump(data, f)
    with open(self.data_path + self.data_file_name + '_tm.documents', 'wb') as f:
        pickle.dump(data['job_description'], f)

    # # Get posting_id from the revised job_title
    # posting_ids = data['posting_id']
    # posting_list = posting_ids.to_list()
    #
    # # Build the description data set according to posting_id
    # des_data = [data['job_description'][id] for id in posting_ids]
    # title_data = [data['job_title'][id] for id in posting_ids]
    # id_list = [i for i in range(len(posting_list))]
    # df = pd.DataFrame({'id': posting_list, 'job_title': title_data, 'job_description': des_data, 'posting_id': posting_list})
    # df.to_csv('data/doc2vec_test_data/0702/merge_0629_adj.csv', mode='w', encoding='utf-8')

    # Load the revised description set and run the preprocessing steps
    # data = dm.load_csv(file='data/doc2vec_test_data/0702/merge_0629_adj.csv', encoding='utf-8')
    sentences = self.data_text_cleansing(data)
    data_words = list(self.sent_to_words(sentences))
    data_words_nostops = self.remove_stopwords(data_words)
    data_lemmatized = self.lematization(data_words_nostops)
    bigram = self.make_ngram(data_lemmatized, n=2)
    # bigram = self.make_bigram(data_words_nostops)
    # data_lemmatized = self.lematization(bigram)

    # for i in range(len(bigram)):
    #     print(f'[{i}] : {bigram[i]}')

    data_lemmatized_filter = self.word_filtering(bigram)
    for i in range(len(data_lemmatized_filter)):
        print(f'[{i}] : {data_lemmatized_filter[i]}')

    # # uniquewords = self.make_unique_words(data_lemmatized)

    with open(self.data_path + self.data_file_name + '.corpus', 'wb') as f:
        pickle.dump(data_lemmatized_filter, f)

    print('=== end preprocessing ===')
    return data['id'], data_lemmatized_filter
def __init__(self, dsd, settings):
    """Initiate the data manager.

    Arguments
    dsd -- data source description
    settings -- settings

    """
    DataManager.__init__(self)
    cachedir = os.path.join(settings.cachedir, dsd.cacheid)
    itemarg = (cachedir,
               (settings.previewx, settings.previewy),
               (settings.smallpreviewx, settings.smallpreviewy),
               settings.library)
    self._cache = Cache(cachedir, OpacBookItem, itemarg)
    self._harvester = getHarvester(dsd, settings, self._addandcheck)
    self._harvester.newestId = self._cache.newestId
    self._harvester.itemarg = itemarg
    self._sections = dsd.sections
    self._setup(dsd)
def spikey_redis(fn):
    r = redis.StrictRedis(host='192.168.99.100')
    dm = DataManager(fn)
    seeds_dir = dm.spath('seeds')
    isolated_dir = dm.spath('isolated_seeds')
    docker_seeds_dir = '/working/C0000230/seeds'
    docker_isolated_dir = '/working/C0000230/isolated_seeds'
    prefix = 'python /code/ct_pod_analysis/scripts/isolate_single_seed.py'

    for seed_file in os.listdir(seeds_dir)[2:10]:
        input_file = os.path.join(docker_seeds_dir, seed_file)
        output_file = os.path.join(docker_isolated_dir, seed_file)
        command = "{} {} {}".format(prefix, input_file, output_file)
        r.lpush("tasks", command)
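# A minimal sketch of a worker that consumes the commands pushed onto the
# "tasks" list above; the Redis host and the blocking-pop loop are assumptions:
import subprocess

import redis


def run_redis_tasks():
    r = redis.StrictRedis(host='192.168.99.100')
    while True:
        _, command = r.brpop('tasks')  # blocks until a command is available
        subprocess.call(command.decode().split())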
def create():
    """Create the API endpoints."""
    cors = CORS(allow_all_origins=True, allow_all_methods=True, allow_all_headers=True)
    app = falcon.API(middleware=[cors.middleware, MultipartMiddleware()])

    dm = DataManager(InMemoryStorage(), InMemoryStorage(), data_folder, lcdURL, lcdPort)

    app.add_route('/annotatedFiles', AnnotatedFiles(dm))
    app.add_route('/annotatedFiles/json', AnnotatedFilesJSON(dm))
    app.add_route('/publications', GetPublications(dm))
    app.add_route('/expression', CubeExpressions(dm))
    app.add_route('/corpus', CubeCorpora(dm))
    app.add_route('/genre', CubeGenres(dm))
    app.add_route('/function', CubeFunctions(dm))
    app.add_route('/corpus/groups', GroupCorpora(dm))
    app.add_route('/expression/groups', GroupExpressions(dm))
    app.add_route('/function/groups', GroupFunctions(dm))
    app.add_route('/genre/groups', GroupGenres(dm))
    app.add_route('/groups', Groups(dm))
    app.add_route('/obs/filtered', FilteredObservations(dm))
    app.add_route('/obs/filtered/query', QueryFilteredObservations(dm))
    app.add_route('/obs/filtered/result', FilteredResultObservations(dm))
    app.add_route('/obs/filtered/preview', FilteredObservationsPreview(dm))
    app.add_route('/infer', Infer(dm))
    app.add_route('/cc/filtered', CCFiltered(dm))
    app.add_route('/normalize', Normalize(dm))
    app.add_route('/obs/norm2', CreateNormalizedCube(dm))
    app.add_route('/obs/norm/query', QueryNormalizedCube(dm))
    app.add_route('/obs/norm/defs', NormalizedCubeDefinitions(dm))
    app.add_route('/obs/excluded', ExcludedObservations(dm))
    app.add_route('/pub/excluded', ExcludedPublications(dm))
    app.add_route('/image', Image(image_folder))
    app.add_route('/lcd/status', CheckForLCDConnection(dm))

    main_logger.info('App2 is running.')
    return app
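# A minimal sketch of serving the Falcon app above for local development; the
# standard-library wsgiref server, host, and port are assumptions, not the
# project's deployment setup:
if __name__ == '__main__':
    from wsgiref.simple_server import make_server

    httpd = make_server('127.0.0.1', 8000, create())
    httpd.serve_forever()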
def __init__(self, file_nazionale, file_regioni, file_province, show=False, store=True,
             color_map=None, time_str=None, max_days=0):
    self.__showGraph = show
    self.__storeGraph = store
    self.__max_days = max_days
    self.__data_nazionale = DataManager.nazionale_data(file_nazionale, max_days=self.__max_days)
    max_data = self.__data_nazionale.data.max()
    self.__time_str = time_str if time_str is not None else max_data[:10].replace("-", "")
    self.__data_regionale, self.__codici_regione = DataManager.regioni_data(
        file_regioni, max_days=self.__max_days)
    self.__data_provinciale = DataManager.province_data(file_province, max_days=self.__max_days)
    self.__color_map = self.__color_map if color_map is None else color_map
class API:

    def __init__(self):
        self.__db = DB()
        self.__auth = OAuth1(*self.__db.get_auth())
        self.__url = 'https://api.twitter.com/1.1/statuses/'

    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        self.__db.save()

    def __post(self, url, params=None):
        url = self.__url + url
        if params is None:
            response = requests.post(url, auth=self.__auth)
        else:
            response = requests.post(url, auth=self.__auth, params=params)
        response.raise_for_status()
        return response.json()

    def __get(self, url, params=None):
        url = self.__url + url
        if params is None:
            response = requests.get(url, auth=self.__auth)
        else:
            response = requests.get(url, auth=self.__auth, params=params)
        response.raise_for_status()
        return response.json()

    def tweet(self, message):
        url = 'update.json'
        params = {'status': message}
        self.__post(url, params)

    def reply(self, message, reply_id, reply_name):
        url = 'update.json'
        message = '@{} {}'.format(reply_name, message)
        params = {'status': message, 'in_reply_to_status_id': reply_id}
        self.__post(url, params)

    def get_mentions(self):
        url = 'mentions_timeline.json'
        last_id = self.__db.get_last_id()
        params = {'since_id': last_id} if last_id is not None else None
        mentions = self.__get(url, params)
        for mention in mentions[::-1]:
            self.__db.set_last_id(mention['id_str'])
            yield mention
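# A minimal usage sketch; the reply text is illustrative, and the mention fields
# accessed below assume the standard Twitter API v1.1 mention payload:
with API() as api:
    for mention in api.get_mentions():
        api.reply('Thanks for the mention!',
                  mention['id_str'], mention['user']['screen_name'])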
class Application:

    def __init__(self, width, height):
        self.width = width
        self.height = height
        self.world = World(100, 100)
        self.dm = DataManager()
        newObject = self.dm.createInstance("mobile rectangle", "sims")
        self.world.addToWorld(newObject, 20, 20, 10, 5)

    def run(self):
        """I am called to begin the running of the game.
        """
        running = 1
        frames = 0
        counter = 0
        lastFrame = pyui.readTimer()
        endFrame = pyui.readTimer()

        while running:
            pyui.draw()
            if pyui.update():
                # update world here
                interval = pyui.readTimer() - endFrame
                endFrame = pyui.readTimer()
                if self.world.update(interval) == 0:
                    running = 0
            else:
                running = 0

            # track frames per second
            frames += 1
            counter += 1

            # calculate FPS
            if endFrame - lastFrame > 1.0:
                FPS = counter
                counter = 0
                lastFrame = endFrame
                print "FPS: %2d " % FPS
def __init__(self, globalSession, errorFnc):
    self.settings = SessionSettings()
    self.fileManager = FileManager(self.settings)
    self.dmanager = DataManager(self.settings, globalSession, errorFnc)
    self.needsSaved = False  # captured data that needs saved

    # method that updates statusbar
    self.globalSession = globalSession
    # method displays error popup
    self.errorFnc = errorFnc

    # if we load in a session from file, we don't want to
    # wipe it over in a new capture, so we set the
    # read only flag to true
    self.readOnly = False

    self.running = True
    self.dmanager.running = self.running
class CaptureSession:
    """
    Interfaces between card, gui and data buffers
    """

    def __init__(self, globalSession, errorFnc):
        self.settings = SessionSettings()
        self.fileManager = FileManager(self.settings)
        self.dmanager = DataManager(self.settings, globalSession, errorFnc)
        self.needsSaved = False  # captured data that needs saved

        # method that updates statusbar
        self.globalSession = globalSession
        # method displays error popup
        self.errorFnc = errorFnc

        # if we load in a session from file, we don't want to
        # wipe it over in a new capture, so we set the
        # read only flag to true
        self.readOnly = False

        self.running = True
        self.dmanager.running = self.running

    def setName(self, name):
        """ Change the name of the current session """
        self.settings.name = name

    def getName(self):
        return self.settings.name

    def loadSettings(self, path):
        """ Loads just settings from file """
        self.settings = self.fileManager.getSettings(path)
        self.settings.filename = ""

    def saveSession(self):
        """ Save session to current filename """
        self.fileManager.writeCapture(self.dmanager.getRawCountData(),
                                      self.dmanager.getRawAIData(),
                                      self.dmanager.getCombinedData())
        self.needsSaved = False

    def saveSessionAs(self, path):
        """ Save session to new filename """
        self.settings.filename = path
        self.saveSession()

    def startCapture(self):
        """ Need to reinitialise dmanager. Can't rely on the fact that
        self.settings is a pointer as need to recalculate stuff like
        voltage intervals """
        self.settings.sanitise()

        # reinitialise data manager to do things like voltage calcs
        self.dmanager.initialise(self.settings, self.globalSession, self.errorFnc)

        # queue for passing data between acquisition and dmanager
        q = Queue()

        # set up acquisition process and start
        self.captureProcess = Process(target=acquire, args=(self.settings, q, self.running))
        self.captureProcess.start()

        # set up data capture process and start
        self.dAcqThread = Thread(target=self.dmanager.dataCallback, args=(q,))
        self.dAcqThread.start()

        self.needsSaved = True

    def registerGraphManager(self, graphManager):
        self.dmanager.registerGraphManager(graphManager)

    def getRange(self):
        """ Returns the range required for gauge """
        self.settings.sanitise()
        return self.settings.scans

    def clearGraph(self):
        self.dmanager.graphManager.clearPlot()

    def killCapture(self):
        """ Kills running capture. Program's behaviour may become undefined """
        try:
            self.captureProcess.terminate()
        except:
            self.errorFnc("Could not stop capture process")
        self.dAcqThread._Thread__stop()
        self.clearDevice()

    def stopCapture(self):
        """ Stops running capture at end of scan (bit of a cheat, just deletes
        the scan before) """
        self.running = False
        self.dmanager.running = False
        try:
            self.captureProcess.terminate()
        except:
            self.errorFnc("Could not stop capture process")

    def isCapturing(self):
        """ Returns true if capturing is in progress """
        return self.captureProcess.is_alive()

    def setGlobalSettings(self, settings):
        self.globalSettings = settings
from datamanager import DataManager
from graphics import Graphics
from PIL import ImageFont
from project_name_runner import ProjctNumberRunner
from due import DueDate
from biggest_eur_projects_company import BiggestEurProjects
import all_clients

db = DataManager()
db.create_tables("base_data.sql")
pict_ready = 'Picture generating...'

start = input('''
1. project name picture
2. all clients picture
3. due picture
4. 10 biggest projects picture

Enter a number: ''')


def project_name_picture():
    data_list = ProjctNumberRunner('project-names.sql')
    return_list = data_list.dict_to_image()
    Graphics.setup(mode="RGB", size=(1024, 1024), color="black")
    Graphics.make_image(return_list, "project-names.png")
    print(pict_ready)


def all_clients_picture():
from datamanager import DataManager
from graphics import Graphics
from PIL import ImageFont

manager = DataManager()
manager.run_query('10-biggest-eur-projects-company.sql')


def text_size():
    x = 0
    y = 0
    result = []
    font_size = 160
    for row in enumerate(manager.run_query('10-biggest-eur-projects-company.sql')):
        print(row)
        if row[0] % 2 == 1:
            x = 500
        else:
            x = 0
        if row[0] != 0:
            y += 100
            font_size -= 10
        text_options = {
            'xy': (x, y),
            'fill': text_color(row[1][1]),
            'font': ImageFont.truetype("SourceSansPro-Regular.otf", font_size),
            'text': row[1][0]
        }
        print(text_options)
        result.append(text_options)
def wordCount():
    datamanager = DataManager('127.0.0.1', 27017)
    print 'words count is :', datamanager.count()
def __init__(self):
    self.__db = DB()
    self.__auth = OAuth1(*self.__db.get_auth())
    self.__url = 'https://api.twitter.com/1.1/statuses/'
from PIL import ImageFont
from PIL import Image
from PIL import ImageDraw
from datamanager import DataManager
from graphics import Graphics
from due import DueDate

db = DataManager()
db.create_tables("base_data.sql")
data_list = db.run_query("due.sql")
new_list = DueDate.time_converter(data_list)
return_list = DueDate.make_dict(new_list)
Graphics.setup(mode="RGB", size=(1024, 1024), color=(255, 255, 255))
Graphics.make_image(return_list, "due.png")
class Application:
    """ Handles interaction between GUI events, GUI drawing,
    plotting, file reading etc. """

    def __init__(self, _):
        """
        Args:
            _ : Command line arguments (not currently used)
        """
        self.configmanager = ConfigManager(".")
        self.plotter = Plotter(self.configmanager)
        self.windplotter = WindPlotter(self.configmanager)
        self.histogram = Histogram()
        self.msg_queue = None
        self.loading_timer = None
        self.data_manager = None
        self.gui = GUI(self.request_handler)

    def request_handler(self, request, *args):
        """ This function is passed to the GUI. When a button is pressed or
        information is required, the GUI uses this function to access the
        application.

        Args:
            request: One of the request IDs defined in app_reqs.py.
                Determines the action that is taken.
            args: List of additional arguments that may be required on a
                per-request basis
        """
        if request == REQS.CHANGE_SUBPLOT1:
            self.action_subplot_change(0, args[0])
        elif request == REQS.CHANGE_SUBPLOT2:
            self.action_subplot_change(1, args[0])
        elif request == REQS.CHANGE_SUBPLOT3:
            self.action_subplot_change(2, args[0])
        elif request == REQS.AVERAGE_SUBPLOT_DATA:
            self.action_average_data()
        elif request == REQS.RESET_SUBPLOT_DATA:
            self.action_reset_average_data()
        elif request == REQS.SPECIAL_OPTION:
            self.action_special_option()
        elif request == REQS.NEW_DATA:
            self.action_new_data()
        elif request == REQS.ABOUT_DIALOG:
            show_about_dialog()
        elif request == REQS.GET_SPECIAL_ACTIONS:
            return self.data_manager.get_special_dataset_options(args[0])
        elif request == REQS.GET_PLOTTING_STYLE:
            return self.get_plotting_style_for_field(args[0])

    def action_subplot_change(self, subplot_index, display_name):
        """ Handles request to change subplot data

        Args:
            subplot_index : The index of the subplot (0 to 2) to change
            display_name : The display name of the requested data series
        """
        get_module_logger().info("Changing subplot %d to %s", subplot_index, display_name)
        self.plotter.set_visibility(subplot_index, display_name != "None")
        self.gui.set_displayed_field(display_name, subplot_index)
        self.gui.set_dataset_choices(self.data_manager.get_numeric_display_names())

        if display_name != "None":
            self.plotter.set_dataset(
                self.data_manager.get_timestamps(display_name),
                self.data_manager.get_dataset(display_name),
                display_name, subplot_index)

        self.gui.draw(self.plotter)

    def action_average_data(self):
        """ Handles request to show the average of a dataset """
        # Get the dataset of interest
        display_name = self.gui.get_selected_dataset_name()

        # Get the time period over which to average
        try:
            time_period = self.gui.get_averaging_time_period()
        except ValueError:
            return  # Could not convert time period to float

        if time_period == 0:
            return  # Cannot average over zero time!

        # Get the units the time period is in (seconds, minutes etc.)
        time_units = self.gui.get_averaging_time_units()

        get_module_logger().info("Averaging %s over %d %s",
                                 display_name, time_period, time_units.lower())

        time_multipliers = {"Seconds": 1, "Minutes": 60, "Hours": 60 * 60,
                            "Days": 24 * 60 * 60, "Weeks": 7 * 24 * 60 * 60}

        time_period_seconds = time_period * time_multipliers[time_units]

        (data, timestamps) = self.data_manager.get_dataset_average(display_name, time_period_seconds)

        index = self.gui.get_index_of_displayed_plot(display_name)
        self.plotter.set_dataset(timestamps, data, display_name, index)
        self.gui.draw(self.plotter)

    def get_plotting_style_for_field(self, display_name):
        """ Each field can have a style when plotted. This function builds that
        style based on dataset configuration. If there is no config, the
        default plot style is a blue line.
        """
        styles = None
        if self.configmanager.has_dataset_config() and display_name is not None:
            try:
                field_name = self.data_manager.get_field_name_from_display_name(display_name)
                styles = self.configmanager.get_dataset_config('FORMATTING', field_name)
                styles = [style.strip() for style in styles.split(",")]
                if styles[0] == '':
                    styles[0] = 'line'  # Add the default plot style
                if len(styles) == 1:
                    styles.append('b')  # Add the default colour (blue)
            except KeyError:
                pass  # This field name not in the config file

        return ["line", "b"] if styles is None else styles

    def action_reset_average_data(self):
        """ Get the dataset of interest and reset the original data """
        display_name = self.gui.get_selected_dataset_name()
        subplot_index = self.gui.get_index_of_displayed_plot(display_name)

        get_module_logger().info("Resetting dataset %s on subplot %d",
                                 display_name, subplot_index)
        self.plotter.set_dataset(
            self.data_manager.get_timestamps(display_name),
            self.data_manager.get_dataset(display_name),
            display_name, subplot_index)
        self.gui.draw(self.plotter)

    def action_new_data(self):
        """ Handles a request to open a new set of CSV files """
        new_directory = ask_directory("Choose directory to process")

        if new_directory != '' and DataManager.directory_has_data_files(new_directory):
            get_module_logger().info("Parsing directory %s", new_directory)
            self.configmanager.load_dataset_config(new_directory)
            self.gui.reset_and_show_progress_bar("Loading from folder '%s'" % new_directory)
            self.msg_queue = queue.Queue()
            self.data_manager = DataManager(self.msg_queue, new_directory, self.configmanager)
            self.data_manager.start()
            self.loading_timer = threading.Timer(0.1, self.check_data_manager_status)
            self.loading_timer.start()

    def check_data_manager_status(self):
        """ When the data manager is loading new data, updates the progress bar """
        dataloader_finished = False
        try:
            msg = self.msg_queue.get(0)
            if msg == EVT_DATA_LOAD_COMPLETE:
                self.gui.set_progress_text("Processing data...")
                self.gui.set_progress_percent(0)
            elif msg == EVT_DATA_PROCESSING_COMPLETE:
                # Data has finished loading.
                dataloader_finished = True
                self.gui.hide_progress_bar()
                self.plot_datasets()
            else:
                self.gui.set_progress_percent(msg)
        except queue.Empty:
            pass
        except:
            raise

        if not dataloader_finished:
            self.loading_timer = threading.Timer(0.1, self.check_data_manager_status)
            self.loading_timer.start()

    def action_special_option(self):
        """ Handles requests for special options e.g. histogram, windrose plot """
        action = self.gui.get_special_action()

        if action == "Windrose":
            get_module_logger().info("Plotting windrose")
            self.gui.add_new_window('Windrose', (7, 6))

            # Get the wind direction and speed data
            speed = self.data_manager.get_dataset('Wind Speed')
            direction = self.data_manager.get_dataset('Direction')
            self.windplotter.set_data(speed, direction)

            # Add window and axes to the GUI
            try:
                self.gui.draw(self.windplotter, 'Windrose')
            except Exception as exc:  # pylint: disable=broad-except
                get_module_logger().info("Could not plot windrose (%s)", exc)
                show_info_dialog(
                    "Could not plot windrose - check that the windspeed and direction data are valid")

        elif action == "Histogram":
            get_module_logger().info("Plotting histogram")
            self.gui.add_new_window('Histogram', (7, 6))

            # Get the data for the histogram
            dataset_name = self.gui.get_selected_dataset_name()
            speed = self.data_manager.get_dataset(dataset_name)
            self.histogram.set_data(speed, dataset_name)

            # Add window and axes to the GUI
            self.gui.draw(self.histogram, 'Histogram')

    def plot_datasets(self):
        """ Plots the default set of data (from configuration file) """
        self.plotter.clear_data()

        # Get the default fields from config
        default_fields = self.configmanager.get_global_config('DEFAULT', 'DefaultFields')
        default_fields = [field.strip() for field in default_fields.split(",")]

        # Drawing multiple plots, so turn off drawing until all three are processed
        self.plotter.suspend_draw(True)

        field_count = 0
        numeric_fields = self.data_manager.get_numeric_field_names()
        for field in default_fields:
            if field in numeric_fields:
                display_name = self.data_manager.get_display_name(field)
                self.action_subplot_change(field_count, display_name)
                field_count += 1

        # If field count is less than 3, fill the rest of the plots in order from datasets
        for field in numeric_fields:
            if field_count == 3:
                break  # No more fields to add
            if field in default_fields:
                continue  # Already added, move onto next field

            display_name = self.data_manager.get_display_name(field)
            self.action_subplot_change(field_count, display_name)
            field_count += 1

        # Now the plots can be drawn
        self.gui.set_dataset_choices(self.data_manager.get_numeric_display_names())
        self.plotter.suspend_draw(False)
        self.gui.draw(self.plotter)