def initDB(master):
    """Tries to load a DB, if that fails it creates a new one."""
    if os.path.isfile(BACKUP_XML_DEST):
        try:
            db = xmlparser.databaseFromXML(BACKUP_XML_DEST)
        except Exception:
            try:
                db = xmlparser.databaseFromXML(DEST)
            except (IOError, EOFError):
                db = core.Database()
                print("No save found, creating new one")
    else:
        try:
            # Legacy load from .pkl
            # db = core.load(master.saveDest)
            db = xmlparser.databaseFromXML(DEST)
        except (IOError, EOFError):
            db = core.Database()
            print("No save found, creating new one")

    dbh = DatabaseHandler(master, db)
    # Overrides the generic resizeEvent to resize the Entry View (IFFY)
    master.ui.centralwidget.resizeEvent = dbh.resizeEvent
    dbh.resized()
    master.databaseHandler = dbh
    return dbh
def test_scanBeatportID(self):
    f = Path(filedir() / 'data/9348620_take_care.flac')
    db = core.Database()
    core.addTrackToDB(f, db)
    db.scanBeatportID([f])
    self.assertEqual(db.db[9348620].file_path, filedir() / 'data/9348620_take_care.flac')
def test_doFuzzyMatch(self):
    # 5945839 and 9348620 are the same
    f = Path(filedir() / 'data/9348620_take_care.flac')
    db = core.Database()
    core.doFuzzyMatch(f, db)
    tr = db.db[5945839]
    self.assertEqual(tr.beatport_id, 5945839)
    self.assertEqual(tr.artists, ['Ronny Vergara'])
    self.assertEqual(tr.title, 'Take Care')
    self.assertEqual(tr.remixer, 'Hackler & Kuch Remix')
def checkDataFromClass(data: list, DIRECTORY='testDatabase') -> None:
    """Tests the behaviour of the Database class."""
    FILENAME = 'testDatabase.json'
    database = core.Database(directory=DIRECTORY, filename=FILENAME)
    database.add(data)
    messageWaiting = database.getWaitingMessages()
    for x in range(len(messageWaiting)):
        assert messageWaiting[x]['title'] == f'titulo{x}'
        assert messageWaiting[x]['link'] == f'link{x}'
        assert messageWaiting[x]['sendDate'] == f'date{x}'
def test_addTrackToDB(self):
    db = core.Database()
    core.addTrackToDB(filedir() / 'data/9348620_take_care.flac', db)
    tr = db.db[9348620]
    self.assertEqual(tr.beatport_id, 9348620)
    self.assertEqual(tr.artists, ['Ronny Vergara'])
    self.assertEqual(tr.title, 'Take Care')
    self.assertEqual(tr.album, 'Remixes Compilation VOL02')
    self.assertEqual(tr.remixer, 'Hackler & Kuch Remix')
    self.assertEqual(tr.released, '2017-06-05')
    self.assertEqual(tr.bpm, 126)
    self.assertEqual(tr.genre, 'Techno (Peak Time / Driving)')
    self.assertEqual(tr.label, 'Dolma Records')
    self.assertEqual(tr.catalog, 'DM098')
def databaseFromXML(xmlURL):
    tree = ET.ElementTree()
    root = tree.parse(xmlURL)
    database = core.Database()
    for entryNode in root.iter('entry'):
        entry = core.Entry()
        entry.text = entryNode.text
        if entryNode.text is None:
            entry.text = ''
        attrib = entryNode.attrib
        entry.color = makeString(attrib['color'])
        entry.title = makeString(attrib['title'])
        entry.timeCreated = float(attrib['timeCreated'])
        entry.timeLastEdited = float(attrib['timeLastEdited'])
        database.addEntry(entry)
    return database
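# For reference, a minimal sketch of the XML shape that databaseFromXML()
# consumes, inferred from the parsing code above: only <entry> elements are
# looked at (the root element name is not checked), each with color, title,
# timeCreated and timeLastEdited attributes and free-form text content.
# The root name, attribute values and file path below are illustrative only:
#
#   <database>
#       <entry color="#ffffff" title="My note"
#              timeCreated="1496650000.0" timeLastEdited="1496650100.0">
#           Free-form entry text goes here.
#       </entry>
#   </database>
#
#   db = databaseFromXML('backup.xml')  # hypothetical path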
def test_flac_fileTagsUpdate(self):
    f = Path(filedir() / 'data/9348620_take_care.flac')
    db = core.Database()
    core.addTrackToDB(f, db)
    tr = db.db[9348620]
    tr.fileTagsUpdate(force=True)
    audiof = FLAC(tr.file_path)
    self.assertEqual(audiof['ARTIST'], ['Ronny Vergara'])
    self.assertEqual(audiof['DATE'], ['2017'])  # we save only the year into the file
    self.assertEqual(audiof['GENRE'], ['Techno (Peak Time / Driving)'])
    self.assertEqual(audiof['ORGANIZATION'], ['Dolma Records'])
    self.assertEqual(audiof['TITLE'], ['Take Care (Hackler & Kuch Remix)'])
    self.assertEqual(audiof['ALBUM'], ['Remixes Compilation VOL02'])
    self.assertEqual(audiof['BPM'], ['126'])
import argparse
import sys
from os.path import isfile

import core


def argsParserInit():
    parser = argparse.ArgumentParser()
    parser.add_argument('-c', '--clean-tags', action='store_true', help='clean tags in audio files')
    parser.add_argument('-v', '--verbose', action='store_true', help='verbose output')
    parser.add_argument('-a', '--artwork', action='store_true', help='update track artwork')
    parser.add_argument('-r', '--recursive', action='store_true', help='run recursively')
    parser.add_argument('-z', '--fuzzy', action='store_true', help='try to fuzzy match')
    parser.add_argument('-i', '--input', help='specify input', default='')
    parser.add_argument('-f', '--force', action='store_true', help='force tag overwrite')
    parser.add_argument('--save-db', help='save tags to database', default='local.db')
    parser.add_argument('--load-db', help='load tags from database', default='local.db')
    return parser


if __name__ == "__main__":
    print('*** welcome beatport_tagger ***')
    # main db
    db = core.Database()
    # input parser
    input_parser = argsParserInit()
    args = input_parser.parse_args()
    # args check
    if len(sys.argv) <= 1:
        input_parser.print_help()
        sys.exit(0)
    # load existing db
    if isfile(args.load_db):
        print('\n** database found! loading data')
        db.loadJSON(args.load_db)
        print(f'** number of tracks in db: {len(db.db)}')
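# Example invocation, based on the flags defined above; the script name and
# the input path are assumptions, not taken from the project:
#
#   python beatport_tagger.py -i ./music -r -z --load-db local.db --save-db local.db
#
# This would scan ./music recursively, attempt fuzzy matching, and read/write
# the tag database from/to local.db.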
import sys

import core
from BotAuth import DataBot

if __name__ == '__main__':
    # Check the Python version
    if sys.version_info < (3, 6):
        sys.exit('Este algoritmo requer uma versao do python 3.6 ou superior!')

    print('[CORE] Iniciando algoritmo ...')
    URLS = [
        'http://portal.uern.br/blog/category/noticias/feed/',
        'https://aduern.org.br/category/noticias/feed/'
    ]
    token, chatId = DataBot.readJson()
    database = core.Database(sizeHistory=len(URLS) * 100)
    files = core.downloadXML(URLS)
    if len(files) > 0:
        analyze = core.AnalyzeRSS(filenames=files)
        datas = analyze.getData()
        database.add(datas)
        messages = database.getWaitingMessages()
        bot = core.BotTelegram(token, chatId)
        for message in messages:
            messageToSend = message['title'] + '\n' + message['link']
            responses = bot.sendMessage(messageToSend)