def make_backup(cleanup=False, scheduler=False):
    """Back up the config file and optionally prune old scheduled backups.

    Args:
        cleanup: when True, delete scheduled backups older than BACKUP_DAYS.
        scheduler: when True, tag the backup filename as scheduler-generated.

    Returns:
        True if the backup file exists in the backup folder afterwards,
        False otherwise.
    """
    suffix = '.sched.ini' if scheduler else '.ini'
    backup_file = 'config.backup-{}{}'.format(helpers.now(), suffix)
    backup_folder = plexpy.CONFIG.BACKUP_DIR
    backup_file_fp = os.path.join(backup_folder, backup_file)

    # Recreate the backup folder in case the user deleted it manually.
    if not os.path.exists(backup_folder):
        os.makedirs(backup_folder)

    # Flush current settings to disk before copying.
    plexpy.CONFIG.write()
    shutil.copyfile(plexpy.CONFIG_FILE, backup_file_fp)

    if cleanup:
        cutoff = time.time() - plexpy.CONFIG.BACKUP_DAYS * 86400
        # Delete all scheduled backups older than BACKUP_DAYS.
        for root, dirs, files in os.walk(backup_folder):
            scheduled = (os.path.join(root, f) for f in files if f.endswith('.sched.ini'))
            for file_ in scheduled:
                if os.stat(file_).st_mtime < cutoff:
                    try:
                        os.remove(file_)
                    except OSError as e:
                        logger.error("Tautulli Config :: Failed to delete %s from the backup folder: %s" % (file_, e))

    if backup_file in os.listdir(backup_folder):
        logger.debug("Tautulli Config :: Successfully backed up %s to %s" % (plexpy.CONFIG_FILE, backup_file))
        return True

    logger.error("Tautulli Config :: Failed to backup %s to %s" % (plexpy.CONFIG_FILE, backup_file))
    return False
def make_backup(cleanup=False, scheduler=False):
    """Back up the database and optionally prune old scheduled backups.

    A corrupt database is still backed up, but its filename is flagged with
    '.corrupt' and a notification is queued; cleanup is skipped in that case.

    Returns:
        True if the backup file exists in the backup folder afterwards,
        False otherwise.
    """
    # Check the integrity of the database first.
    integrity = (integrity_check()['integrity_check'] == 'ok')
    corrupt = '' if integrity else '.corrupt'
    if not integrity:
        plexpy.NOTIFY_QUEUE.put({'notify_action': 'on_plexpydbcorrupt'})

    suffix = '.sched.db' if scheduler else '.db'
    backup_file = 'tautulli.backup-{}{}{}'.format(helpers.now(), corrupt, suffix)
    backup_folder = plexpy.CONFIG.BACKUP_DIR
    backup_file_fp = os.path.join(backup_folder, backup_file)

    # Recreate the backup folder in case the user deleted it manually.
    if not os.path.exists(backup_folder):
        os.makedirs(backup_folder)

    # Hold a write lock while copying so the file on disk is consistent.
    db = MonitorDatabase()
    db.connection.execute('BEGIN IMMEDIATE')
    shutil.copyfile(db_filename(), backup_file_fp)
    db.connection.rollback()

    # Only clean up if the database integrity is okay.
    if cleanup and integrity:
        cutoff = time.time() - plexpy.CONFIG.BACKUP_DAYS * 86400
        # Delete all scheduled backups older than BACKUP_DAYS.
        for root, dirs, files in os.walk(backup_folder):
            scheduled = [os.path.join(root, f) for f in files if f.endswith('.sched.db')]
            for file_ in scheduled:
                if os.stat(file_).st_mtime < cutoff:
                    try:
                        os.remove(file_)
                    except OSError as e:
                        logger.error("Tautulli Database :: Failed to delete %s from the backup folder: %s" % (file_, e))

    if backup_file in os.listdir(backup_folder):
        logger.debug("Tautulli Database :: Successfully backed up %s to %s" % (db_filename(), backup_file))
        return True

    logger.error("Tautulli Database :: Failed to backup %s to %s" % (db_filename(), backup_file))
    return False
def _similar_artists(self, callback, artist, threshold):
    """callback(artist, sim_artist, match, source)

    Look up artists similar to *artist* and feed each one to *callback*,
    unless the artist was already looked up less than *threshold* seconds ago.
    """
    timestamp = now()
    diff = timestamp - self.get_artist_timestamp(artist)
    if diff < threshold:
        self._logger.debug(u"similar_artists[%s]: looked up %d seconds ago" % (artist, diff))
        return
    self.set_artist_timestamp(artist, timestamp)
    try:
        self._logger.debug(u"similar_artists[%s]: lookup" % (artist))
        a = self._artist(artist)
        if a is None:
            self._logger.info(u"similar_artists[%s]: no result" % (artist))
            return
        else:
            p = a.similar
            self._logger.info(u"similar_artists[%s]: %d result(s)" % (artist, len(p)))
            for sim in p:
                callback(artist, sim.name.lower(), sim.stats.match, self._title)
    # BUG FIX: "except URLError, e:" is Python-2-only syntax; "as e" works
    # on Python 2.6+ and Python 3.
    except URLError as e:
        self._logger.error(e)
        self._logger.info(u"similar_artists[%s]: no result" % (artist))
        return
def save(self, *args, **kwargs):
    """Persist the card and schedule start/end notification tasks.

    Celery tasks are scheduled only for planned times that are still in
    the future (compared as UTC-aware datetimes).
    """
    instance = super(Card, self).save(*args, **kwargs)
    if self.planned_start_time and self.planned_start_time.replace(tzinfo=utc) > now():
        # Imported lazily to avoid a circular import with the app module.
        from app import schedule_card_start_time
        schedule_card_start_time.apply_async(kwargs={'card_id': str(self.id)}, eta=self.planned_start_time)
    if self.planned_end_time and self.planned_end_time.replace(tzinfo=utc) > now():
        from app import schedule_card_end_time
        schedule_card_end_time.apply_async(kwargs={'card_id': str(self.id)}, eta=self.planned_end_time)
    if self.completed_at:
        # NOTE(review): this overwrites any existing completion timestamp
        # with "now" on every save of a completed card — confirm intended.
        self.completed_at = now()
        instance.save()
    return instance
def _similar_tracks(self, callback, artist, title, threshold):
    """Look up tracks similar to (artist, title) and feed them to *callback*.

    Skips the lookup when the track was queried less than *threshold*
    seconds ago. Results are ranked 1.0 down to ~0.0 by list position.
    """
    timestamp = now()
    diff = timestamp - self.get_track_timestamp(artist, title)
    if diff < threshold:
        self._logger.debug(u"similar_tracks[%s-%s]: looked up %d seconds ago" % (artist, title, diff))
        return
    self.set_track_timestamp(artist, title, timestamp)
    try:
        self._logger.debug(u"similar_tracks[%s-%s]: lookup" % (artist, title))
        self._delay()
        a = en_song.search(title=title, artist=artist)
        try:
            p = en_playlist.static(type='song-radio', song_id=a[0].id, results=100)
            i = 100.0
            self._logger.info(u"similar_tracks[%s-%s]: %d result(s)" % (artist, title, len(p)))
            for song in p:
                callback(artist, title, song.artist_name.lower(), song.title.lower(), i / 100.0, self._title)
                i -= 1.0
        except IndexError:
            # a[0] raised: the search returned no songs.
            self._logger.info(u"similar_tracks[%s-%s]: no result" % (artist, title))
            return
    # BUG FIX: "except Exception, e:" is Python-2-only syntax; "as e" works
    # on Python 2.6+ and Python 3.
    except Exception as e:
        self._logger.error(e)
        self._logger.info(u"similar_tracks[%s-%s]: no result" % (artist, title))
        return
def displace(name, target, location):
    """Replace *location* with a symlink to *target*.

    An existing symlink at *location* is removed; an existing folder is
    moved into the displacement folder with a timestamped name first.
    """
    # First, ensure that the target exists.
    if not os.path.exists(target):
        # BUG FIX: Python-2 print statements converted to function calls
        # (identical output on Py2, valid on Py3).
        print(error + " Target %s doesn't exist!" % target)
        print("Try one of these:")
        listArtPaths()
        return
    # If it's a link, kill it.
    if os.path.islink(location):
        bash("rm", location)
    # If a folder, move it out of the way.
    if os.path.exists(location):
        # Create the displacement folder if not done so yet.
        displaced = os.path.join(app_path, displacementFolder)
        if not os.path.exists(displaced):
            bash("mkdir", displaced)
        # Move folder into displaced folder with datetime suffix.
        displaced = os.path.join(app_path, displacementFolder, "%s-%s" % (name, now()))
        print(warn + " Moving %s folder to %s" % (location, displaced))
        bash("mv", location, displaced)
    # Create the link.
    bash("ln -sf", target, location)
    bash("ls -al", location)
def _similar_artists(self, callback, artist, threshold):
    """Look up artists similar to *artist* and feed each one to *callback*.

    Skips the lookup when the artist was queried less than *threshold*
    seconds ago. Results are ranked 1.0 down to ~0.0 by list position.
    """
    timestamp = now()
    diff = timestamp - self.get_artist_timestamp(artist)
    if diff < threshold:
        self._logger.debug(u"similar_artists[%s]: looked up %d seconds ago" % (artist, diff))
        return
    self.set_artist_timestamp(artist, timestamp)
    try:
        self._logger.debug(u"similar_artists[%s]: lookup" % (artist))
        self._delay()
        a = en_artist.search(name=artist)
        try:
            i = 100.0
            p = a[0].get_similar(results=100)
            self._logger.info(u"similar_artists[%s]: %d result(s)" % (artist, len(p)))
            for sim in p:
                callback(artist, sim.name.lower(), i / 100.0, self._title)
                i -= 1.0
        except IndexError:
            # a[0] raised: the search returned no artists.
            self._logger.info(u"similar_artists[%s]: no result" % (artist))
            return
    # BUG FIX: "except Exception, e:" is Python-2-only syntax; "as e" works
    # on Python 2.6+ and Python 3.
    except Exception as e:
        self._logger.error(e)
        self._logger.info(u"similar_artists[%s]: no result" % (artist))
        return
def track_played(self, track, skipped=False, locked=False):
    """Record that *track* finished (or was skipped) and update relations.

    Pairs the track with each of the last ``maxhistory`` history entries and
    strengthens/weakens their relations. ``locked=True`` means the caller
    already holds the lock.
    """
    if not locked:
        self.acquire()
    # Sanity check: the track must be the most recent one we started.
    if len(self._history) == 0 or self._history[0]['track'] != track:
        self._logger.error(u"played: no record of starting this track")
        if not locked:
            self.release()
        return
    if not skipped:
        self._lastplayed_track = track
    timestamp = now()
    self._history[0]['skipped'] = skipped
    if len(self._history) > 1:
        for i in range(min(self.maxhistory, len(self._history) - 1)):
            # A skipped track following another skipped track carries no
            # relation signal; ignore the pair.
            if skipped and self._history[1 + i]['skipped']:
                continue
            # Weight decays with distance in the history.
            factor = self.__factor_relation(i, self.maxhistory)
            hist_track = self._history[1 + i]['track']
            hist_skipped = self._history[1 + i]['skipped']
            track_relation = TrackRelationFactory.get(track, hist_track)
            track_relation.update(not hist_skipped, factor)
            self._logger.info(u"relation updated: %s" % track_relation)
    self._relation_resetted = False
    self.update_ranking()
    if not locked:
        self.release()
def __init__(self, fpath):
    """Build a queue element for *fpath*: probe mediainfo, detect crop.

    On success the element status is advanced to 2 (see updateStatus);
    any failure is logged and the element is left partially initialized.
    """
    try:
        self.added = now()          # time the file entered the queue
        self.finished = 0
        self.fullpath = fpath
        self.fileid = getNewFileID()
        tempMediainfo = MediaInfo.parse(self.fullpath)
        self.mediainfo = {}
        for track in tempMediainfo.tracks:
            if track.track_type not in self.mediainfo:
                self.mediainfo[track.track_type] = track.to_data()
            else:
                # Multiple audio/subtitle tracks are collected into a list;
                # the first occurrence is converted from a dict to a list
                # on demand.
                if track.track_type in ['Audio', 'Subtitle']:
                    if not isinstance(self.mediainfo[track.track_type], list):
                        tempTrack = self.mediainfo[track.track_type]
                        self.mediainfo[track.track_type] = []
                        self.mediainfo[track.track_type].append(tempTrack)
                    self.mediainfo[track.track_type].append(track.to_data())
        self.outputfilename = pacvert.CONFIG.OUTPUT_DIRECTORY + '/' + generateOutputFilename(self.fullpath)
        # Thumbnails are only needed transiently for crop detection.
        self.createThumbs()
        self.crop = self.analyzeThumbs()
        self.deleteThumbs()
        self.updateStatus(2)
    except Exception as e:
        # NOTE(review): broad catch — any initialization failure is only
        # logged, leaving the object in an incomplete state.
        logger.error(e)
def __init__(self, trackA, trackB):
    """Create a relation between two tracks and persist it immediately.

    Also resolves the corresponding artist-to-artist relation.
    """
    self.trackA = trackA
    self.trackB = trackB
    self.ratingref = RatingFactory.get(0.7, 1.0)
    self.artistRelation = ArtistRelationFactory.get(trackA.artist, trackB.artist)
    self.lastused = now()
    TrackRelationFactory.save(self)
class Card(db.Document):
    """A kanban card belonging to a board."""

    STATUS_CHOICES = (
        ('todo', 'Todo'),
        ('in_progress', 'In progress'),
        ('in_review', 'In review'),
        ('done', 'Done'),
    )

    title = db.StringField(required=True, unique_with='board')
    content = db.StringField(required=True)
    # BUG FIX: pass the callable, not its result. ``default=now()`` is
    # evaluated once at import time, stamping every card created in the
    # process with the same timestamp; ``default=now`` is called per save.
    created_at = db.DateTimeField(default=now)
    created_by = db.ReferenceField('User')
    completed_at = db.DateTimeField(required=False)
    planned_start_time = db.DateTimeField(required=False)
    planned_end_time = db.DateTimeField(required=False)
    status = db.StringField(choices=STATUS_CHOICES, default='todo')
    comments = db.ListField(db.ReferenceField('Comment'))
    board = db.ReferenceField('Board')

    def save(self, *args, **kwargs):
        """Persist the card and schedule start/end notification tasks."""
        instance = super(Card, self).save(*args, **kwargs)
        # Only schedule tasks for planned times still in the future.
        if self.planned_start_time and self.planned_start_time.replace(tzinfo=utc) > now():
            # Imported lazily to avoid a circular import with the app module.
            from app import schedule_card_start_time
            schedule_card_start_time.apply_async(kwargs={'card_id': str(self.id)}, eta=self.planned_start_time)
        if self.planned_end_time and self.planned_end_time.replace(tzinfo=utc) > now():
            from app import schedule_card_end_time
            schedule_card_end_time.apply_async(kwargs={'card_id': str(self.id)}, eta=self.planned_end_time)
        if self.completed_at:
            self.completed_at = now()
            instance.save()
        return instance
def main():
    """Smoke-test one or more images on a freshly created lease.

    Each image is tested in sequence with a cool-down between runs so the
    previous instance can be torn down.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-i', '--image', action='append',
        help='Image (name or ID) to use. Can be specified multiple times. '
             'If none are provided, uses "CC-CentOS7"',
    )
    parser.add_argument(
        '--key-name', type=str, default='default',
        help='SSH keypair name on OS used to create an instance.'
    )
    parser.add_argument(
        '--key-file', type=str,
        default=os.environ.get('KEY_FILE', '~/.ssh/id_rsa'),
        help='Path to SSH key associated with the key-name. If not provided, '
             'falls back to envvar KEY_FILE then to the string "~/.ssh/id_rsa"',
    )
    parser.add_argument(
        '-n', '--no-clean', action='store_true',
        help='Don\'t clean up the lease on a crash (allows for debugging)',
    )
    parser.add_argument(
        '-v', '--verbose', action='store_true',
        help='Increase verbosity',
    )
    add_arguments(parser)
    args = parser.parse_args()

    images = args.image
    if not images:
        images = ['CC-CentOS7', 'CC-Ubuntu16.04']
    if args.verbose:
        print('testing images: {}'.format(images))

    key_file = os.path.expanduser(args.key_file)
    if args.verbose:
        print('key file: {}'.format(key_file))

    session, rc = session_from_args(args=args, rc=True)
    lease = Lease(
        keystone_session=session,
        name='test-lease-{}'.format(BUILD_TAG),
        length=datetime.timedelta(minutes=240),
        sequester=True,
        _no_clean=args.no_clean,
    )
    print(now(), 'Lease: {}'.format(lease))
    with lease:
        sleep = 0  # sleep between loops to let the instance get torn down
        # BUG FIX: iterate the resolved ``images`` list. Iterating
        # ``args.image`` crashed (None is not iterable) when no -i flag was
        # given and silently ignored the documented default images.
        for image in images:
            time.sleep(sleep)
            sleep = 30
            print('-' * 80)
            print('Starting test with image "{}"'.format(image))
            test_simple(lease, session, rc, key_file, args.key_name, image=image)
def update(self, diff, new, factor=1.0):
    """Fold a rating delta into this entity's running average rating.

    NOTE(review): *factor* is accepted but never used in this body —
    confirm whether it should scale *diff*.
    """
    if diff > 0.0:
        # Positive contributions count as a "play" for recency purposes.
        self.lastplayed = now()
    if new:
        # First-time contribution: bump the counter and add a 0.5 bonus.
        self.track_cnt += 1
        diff += 0.5
    self.track_rating += diff
    if self.track_cnt > 0:
        # Rating is the running average of all contributions.
        self.rating = self.track_rating / self.track_cnt
    Logger.info(u"updated rating: %s" % self)
def _check_avail():
    """Raise UnavailableDataError while data is still marked unavailable.

    Once the bootstrap deadline has passed, the unavailable marker is
    cleared and subsequent checks succeed.
    """
    global _data, _unavailable_data, _lock, _bootstrap_limit
    with _lock:
        if _data is _unavailable_data:
            if _bootstrap_limit <= helpers.now():
                # Bootstrap window expired: stop reporting unavailability.
                # NOTE(review): this clears the sentinel itself rather than
                # resetting ``_data`` — confirm that is the intent.
                _unavailable_data = None
            else:
                raise UnavailableDataError()
def test_create_card(self, email_end_celery_task, email_start_celery_task):
    """Creating a card returns 201; planned times schedule the email tasks.

    The two parameters are mock objects (patched celery tasks) injected by
    the surrounding test framework.
    """
    user, token = self.login_and_generate_jwt_token()
    board = self.create_dummy_board(created_by=user)
    response = self.app.post('/board/{}/card'.format(board.id),
                             headers=self.auth_header(token),
                             data=json.dumps({
                                 "title": "Test",
                                 "content": "Test"
                             }))
    self.assertEqual(response.status_code, 201)
    self.assertEqual(response.json.get('title'), "Test")
    self.assertEqual(response.json.get('board'), str(board.id))
    # Create card with planned start and end time
    start_time = now() + timedelta(days=1)
    end_time = now() + timedelta(days=7)
    response = self.app.post('/board/{}/card'.format(board.id),
                             headers=self.auth_header(token),
                             data=json.dumps({
                                 "title": self.get_random_string(),
                                 "content": self.get_random_string(),
                                 "planned_start_time": start_time.strftime("%d/%m/%Y %H:%M"),
                                 "planned_end_time": end_time.strftime("%d/%m/%Y %H:%M"),
                             }))
    card = Card.objects.get(id=response.json.get('id'))
    # Times are compared after round-tripping through the API's
    # minute-resolution format.
    self.assertEqual(start_time.strftime("%d/%m/%Y %H:%M"),
                     card.planned_start_time.strftime("%d/%m/%Y %H:%M"))
    self.assertEqual(end_time.strftime("%d/%m/%Y %H:%M"),
                     card.planned_end_time.strftime("%d/%m/%Y %H:%M"))
    # Each mocked celery task must be scheduled exactly once, at the
    # minute-truncated planned time.
    email_start_celery_task.apply_async.assert_called_once_with(
        kwargs={'card_id': str(card.id)},
        eta=datetime.strptime(start_time.strftime("%d/%m/%Y %H:%M"), "%d/%m/%Y %H:%M"))
    email_end_celery_task.apply_async.assert_called_once_with(
        kwargs={'card_id': str(card.id)},
        eta=datetime.strptime(end_time.strftime("%d/%m/%Y %H:%M"), "%d/%m/%Y %H:%M"))
def _write_tx(self, source, dest, amount, memo, ts=None):
    """Upsert one ledger transaction row and return the upsert result.

    The timestamp defaults to "now" when the caller supplies none.
    """
    record = {
        'payer_id': source,
        'payee_id': dest,
        'amount': amount,
        'memo': memo,
        'tx_timestamp': ts if ts else h.now(),
    }
    return self.db.transaction.upsert(record)
class Board(db.Document):
    """A kanban board owning a list of cards."""

    STATUS_CHOICES = (
        ('active', 'Active'),
        ('archived', 'Archived'),
    )

    name = db.StringField(required=True, unique=True)
    status = db.StringField(choices=STATUS_CHOICES, default='active')
    # BUG FIX: pass the callable, not its result. ``default=now()`` is
    # evaluated once at import time, stamping every board created in the
    # process with the same timestamp; ``default=now`` is called per save.
    created_at = db.DateTimeField(default=now)
    created_by = db.ReferenceField('User')
    cards = db.ListField(db.ReferenceField('Card'))
def _to_escrow(self, moiner, amt):
    """Move *amt* from the pool into escrow on behalf of *moiner*.

    The escrow row is only recorded when the pool->escrow payment succeeds.
    """
    tx_msg = (f'Mining for {moiner}. Escrow group id: {self.pool_id}')
    if not self._pay('pool', 'escrow', amt, tx_msg):
        return
    entry = {
        'escrow_group_id': self.pool_id,
        'tx_timestamp': h.now(),
        'payer_id': 'pool',
        'payee_id': moiner,
        'amount': amt,
        'memo': tx_msg,
    }
    self.db.escrow.upsert(entry)
def track_started(self, track, locked=False):
    """Register *track* as now playing and push it onto the history.

    ``locked=True`` means the caller already holds the lock.
    """
    if not locked:
        self.acquire()
    self._logger.info(u"started: %s" % track)
    timestamp = now()
    self._playing_track = track
    # Newest history entry sits at index 0.
    self._history.appendleft({'track': track, 'skipped': False})
    self.lookup(track, locked=True)
    self.update_ranking(locked=True)
    # CONFIG: max history length?
    if len(self._history) > 100:
        self._history.pop()
    if not locked:
        self.release()
def schedule_card_end_time(card_id):
    """Task body: remind the user when a card's planned end time has passed."""
    from database.models import Card
    try:
        card = Card.objects.get(id=card_id)
    except DoesNotExist:
        # Card was deleted after the task was scheduled; nothing to do.
        return
    deadline = card.planned_end_time
    if not deadline:
        return
    if now() > deadline.replace(tzinfo=utc):
        send_mail("Please end card")
def load_track(track):
    """Load persisted relations involving *track* into the in-memory lib.

    Relations come either from the DB backend or from per-track pickle
    files on disk; afterwards the cache is trimmed to __maxentries by
    repeatedly evicting the least-recently-used relations.
    """
    timestamp = datetime.utcnow()
    Logger.info(u"loading track: %s" % track)
    track_key = track.key()
    if TrackRelationFactory.__use_db:
        # Relations are stored as pickled blobs keyed by either endpoint.
        TrackRelationFactory.__db.execute(
            u"SELECT relation FROM track_relation "
            "WHERE trackA = ? or trackB = ?", (track_key, track_key))
        row = TrackRelationFactory.__db.fetchone()
        while row:
            relation = pickle.loads(str(row[0]))
            TrackRelationFactory.register(relation)
            row = TrackRelationFactory.__db.fetchone()
    elif TrackRelationFactory.__use_fs:
        # One <relation_key>.pkl file per relation, under artist/track dirs.
        path = join(TrackRelationFactory.__path, track.artist.key(), track_key)
        if isdir(path):
            for entry in listdir(path):
                if not entry.endswith(u".pkl"):
                    continue
                rel_key = entry[:-4]
                f = join(path, entry)
                if not isfile(f):
                    continue
                load = False
                # NOTE: dict.has_key is Python-2-only.
                if not TrackRelationFactory.__lib.has_key(rel_key):
                    load = True
                else:
                    # Reload when the on-disk file is newer than the cache.
                    modtime = getmtime(f)
                    relation = TrackRelationFactory.by_key(rel_key)
                    if modtime != relation.lastmodified:
                        TrackRelationFactory.forget(relation)
                        load = True
                        Logger.info(u"loading track: file has been modified %s" % f)
                if load:
                    TrackRelationFactory.load_file(f)
    nof_entries = TrackRelationFactory.len()
    Logger.info(u"loading track: %s" % ((datetime.utcnow() - timestamp)))
    # Evict least-recently-used relations until the cache fits.
    while nof_entries > TrackRelationFactory.__maxentries:
        too_delete = (nof_entries - TrackRelationFactory.__maxentries)
        Logger.info(u"forgetting %d track relations" % too_delete)
        # First pass: find the oldest lastused timestamp.
        min_used = now()
        for v in TrackRelationFactory.__lib.values():
            if v.lastused < min_used:
                min_used = v.lastused
        # Second pass: forget up to too_delete entries with that timestamp.
        for v in TrackRelationFactory.__lib.values():
            if v.lastused == min_used:
                Logger.info(u"forgetting %s" % v)
                TrackRelationFactory.forget(v)
                too_delete -= 1
                if too_delete == 0:
                    break
        nof_entries = TrackRelationFactory.len()
    Logger.info(u"track relations: %d" % nof_entries)
def listening_for(self, message):
    """Decide whether this message is a 'moin' this plugin should handle."""
    # Reset the round once the current pool window has elapsed.
    if h.now() > self.next_pool:
        self._reset()
    text = message.get('text')
    user = message.get('metadata', {}).get('source_user')
    eligible = (isinstance(text, str) and user
                and user not in self.moined
                and user not in self.pool_excludes)
    return 'moin' in text.lower() if eligible else False
def listening_for(self, message):
    """Decide whether this message is a '<trigger> pool' command.

    Also tops up the pool whenever the scheduled fill-up time has passed.
    """
    if h.now() > getattr(self, 'next_pool', 0):
        self._update_pool()
    text = message.get('text')
    if not isinstance(text, str):
        return False
    words = re.split(r'\s+', text.lower())
    return (len(words) > 1
            and words[0] in self.triggers
            and words[1] == 'pool')
def load_artist(artist):
    """Load persisted relations involving *artist* into the in-memory lib.

    Relations come from the DB backend and/or per-artist pickle files on
    disk; afterwards the cache is trimmed to __maxentries by repeatedly
    evicting the least-recently-used relations.
    """
    timestamp = datetime.utcnow()
    Logger.info(u"loading artist: %s" % artist)
    artist_key = artist.key()
    if ArtistRelationFactory.__use_db:
        # Relations are stored as pickled blobs keyed by either endpoint.
        ArtistRelationFactory.__db.execute(
            u"SELECT relation FROM artist_relation "
            "WHERE artistA = ? or artistB = ?", (artist_key, artist_key))
        row = ArtistRelationFactory.__db.fetchone()
        while row:
            relation = pickle.loads(str(row[0]))
            ArtistRelationFactory.register(relation)
            row = ArtistRelationFactory.__db.fetchone()
    # NOTE: unlike load_track, this is "if" (not "elif") — both backends
    # may be consulted.
    if ArtistRelationFactory.__use_fs:
        path = join(ArtistRelationFactory.__path, artist_key)
        if isdir(path):
            for entry in listdir(path):
                if not entry.endswith(u".pkl"):
                    continue
                rel_key = entry[:-4]
                f = join(path, entry)
                if not isfile(f):
                    continue
                load = False
                # NOTE: dict.has_key is Python-2-only.
                if not ArtistRelationFactory.__lib.has_key(rel_key):
                    load = True
                else:
                    # Reload when the on-disk file is newer than the cache.
                    modtime = getmtime(f)
                    relation = ArtistRelationFactory.by_key(rel_key)
                    if modtime != relation.lastmodified:
                        ArtistRelationFactory.forget(relation)
                        load = True
                        Logger.info(u"loading artist: file has been modified %s" % f)
                if load:
                    ArtistRelationFactory.load_file(f)
    nof_entries = ArtistRelationFactory.len()
    Logger.info(u"loading artist: %s" % (datetime.utcnow() - timestamp))
    # Evict least-recently-used relations until the cache fits.
    while nof_entries > ArtistRelationFactory.__maxentries:
        too_delete = (nof_entries - ArtistRelationFactory.__maxentries)
        Logger.info(u"forgetting %d artist relations" % too_delete)
        # First pass: find the oldest lastused timestamp.
        min_used = now()
        for v in ArtistRelationFactory.__lib.values():
            if v.lastused < min_used:
                min_used = v.lastused
        # Second pass: forget up to too_delete entries with that timestamp.
        for v in ArtistRelationFactory.__lib.values():
            if v.lastused == min_used:
                Logger.info(u"forgetting %s" % v)
                ArtistRelationFactory.forget(v)
                too_delete -= 1
                if too_delete == 0:
                    break
        nof_entries = ArtistRelationFactory.len()
    Logger.info(u"artist relations: %d" % nof_entries)
def updateStatus(self, newVal):
    """Update status of scanned file and resort the queue."""
    logger.debug("Setting " + self.fullpath + " from status "
                 + helpers.statusToString(self.status).lower()
                 + " to " + helpers.statusToString(newVal).lower())
    self.status = newVal
    if newVal == 0:
        # Active: remember when processing started.
        self.timestarted = now()
    elif newVal == 3:
        # Finished: delete original if transcode succeeded and deletion is
        # enabled, then apply any rename requested via the web interface.
        self.deleteOriginal()
        self.performRename()
def _update_pool(self):
    """Top up the pool with a random amount once the fill-up time arrives."""
    _now = h.now()
    if self.next_pool > _now:
        # Not due yet.
        return
    # Next fill-up lands 4-15 hours from now.
    self.next_pool = _now + (randint(4, 15) * 3600)
    amt = randint(25, 75) * 10
    pool_balance = self._get_balance('pool', True)
    # Record history only after both the balance and ledger writes succeed.
    if self._update_balance('pool', pool_balance + amt):
        if self._write_tx('None', 'pool', amt, 'daily pool deposit', _now):
            self.db.pool_history.upsert({
                'fillup_ts': _now,
                'next_fillup_ts': self.next_pool,
                'amount': amt
            })
def _get_time_to_next_fill_up(self):
    """Return a human-readable countdown to the next pool fill-up."""
    _now = h.now()
    diff = self.next_pool - _now
    if diff < 60:
        return f'{diff} Seconds'
    parts = []
    hours, remain = divmod(diff, 3600)
    if hours:
        parts.append(f'{hours} Hours')
    minutes = remain // 60
    if minutes:
        parts.append(f'{minutes} Minutes')
    return ', '.join(parts)
def log(self, message, level):
    """Route *message* to the 'mylar' logger and mirror it into LOG_LIST.

    Non-debug messages are prepended to helpers.LOG_LIST for display in
    the web UI; every message is prefixed with the current thread name.
    """
    logger = logging.getLogger('mylar')
    threadname = threading.currentThread().getName()
    if level != 'DEBUG':
        import helpers
        # BUG FIX: was ``herp.LOG_LIST`` — a typo that raised NameError on
        # every non-debug log call.
        helpers.LOG_LIST.insert(0, (helpers.now(), message, level, threadname))
    message = threadname + ' : ' + message
    if level == 'DEBUG':
        logger.debug(message)
    elif level == 'INFO':
        logger.info(message)
    elif level == 'WARNING':
        # logger.warn is a deprecated alias of warning.
        logger.warning(message)
    else:
        logger.error(message)
def save_configuration():
    """Write a timestamped snapshot of data and cluster state to _data_dir.

    Returns:
        False when no data directory is configured, True after writing.
    """
    global _data_dir
    # Don't write if there's no destination.
    if _data_dir is None:
        return False
    rm_old_files()
    snapshot = {
        'copy': data.get_copy(),                    # raw copy of all data
        'cluster': sync.cluster_state.get_state(),  # current system state
    }
    file_name = _data_dir + '/' + helpers.now().strftime(DATA_DIR_STRFTIME)
    with open(file_name, 'w') as f:
        f.write(helpers.dump_json(snapshot))
    return True
def save_configuration():
    """Persist a timestamped snapshot (data copy + cluster state).

    Returns:
        False when no data directory is configured, True after writing.
    """
    global _data_dir
    if _data_dir is None:
        # Nowhere to write the snapshot.
        return False
    rm_old_files()
    snapshot = {
        'copy': data.get_copy(),                    # raw copy of all data
        'cluster': sync.cluster_state.get_state(),  # current system state
    }
    file_name = _data_dir + '/' + helpers.now().strftime(DATA_DIR_STRFTIME)
    with open(file_name, 'w') as f:
        f.write(helpers.dump_json(snapshot))
    return True
def played(self, factor=1.0):
    """Record a play: bump the counter, stamp the time, notify the track."""
    self.playcount += 1
    self.lastplayed = now()
    self.track.played(factor)
def main():
    """Smoke-test one or more images on a freshly created lease.

    Each image is tested in sequence with a cool-down between runs so the
    previous instance can be torn down.
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-i', '--image', action='append',
        help='Image (name or ID) to use. Can be specified multiple times. '
             'If none are provided, uses "CC-CentOS7"',
    )
    parser.add_argument(
        '--key-name', type=str, default='default',
        help='SSH keypair name on OS used to create an instance.')
    parser.add_argument(
        '--key-file', type=str,
        default=os.environ.get('KEY_FILE', '~/.ssh/id_rsa'),
        help='Path to SSH key associated with the key-name. If not provided, '
             'falls back to envvar KEY_FILE then to the string "~/.ssh/id_rsa"',
    )
    parser.add_argument(
        '-n', '--no-clean', action='store_true',
        help='Don\'t clean up the lease on a crash (allows for debugging)',
    )
    parser.add_argument(
        '-v', '--verbose', action='store_true',
        help='Increase verbosity',
    )
    add_arguments(parser)
    args = parser.parse_args()

    images = args.image
    if not images:
        images = ['CC-CentOS7', 'CC-Ubuntu16.04']
    if args.verbose:
        print('testing images: {}'.format(images))

    key_file = os.path.expanduser(args.key_file)
    if args.verbose:
        print('key file: {}'.format(key_file))

    session, rc = session_from_args(args=args, rc=True)
    lease = Lease(
        keystone_session=session,
        name='test-lease-{}'.format(BUILD_TAG),
        length=datetime.timedelta(minutes=240),
        sequester=True,
        _no_clean=args.no_clean,
    )
    print(now(), 'Lease: {}'.format(lease))
    with lease:
        sleep = 0  # sleep between loops to let the instance get torn down
        # BUG FIX: iterate the resolved ``images`` list. Iterating
        # ``args.image`` crashed (None is not iterable) when no -i flag was
        # given and silently ignored the documented default images.
        for image in images:
            time.sleep(sleep)
            sleep = 30
            print('-' * 80)
            print('Starting test with image "{}"'.format(image))
            test_simple(lease, session, rc, key_file, args.key_name, image=image)
def __init__(self):
    """Capture the creation time and the initial state."""
    self.dob = now()
    self._current = self.get_current_state()
def elapsed(self):
    """Return whole seconds since creation, clamped to zero."""
    seconds = int((now() - self.dob).total_seconds())
    # Handle time error margins that can cause this to be negative:
    return max(0, seconds)
def _touch_last_push(self):
    """Stamp the time of the most recent push."""
    self._last_push = helpers.now()
def queued(self):
    """Remember when this item was last queued."""
    self.lastqueued = now()
def queued(self):
    """Stamp the queue time and notify the underlying track."""
    self.lastqueued = now()
    self.track.queued()
def seed(self, track, locked=False):
    """Calculate relations based on track as seed.

    Walks artist and track relations reachable from the seed (plus the
    persistent seed sets) and folds their ratings into each related
    entity's running relation average. ``locked=True`` means the caller
    already holds the lock.
    """
    if not locked:
        self.acquire()
    benchmark = Benchmark()
    timestamp = now()
    seed_track = set()
    seed_artist = set()
    if track:
        seed_track.add(track)
        seed_artist.add(track.artist)
        self.lookup(track, True)
    # check artist relations
    cnt = 0
    benchmark.start()
    tt = []
    for seed_a in seed_artist.union(self._seed_artists):
        self._logger.info(u'check artist relations for {}'.format(seed_a))
        for artist_relation in ArtistRelationFactory.by_artist(seed_a):
            cnt += 1
            # Pick whichever endpoint is NOT the seed artist.
            other_artist = artist_relation.artistA
            if artist_relation.artistA.name == seed_a.name:
                other_artist = artist_relation.artistB
            other_artist.relation_sum += artist_relation.rating
            other_artist.relation_cnt += 1
            other_artist.relation = (other_artist.relation_sum /
                                     other_artist.relation_cnt)
            top_ten(tt, u'artist related with {}({}/{}={}) to {}'.format(
                scale_rating(artist_relation.rating),
                scale_rating(other_artist.relation_sum),
                scale_rating(other_artist.relation_cnt),
                scale_rating(other_artist.relation),
                other_artist), artist_relation.rating)
            artist_relation.lastused = timestamp
    top_ten_dump(tt, self._logger.info)
    self._logger.info(u"update ranking: check artist took %s" % benchmark)
    self._logger.info(u"updated %d artist(s)" % cnt)
    # check track relations
    cnt = 0
    benchmark.start()
    tt = []
    for seed_t in seed_track.union(self._seed_tracks):
        self._logger.info(u'check track relations for {}'.format(seed_t))
        for track_relation in TrackRelationFactory.by_track(seed_t):
            # Pick whichever endpoint is NOT the seed track.
            other_track = track_relation.trackA
            if track_relation.trackA.title == seed_t.title and \
               track_relation.trackA.artist.name == seed_t.artist.name:
                other_track = track_relation.trackB
            cnt += 1
            # Banned seed tracks still touch lastused but contribute no
            # rating.
            if not track.ban:
                other_track.relation_sum += track_relation.rating
                other_track.relation_cnt += 1
                other_track.relation = (other_track.relation_sum /
                                        other_track.relation_cnt)
                top_ten(tt, u'track related with {} to {}'.format(
                    scale_rating(track_relation.rating),
                    other_track), track_relation.rating)
            track_relation.lastused = timestamp
    top_ten_dump(tt, self._logger.info)
    self._logger.info(u"update ranking: check track took %s" % benchmark)
    self._logger.info(u"updated %d track(s)" % cnt)
    if not locked:
        self.release()
def played(self, factor=1.0):
    """Stamp the play time and fold a new contribution into the rating."""
    self.lastplayed = now()
    self.update(True, factor)
def started(self):
    """Stamp the start time and notify the artist."""
    self.laststarted = now()
    self.artist.started()
def queued(self):
    """Stamp the queue time and notify the artist."""
    self.lastqueued = now()
    self.artist.queued()
def started(self):
    """Stamp the start time; forward to the track if one is attached."""
    self.laststarted = now()
    if self.track:
        self.track.started()
def started(self):
    """Remember when this item was last started."""
    self.laststarted = now()
def handle(self, message):
    """Reply with the current pool status."""
    response = self._format_get_pool()
    if not response:
        return
    # Push the next pool check out a minute so we don't immediately refill.
    self.next_pool = h.now() + 60
    opts = self.build_reply_opts(message)
    self.reply(message, response, opts)
def next_file(self, locked=False):
    """Pick the next file to play.

    Ranks every active track (damping recently played/started/queued
    tracks and artists), randomly picks one of the top-ranked tracks,
    then returns its best-rated active file. Returns None when no active
    tracks exist. ``locked=True`` means the caller already holds the lock.
    """
    self._logger.info(u"next file: start")
    if not locked:
        self.acquire()
    if not self._ranking_updated:
        self.update_ranking()
    best_ranking = 0
    best_tracks = []
    tt = []
    timestamp = now()
    # calculate last_*_timestamps (played / queued / started)
    track_lastplayed_timestamp = timestamp - self._thres_track_lastplayed
    track_laststarted_timestamp = timestamp - self._thres_track_laststarted
    track_lastqueued_timestamp = timestamp - self._thres_track_lastqueued
    artist_lastplayed_timestamp = timestamp - self._thres_artist_lastplayed
    artist_laststarted_timestamp = timestamp - self._thres_artist_laststarted
    artist_lastqueued_timestamp = timestamp - self._thres_artist_lastqueued
    has_active_tracks = False
    for track in TrackFactory.active_tracks():
        has_active_tracks = True
        artist = track.artist
        # The damping factor shrinks linearly towards 0 the more recently
        # the track or its artist was played/started/queued.
        factor = 1.0
        if (track.lastplayed > track_lastplayed_timestamp):
            factor = min(factor, 1.0 - ((1.0 * track.lastplayed - track_lastplayed_timestamp) / self._thres_track_lastplayed))
        if (artist.lastplayed > artist_lastplayed_timestamp):
            factor = min(factor, 1.0 - ((1.0 * artist.lastplayed - artist_lastplayed_timestamp) / self._thres_artist_lastplayed))
        if (track.laststarted > track_laststarted_timestamp):
            factor = min(factor, 1.0 - ((1.0 * track.laststarted - track_laststarted_timestamp) / self._thres_track_laststarted))
        if (artist.laststarted > artist_laststarted_timestamp):
            factor = min(factor, 1.0 - ((1.0 * artist.laststarted - artist_laststarted_timestamp) / self._thres_artist_laststarted))
        if (track.lastqueued > track_lastqueued_timestamp):
            factor = min(factor, 1.0 - ((1.0 * track.lastqueued - track_lastqueued_timestamp) / self._thres_track_lastqueued))
        if (artist.lastqueued > artist_lastqueued_timestamp):
            factor = min(factor, 1.0 - ((1.0 * artist.lastqueued - artist_lastqueued_timestamp) / self._thres_artist_lastqueued))
        ranking = int(self._ranking_base * factor * track.ranking)
        if ranking > best_ranking:
            self._logger.debug("%2.2f (best=): %s" % (ranking, track))
            best_ranking = ranking
            best_tracks = [track]
        elif ranking == best_ranking:
            self._logger.debug("%2.2f (best+): %s" % (ranking, track))
            best_tracks.append(track)
        top_ten(tt, track, ranking)
    if not has_active_tracks:
        self._logger.error(u"No active tracks")
        if not locked:
            self.release()
        return None
    top_ten_dump(tt, self._logger.info, u"rank")
    self._logger.info("best tracks: %d" % (len(best_tracks)))
    best_track = choice(best_tracks)
    best_track.started()
    # pick the best file
    best_rating = 0.0
    best_files = []
    for file in best_track.files():
        if not file.active:
            continue
        # Rate files by plays per (1 + skips).
        t = file.playcount / (1.0 + file.skipcount)
        if t > best_rating:
            best_rating = t
            best_files = [file]
        elif t == best_rating:
            best_files.append(file)
    if not locked:
        self.release()
    self._logger.info(u"next file: stop")
    return choice(best_files)
def _touch_last_reachable(self):
    """Mark the peer reachable and stamp when it was last seen."""
    self._reachable = True
    self._last_reachable = helpers.now()
class Comment(db.Document):
    """A user comment attached to a card."""

    content = db.StringField(required=True)
    # BUG FIX: pass the callable, not its result. ``default=now()`` is
    # evaluated once at import time, stamping every comment created in the
    # process with the same timestamp; ``default=now`` is called per save.
    created_at = db.DateTimeField(default=now)
    created_by = db.ReferenceField('User')
    card = db.ReferenceField('Card')
def __init__(self, artistA, artistB):
    """Create an artist-to-artist relation and persist it immediately."""
    self.artistA = artistA
    self.artistB = artistB
    self.lastused = now()
    ArtistRelationFactory.save(self)