def __init__(self, file):
    """Parse the top-level ASF header object of a Windows Media file.

    Reads and validates the 30-byte ASF_Header_Object, then feeds each
    of the `objnum` contained sub-objects to _getnextheader().

    :param file: open binary file positioned at the start of the stream
    :raises ParseError: if the file is truncated or not an ASF file
    """
    core.AVContainer.__init__(self)
    self.mime = 'video/x-ms-asf'
    self.type = 'asf format'
    # scratch state shared with the sub-object parsers; removed once
    # parsing is finished (see the del statements below)
    self._languages = []
    self._extinfo = {}
    h = file.read(30)
    if len(h) < 30:
        raise ParseError()
    (guidstr, objsize, objnum, reserved1,
     reserved2) = struct.unpack('<16sQIBB', h)
    guid = self._parseguid(guidstr)
    if (guid != GUIDS['ASF_Header_Object']):
        raise ParseError()
    # the two reserved bytes have fixed values in valid ASF headers
    if reserved1 != 0x01 or reserved2 != 0x02:
        raise ParseError()
    log.debug('Header size: %d / %d objects' % (objsize, objnum))
    # the remainder of the header object holds the sub-objects,
    # laid out back to back
    header = file.read(objsize - 30)
    for _ in range(0, objnum):
        h = self._getnextheader(header)
        # h[1] is the size consumed by the sub-object just parsed
        header = header[h[1]:]
    del self._languages
    del self._extinfo
def lineReceived(self, raw_message):
    """Handle one newline-delimited JSON message from mpv's IPC socket.

    Dispatched in order of precedence: replies to commands we sent
    (matched via 'request_id'), command error reports, and asynchronous
    events (routed to EVENT_* methods on the handler).

    :param raw_message: raw bytes of one line from the socket
    """
    try:
        log.debug('ctrl:mpv: received %s' % raw_message)
        msg = json.loads(raw_message.decode('utf-8'))
    except ValueError:
        # not valid JSON: the IPC stream is unusable, drop the connection
        self.transport.loseConnection()
    else:
        if 'request_id' in msg \
                and int(msg['request_id']) in self.__commands_cb:
            # reply to a pending command: fire the stored Deferred
            cb = self.__commands_cb[int(msg['request_id'])]
            cb.callback(msg)
        elif 'error' in msg:
            # mpv reports 'success' even for normal replies; only
            # forward real failures
            if msg['error'] != 'success':
                self.factory.handler.error(msg['error'])
        elif 'event' in msg:
            # map e.g. event 'playback-restart' to
            # handler.EVENT_playback_restart
            try:
                f = getattr(self.factory.handler,
                            'EVENT_%s' % msg['event'].replace('-', '_'))
            except AttributeError:
                log.debug('ctrl:mpv: unknown event received: %s'
                          % msg['event'])
            else:
                # remaining message keys are passed through as kwargs
                for unwanted_arg in ('event', 'id'):
                    if unwanted_arg in msg:
                        del msg[unwanted_arg]
                f(**msg)
        else:
            log.err('ctrl:mpv: unhandled message received: %s' % msg)
def progressive(self, file):
    """
    Try to find out with brute force if the mpeg is interlaced or not.
    Search for the Sequence_Extension in the extension header (01B5)

    Returns True once a Sequence_Extension was found and the
    progressive/interlaced attribute was set, False otherwise.
    """
    file.seek(0)
    # The stream is binary: the search buffer and pattern must be bytes.
    # (A str buffer would raise TypeError on 'buffer += file.read(...)'
    # under Python 3.)
    buffer = b''
    count = 0
    while 1:
        if len(buffer) < 1000:
            count += 1
            if count > 1000:
                # give up after ~1MB of data without a match
                break
            buffer += file.read(1024)
            if len(buffer) < 1000:
                break
        # 0x000001B5 = extension start code
        pos = buffer.find(b'\x00\x00\x01\xb5')
        if pos == -1 or len(buffer) - pos < 5:
            # no complete start code in this window; keep a small tail so
            # a code split across two reads can still be found
            buffer = buffer[-10:]
            continue
        # high nibble of the byte after the start code = extension id
        ext = (buffer[pos + 4] >> 4)
        if ext == 8:
            pass
        elif ext == 1:
            # Sequence_Extension: bit 3 of the following byte is the
            # progressive_sequence flag
            if (buffer[pos + 5] >> 3) & 1:
                self._set('progressive', True)
            else:
                self._set('interlaced', True)
            return True
        else:
            log.debug('ext: %r' % ext)
        buffer = buffer[pos + 4:]
    return False
def _parsekv2(self, s): pos = 0 strno, descriptorlen, descriptortype, valuelen = struct.unpack( '<2xHHHI', s[pos:pos + 12]) pos += 12 descriptorname = s[pos:pos + descriptorlen] pos += descriptorlen descriptorvalue = s[pos:pos + valuelen] pos += valuelen value = None if descriptortype == 0x0000: # Unicode string value = descriptorvalue elif descriptortype == 0x0001: # Byte Array value = descriptorvalue elif descriptortype == 0x0002: # Bool value = struct.unpack('<H', descriptorvalue)[0] != 0 pass elif descriptortype == 0x0003: # DWORD value = struct.unpack('<I', descriptorvalue)[0] elif descriptortype == 0x0004: # QWORD value = struct.unpack('<Q', descriptorvalue)[0] elif descriptortype == 0x0005: # WORD value = struct.unpack('<H', descriptorvalue)[0] else: log.debug('Unknown Descriptor Type %d' % descriptortype) return (pos, descriptorname, value, strno)
def _parsekv(self, s): pos = 0 (descriptorlen, ) = struct.unpack('<H', s[pos:pos + 2]) pos += 2 descriptorname = s[pos:pos + descriptorlen] pos += descriptorlen descriptortype, valuelen = struct.unpack('<HH', s[pos:pos + 4]) pos += 4 descriptorvalue = s[pos:pos + valuelen] pos += valuelen value = None if descriptortype == 0x0000: # Unicode string value = descriptorvalue elif descriptortype == 0x0001: # Byte Array value = descriptorvalue elif descriptortype == 0x0002: # Bool (?) value = struct.unpack('<I', descriptorvalue)[0] != 0 elif descriptortype == 0x0003: # DWORD value = struct.unpack('<I', descriptorvalue)[0] elif descriptortype == 0x0004: # QWORD value = struct.unpack('<Q', descriptorvalue)[0] elif descriptortype == 0x0005: # WORD value = struct.unpack('<H', descriptorvalue)[0] else: log.debug('Unknown Descriptor Type %d' % descriptortype) return (pos, descriptorname, value)
def parse_files(self, folder, files, media_dict, force):
    """Sync the media rows for the files of one library folder.

    Files already known in ``media_dict`` are re-parsed only when
    ``force`` is set or their mtime is newer than the recorded one.
    Files matched here are removed from ``media_dict``, so whatever is
    left in it afterwards is gone from disk (the caller cleans those up).

    :param folder: LibraryFolder the files belong to
    :param files: file names inside that folder
    :param media_dict: mapping of absolute path -> existing media object
    :param force: re-parse even when mtime is unchanged
    """
    for f in files:
        filepath = os.path.join(folder.path, f)
        if os.path.isfile(filepath):
            log.debug('library: updating file %s' % filepath)
        else:
            # path listed but not a regular file (e.g. dangling symlink)
            log.debug('library: skipping broken symlink %s' % filepath)
            continue
        need_update = True
        if filepath in media_dict:
            file_obj = media_dict[filepath]
            need_update = force or os.stat(
                filepath).st_mtime > file_obj.last_modified
        elif self.parser.is_supported(filepath):
            # new file of a supported media type
            file_obj = self.OBJECT_CLASS(filename=f, folder=folder)
        else:
            # unsupported file type: ignore
            continue
        if need_update:
            file_obj = self._get_file_info(file_obj)
            if file_obj is None:
                # parsing failed: drop the file
                continue
            file_obj.last_modified = int(time.time())
        else:
            self.parser.extra_parse(file_obj)
        # mark the path as seen so it is not treated as deleted later
        if filepath in media_dict:
            del media_dict[filepath]
def stop_if_idle(self):
    """Watchdog callback: shut mpv down if playback is still stopped."""
    self.__stop_watchdog = None
    if self.state['playback'] != PLAYER_STOP:
        log.debug(
            'ctrl:mpv: not stopping mpv process, playback in progress')
        return
    self.stop()
def __init__(self, file): core.AVContainer.__init__(self) # read the header h = file.read(12) if h[:4] not in (b"RIFF", b'SDSS', b'RMP3', ): raise ParseError('unsupported RIFF header: %s' % h[:4]) self.has_idx = False self.header = {} self.junkStart = None self.infoStart = None self.type = h[8:12] if self.type == b'AVI ': self.mime = 'video/avi' elif self.type == b'WAVE': self.mime = 'audio/wav' try: while self._parseRIFFChunk(file): pass except IOError: log.err('error in file, stop parsing') self._find_subtitles(file.name) if not self.has_idx and self.media == core.MEDIA_AV: log.debug('WARNING: avi has no index') self._set('corrupt', True)
def stop_watching_dir(self, dir_path):
    """Remove the inotify watch previously set on ``dir_path``.

    A missing watch is only logged, never raised to the caller.
    """
    watched = twisted.python.filepath.FilePath(dir_path)
    try:
        self.ignore(watched)
    except KeyError:
        log.info("inotify: failed to stop watching directory '%s' "
                 "(not watched?)" % dir_path)
    else:
        log.debug("inotify: stopped watching directory '%s'" % dir_path)
def process_tracks(self, tracks):
    """Walk the matroska Tracks element and parse each track entry."""
    payload = tracks.get_data()
    offset = 0
    while offset < tracks.get_len():
        entry = EbmlEntity(payload[offset:])
        log.debug('ELEMENT %X found' % entry.get_id())
        self.process_track(entry)
        # advance past the whole entry, CRC element included
        offset += entry.get_total_len() + entry.get_crc_len()
def EVENT_idle(self):
    """mpv 'idle' event: record the stop and (re)arm the stop watchdog."""
    self.state['playback'] = PLAYER_STOP
    monitor = self.__monitor
    if monitor.running:
        monitor.stop()
    # restart the idle watchdog, cancelling any pending one first
    pending = self.__stop_watchdog
    if pending:
        log.debug('ctrl:mpv: stop watchdog reset')
        pending.cancel()
    self.__stop_watchdog = reactor.callLater(600, self.stop_if_idle)
    self.player.dispatch_signame('player.status')
def watch_dir(self, dir_path, library):
    """Register an inotify watch on ``dir_path`` for ``library``.

    A directory that disappeared between listing and watching is
    logged and skipped.
    """
    on_event = functools.partial(self.process_event,
                                 library=library,
                                 dir_path=dir_path)
    target = twisted.python.filepath.FilePath(dir_path)
    try:
        self.watch(target, self.IN_WATCH_MASK, callbacks=[on_event])
    except twisted.python._inotify.INotifyError:
        log.debug("inotify: dir to watch gone '%s'" % dir_path)
    else:
        log.debug("inotify: watching directory '%s'" % dir_path)
def playback_stop(self):
    """Stop playback; returns a Deferred that fires when done.

    Without a live mpv process there is nothing to stop, so an
    already-fired Deferred is returned.
    """
    if not self.alive():
        log.debug('ctrl:mpv: no mpv process, ignoring stop command')
        done = defer.Deferred()
        done.callback(True)
        return done
    monitor = self.__monitor
    if monitor.running:
        monitor.stop()
    self.when_playing_cb.clear()
    return self.command('stop')
def __message(self, bus, message):
    """GStreamer bus callback: route the message by its type.

    Always returns True so the bus keeps delivering messages.
    """
    mtype = message.type
    if mtype == Gst.MessageType.EOS:
        self._end()
    elif mtype == Gst.MessageType.TAG:
        self.__update_metadata(message.parse_tag())
    elif mtype == Gst.MessageType.ERROR:
        err, debug = message.parse_error()
        log.err("Gstreamer Error: %s" % str(err))
        log.debug("Gstreamer Error debug: %s" % str(debug))
    elif mtype == Gst.MessageType.STREAM_START \
            and self.__in_gapless_transition:
        # the next stream started while crossfading: finish the old one
        self._end()
    return True
def _parseODML(self, t): retval = {} size = len(t) i = 0 key = t[i:i + 4] sz = struct.unpack('<I', t[i + 4:i + 8])[0] i += 8 value = t[i:] if key != b'dmlh': log.debug("_parseODML: Error") i += sz - 8 return (retval, i)
def process_track(self, track):
    """Parse one matroska TrackEntry and dispatch by its track type."""
    # Collapse generator into a list since we need to iterate over it
    # twice.
    elements = list(self.process_one_level(track))
    type_ids = [e.get_value() for e in elements
                if e.get_id() == MATROSKA_TRACK_TYPE_ID]
    if not type_ids:
        log.debug('Bad track: no type id found')
        return
    track_type = type_ids[0]
    track = None
    if track_type == MATROSKA_VIDEO_TRACK:
        log.debug('Video track found')
        track = self.process_video_track(elements)
    elif track_type == MATROSKA_AUDIO_TRACK:
        log.debug('Audio track found')
        track = self.process_audio_track(elements)
    elif track_type == MATROSKA_SUBTITLES_TRACK:
        log.debug('Subtitle track found')
        track = core.Subtitle()
        self.set_track_defaults(track)
        track.id = len(self.subtitles)
        self.subtitles.append(track)
        # subtitle tracks have no dedicated parser; apply the common
        # track attributes directly
        for elem in elements:
            self.process_track_common(elem, track)
def connectionMade(self):
    """IPC socket to mpv is up: reset state and subscribe to properties."""
    log.debug('ctrl:mpv: connection to player process made')
    self.factory.conn = self
    # per-connection bookkeeping for property observers and command
    # reply callbacks
    self.__property_watch = {}
    self.__property_watch_lastid = 0
    self.__commands_cb = {}
    self.__commands_cb_lastid = 0
    handler = self.factory.handler
    for prop in handler.PROPERTIES:
        self.observe_property(prop)
        handler.get_property(prop)
    if handler.starting:
        # signal whoever is waiting for the player to come up
        handler.starting.callback(True)
def __init__(self, file):
    """Parse a Quicktime/MP4 container.

    Identifies the brand from the 'ftyp' atom, skips leading media-data
    atoms, then walks the atom tree via _readatom().

    :param file: open binary file positioned at the start
    :raises ParseError: on EOF or when no moov/wide atom is found
    """
    core.AVContainer.__init__(self)
    self._references = []
    self.mime = 'video/quicktime'
    self.type = 'Quicktime Video'
    h = file.read(8)
    try:
        (size, type) = struct.unpack('>I4s', h)
    except struct.error:
        # EOF.
        raise ParseError()
    if type == b'ftyp':
        # file type information
        if size >= 12:
            # this should always happen
            if file.read(4) != b'qt ':
                # not a quicktime movie, it is a mpeg4 container
                # NOTE(review): mime/type set to bytes here while the
                # defaults above are str — confirm consumers cope
                self.mime = b'video/mp4'
                self.type = b'MPEG-4 Video'
            size -= 4
        file.seek(size - 8, 1)
        h = file.read(8)
        (size, type) = struct.unpack('>I4s', h)
    while type in [b'mdat', b'skip', b'free']:
        # movie data at the beginning, skip
        # Extended size
        if size == 1:
            (size, ) = struct.unpack('>Q', file.read(8))
            size -= 8
        file.seek(int(size) - 8, 1)
        h = file.read(8)
        (size, type) = struct.unpack('>I4s', h)
    if not type in [b'moov', b'wide']:
        log.debug('invalid header: %r' % type)
        raise ParseError()
    # Back over the atom header we just read, since _readatom expects the
    # file position to be at the start of an atom.
    file.seek(-8, 1)
    while self._readatom(file):
        pass
    if self._references:
        self._set('references', self._references)
def _parseSTRF(self, t, strh):
    """Parse an AVI 'strf' (stream format) chunk and register the
    resulting audio or video stream on the container.

    :param t: raw chunk payload
    :param strh: previously parsed 'strh' dict for the same stream
    :returns: dict of the raw format fields
    """
    fccType = strh['fccType']
    retval = {}
    if fccType == b'auds':
        # WAVEFORMATEX-style fields
        (retval['wFormatTag'], retval['nChannels'],
         retval['nSamplesPerSec'], retval['nAvgBytesPerSec'],
         retval['nBlockAlign'], retval['nBitsPerSample'],
         ) = struct.unpack('<HHHHHH', t[0:12])
        ai = core.AudioStream()
        ai.samplerate = retval['nSamplesPerSec']
        ai.channels = retval['nChannels']
        # FIXME: Bitrate calculation is completely wrong.
        # ai.samplebits = retval['nBitsPerSample']
        # ai.bitrate = retval['nAvgBytesPerSec'] * 8
        # TODO: set code if possible
        # http://www.stats.uwa.edu.au/Internal/Specs/DXALL/FileSpec/\
        # Languages
        # ai.language = strh['wLanguage']
        ai.codec = retval['wFormatTag']
        self.audio.append(ai)
    elif fccType == b'vids':
        # BITMAPINFOHEADER-style fields
        v = struct.unpack('<IIIHH', t[0:16])
        (retval['biSize'], retval['biWidth'], retval['biHeight'],
         retval['biPlanes'], retval['biBitCount'],) = v
        # NOTE(review): no '<' prefix here, so native byte order is
        # used for these fields — confirm this is intentional
        v = struct.unpack('IIIII', t[20:40])
        (retval['biSizeImage'], retval['biXPelsPerMeter'],
         retval['biYPelsPerMeter'], retval['biClrUsed'],
         retval['biClrImportant'],) = v
        vi = core.VideoStream()
        # bytes 16..20 hold the compression fourcc
        vi.codec = t[16:20]
        vi.width = retval['biWidth']
        vi.height = retval['biHeight']
        # FIXME: Bitrate calculation is completely wrong.
        # vi.bitrate = strh['dwRate']
        vi.fps = float(strh['dwRate']) / strh['dwScale']
        log.debug("Define video length : strh['dwLength'] / vi.fps")
        vi.length = strh['dwLength'] / vi.fps
        self.video.append(vi)
    return retval
def __update_media(self, attrs):
    """Persist ``attrs`` onto this handler's Media row and local cache.

    Silently ignores the update if the row vanished (media deleted in
    the meantime).

    :param attrs: mapping of column name -> new value
    """
    # NOTE(review): Session appears to be a scoped-session factory;
    # 'session' and the class-level Session.query below share the same
    # underlying thread-local session — confirm
    session = Session()
    video_or_song = with_polymorphic(Media, [Song, Video])
    m_obj = Session.query(video_or_song)\
        .filter(Media.m_id == self.media["m_id"])\
        .one_or_none()
    if m_obj is None:
        log.debug('media with id %d is gone, was probably deleted. '
                  'Ignoring media update.' % self.media["m_id"])
        return
    for key in attrs:
        # keep the DB row and the in-memory cache in sync
        setattr(m_obj, key, attrs[key])
        self.media[key] = attrs[key]
    session.commit()
    Session.remove()
def __init__(self, start_inotify=True, library_update=True):
    """Wire up the deejayd core: player, libraries, sources, handlers.

    :param start_inotify: start the filesystem watcher thread once
        initialization has succeeded
    :param library_update: run a full audio/video library update at
        startup
    """
    super(DeejayDaemonCore, self).__init__()
    config = DeejaydConfig()
    self.player = player.init(config)
    self.put_sub_handler('player', self.player)
    self.audiolib, self.videolib, self.watcher = library.init(self.player,
                                                             config)
    self.put_sub_handler('audiolib', self.audiolib)
    # video support is optional; videolib may be None
    if self.videolib is not None:
        self.put_sub_handler('videolib', self.videolib)
    self.recpls = DeejaydRecordedPlaylist(self.audiolib)
    self.put_sub_handler('recpls', self.recpls)
    # add audio queue/playlist and video playlist
    self.sources = sources.init(self.player, self.audiolib,
                                self.videolib, config)
    for source in list(self.sources.sources.values()):
        # each source is exposed both as an RPC sub-handler and as an
        # attribute of the core
        self.put_sub_handler(source.name, source)
        setattr(self, source.name, source)
    # add webradio if player can play http stream
    if self.player.is_supported_uri("http"):
        self.webradio = DeejaydWebradio(self.player)
        self.put_sub_handler('webradio', self.webradio)
    else:
        log.err(_("Player is not able to play http streams"))
        self.webradio = None
    if library_update:
        self.audiolib.update()
        if self.videolib is not None:
            self.videolib.update()
    # enable JSON-RPC introspection
    self.put_sub_handler('introspection', JSONRPCIntrospection(self))
    # start inotify thread when we are sure that all init stuff are ok
    if self.watcher and start_inotify:
        log.debug(_("Start inotify watcher"))
        self.watcher.start()
    # record changes and close session after the initialization
    Session.commit()
    Session.remove()
def process_track_common(self, elem, track):
    """Apply one track-level EBML element to ``track``.

    Covers the attributes shared by all track types; elements with an
    unknown id are ignored.
    """
    eid = elem.get_id()
    if eid == MATROSKA_TRACK_LANGUAGE_ID:
        track.language = elem.get_str()
        log.debug('Track language found: %r' % track.language)
        return
    if eid == MATROSKA_NAME_ID:
        track.title = elem.get_utf8()
        return
    if eid == MATROSKA_TRACK_NUMBER_ID:
        track.trackno = elem.get_value()
        return
    if eid == MATROSKA_TRACK_FLAG_ENABLED_ID:
        track.enabled = bool(elem.get_value())
        return
    if eid == MATROSKA_TRACK_FLAG_DEFAULT_ID:
        track.default = bool(elem.get_value())
        return
    if eid == MATROSKA_CODEC_ID:
        track.codec = elem.get_str()
        return
    if eid == MATROSKA_CODEC_PRIVATE_ID:
        track.codec_private = elem.get_data()
        return
    if eid == MATROSKA_TRACK_UID_ID:
        # index the track by its UID for later lookups
        self.objects_by_uid[elem.get_value()] = track
def seek(self, pos, relative=False):
    """Seek in the current media.

    Seeks when playing a seekable media, or when stopped with unknown
    seekability (first-seek initialization); ignores (with a log) seeks
    on non-seekable or paused media.

    :param pos: position in seconds
    :param relative: interpret ``pos`` as an offset from the current
        position instead of an absolute position
    """
    state = self.__player.state
    seek_init = self.get_state() == PLAYER_STOP \
        and state['seekable'] is None
    playing_seekable = self.get_state() == PLAYER_PLAY \
        and state['seekable']
    if seek_init or playing_seekable:
        flag, current = ('relative', state['time-pos']) if relative \
            else ('absolute', 0)
        # keep our cached position in sync with the request
        state['time-pos'] = current + pos
        self.__player.seek(pos, flag)
        self.__player.command('show-progress')
    elif not state['seekable']:
        log.debug('mpv: seek on not seekable media ignored')
    elif self.get_state() == PLAYER_PAUSE:
        log.debug('mpv: seek on paused media ignored')
def process_elem(self, elem):
    """Dispatch one segment-level matroska element to its parser.

    Always returns True so the caller keeps iterating over elements.
    """
    elem_id = elem.get_id()
    log.debug('BEGIN: process element %r' % hex(elem_id))
    if elem_id == MATROSKA_SEGMENT_INFO_ID:
        duration = 0
        # default timecode scale (nanoseconds per timecode unit)
        scalecode = 1000000.0
        for ielem in self.process_one_level(elem):
            ielem_id = ielem.get_id()
            if ielem_id == MATROSKA_TIMECODESCALE_ID:
                scalecode = ielem.get_value()
            elif ielem_id == MATROSKA_DURATION_ID:
                duration = ielem.get_float_value()
            elif ielem_id == MATROSKA_TITLE_ID:
                self.title = ielem.get_utf8()
            elif ielem_id == MATROSKA_DATE_UTC_ID:
                # stored as nanoseconds; convert to seconds first
                timestamp = unpack('!q', ielem.get_data())[0] / 10.0 ** 9
                # Date is offset 2001-01-01 00:00:00 (timestamp 978307200.0)
                self.timestamp = int(timestamp + 978307200)
        # duration is expressed in timecode units; convert to seconds
        self.length = duration * scalecode / 1000000000.0
    elif elem_id == MATROSKA_TRACKS_ID:
        self.process_tracks(elem)
    elif elem_id == MATROSKA_CHAPTERS_ID:
        self.process_chapters(elem)
    elif elem_id == MATROSKA_ATTACHMENTS_ID:
        self.process_attachments(elem)
    elif elem_id == MATROSKA_SEEKHEAD_ID:
        self.process_seekhead(elem)
    elif elem_id == MATROSKA_TAGS_ID:
        self.process_tags(elem)
    elif elem_id == MATROSKA_CUES_ID:
        # presence of a Cues element means the file is indexed
        self.has_idx = True
    log.debug('END: process element %r' % hex(elem_id))
    return True
def walk_directory(self, walk_root, force=False):
    """Crawl ``walk_root`` recursively and sync the library database.

    Creates/updates LibraryFolder and media rows for what exists on
    disk, then removes folders and media that were not seen during the
    walk.

    :param walk_root: path relative to the library root
    :param force: re-parse files even when their mtime is unchanged
    """
    walk_root = os.path.join(self.root_path, walk_root)
    walk_root = walk_root.rstrip("/")
    walked_folders = []
    # cache all medias recorded in database
    media_dict = {}
    for m in Session.query(self.OBJECT_CLASS).all():
        media_dict[m.get_path()] = m
    for root, subdirs, files in pathutils.walk(walk_root):
        root = root.rstrip("/")
        log.debug('library: update crawling %s' % root)
        if os.path.basename(root).startswith("."):
            continue  # skip hidden folder
        folder = self._get_folder(root)
        if folder is None:
            folder = LibraryFolder(library_id=self.library_id,
                                   path=root,
                                   name=os.path.basename(root))
            Session.add(folder)
        # parse_files removes every file it matches from media_dict
        self.parse_files(folder, files, media_dict, force)
        subdirs_obj = self.get_subdirs(folder, subdirs)
        for sd in subdirs_obj:
            sd.parent_folder = folder
        walked_folders.append(root)
    # clean library
    # folders not seen during the walk are gone from disk: drop them
    Session.query(LibraryFolder) \
        .filter(LibraryFolder.library_id == self.library_id) \
        .filter(LibraryFolder.path.notin_(walked_folders)) \
        .delete(synchronize_session='fetch')
    # whatever is left in media_dict was never matched on disk:
    # delete those media rows as well
    if len(media_dict) > 0:
        db_media_ids = [media_dict[k].m_id for k in media_dict]
        erased_files = Session.query(self.OBJECT_CLASS) \
            .filter(self.OBJECT_CLASS.m_id.in_(db_media_ids)) \
            .all()
        for f in erased_files:
            Session.delete(f)
    Session.commit()
def _parseSTRL(self, t): retval = {} size = len(t) i = 0 while i < len(t) - 8: key = t[i:i + 4] sz = struct.unpack('<I', t[i + 4:i + 8])[0] i += 8 value = t[i:] if key == b'strh': retval['strh'] = self._parseSTRH(value) elif key == b'strf': retval['strf'] = self._parseSTRF(value, retval['strh']) else: log.debug("_parseSTRL: unsupported stream tag '%s'" % key) i += sz return retval, i
def _parseSTRH(self, t):
    """Parse an AVI 'strh' (stream header) chunk.

    For non-audio streams the 44-byte header is unpacked strictly; for
    audio streams the unpack and the delay computation are wrapped in a
    try so short/odd headers are tolerated.

    :param t: raw chunk payload
    :returns: dict of the raw header fields
    """
    retval = {}
    retval['fccType'] = t[0:4]
    log.debug("_parseSTRH(%s) : %d bytes" % (retval['fccType'], len(t)))
    if retval['fccType'] != b'auds':
        retval['fccHandler'] = t[4:8]
        v = struct.unpack('<IHHIIIIIIIII', t[8:52])
        (retval['dwFlags'], retval['wPriority'], retval['wLanguage'],
         retval['dwInitialFrames'], retval['dwScale'], retval['dwRate'],
         retval['dwStart'], retval['dwLength'],
         retval['dwSuggestedBufferSize'], retval['dwQuality'],
         retval['dwSampleSize'], retval['rcFrame'],) = v
    else:
        try:
            v = struct.unpack('<IHHIIIIIIIII', t[8:52])
            (retval['dwFlags'], retval['wPriority'], retval['wLanguage'],
             retval['dwInitialFrames'], retval['dwScale'],
             retval['dwRate'], retval['dwStart'], retval['dwLength'],
             retval['dwSuggestedBufferSize'], retval['dwQuality'],
             retval['dwSampleSize'], retval['rcFrame'],) = v
            # audio start offset (in rate units) converted to seconds
            self.delay = float(retval['dwStart']) / \
                (float(retval['dwRate']) / retval['dwScale'])
        except (KeyError, IndexError, ValueError, ZeroDivisionError):
            # NOTE(review): struct.error is not in this list, so a short
            # audio header would still raise — confirm intended
            pass
    return retval
def process_attachment(self, attachment):
    """Parse one matroska attachment and keep it if it looks like a
    cover image (image/* mime type and 'cover' in its name/description).

    Stores the image bytes in self.thumbnail.
    """
    elements = self.process_one_level(attachment)
    name = desc = ""
    # get_data() returns bytes, so the mime type must default to bytes
    # too; a str default would make startswith(b"image/") raise
    # TypeError when the attachment carries no mime-type element.
    mimetype = b""
    data = None
    for elem in elements:
        elem_id = elem.get_id()
        if elem_id == MATROSKA_FILE_NAME_ID:
            name = elem.get_utf8()
        elif elem_id == MATROSKA_FILE_DESC_ID:
            desc = elem.get_utf8()
        elif elem_id == MATROSKA_FILE_MIME_TYPE_ID:
            mimetype = elem.get_data()
        elif elem_id == MATROSKA_FILE_DATA_ID:
            data = elem.get_data()
    # Right now we only support attachments that could be cover images.
    # Make a guess to see if this attachment is a cover image.
    if mimetype.startswith(b"image/") \
            and "cover" in (name + desc).lower() and data:
        self.thumbnail = data
        log.debug('Attachment %r found' % name)
def process_event(self, ignore, filepath, mask, library, dir_path):
    """inotify callback: translate a filesystem event into library calls.

    :param ignore: unused first argument supplied by twisted's inotify
    :param filepath: twisted FilePath (real path) the event occurred on
    :param mask: inotify event bitmask
    :param library: the library instance to update
    :param dir_path: library-side (symlink-preserving) directory path
    """
    # Raised events use real paths, and in the libraries, paths follow
    # symlinks on directories. Therefore, paths must be fixed to use
    # symlinks before being passed on to the library. This is why
    # the library dir_path is passed and used here.
    session = Session()
    filename = filepath.basename().decode("utf-8")
    fpath = os.path.join(dir_path, filename)
    log.debug("inotify: %s event on '%s'"
              % (twisted.internet.inotify.humanReadableMask(mask), fpath))
    path, name = os.path.split(fpath)
    if mask & twisted.internet.inotify.IN_CREATE:
        # on create, only directories (or symlinks to directories)
        # matter; regular files are handled on IN_CLOSE_WRITE
        if self.__isdir_event(mask)\
                or self.__occured_on_dirlink(library, fpath):
            library.crawl_directory(path, name)
    elif mask & twisted.internet.inotify.IN_DELETE:
        if self.__isdir_event(mask)\
                or self.__occured_on_dirlink(library, fpath):
            library.remove_directory(path, name)
        elif not self.__isdir_event(mask):
            library.remove_file(path, name)
    elif mask & twisted.internet.inotify.IN_MOVED_FROM:
        if not self.__isdir_event(mask):
            library.remove_file(path, name)
        else:
            library.remove_directory(path, name)
    elif mask & twisted.internet.inotify.IN_MOVED_TO:
        if not self.__isdir_event(mask):
            library.update_file(path, name)
        else:
            library.crawl_directory(path, name)
    elif mask & twisted.internet.inotify.IN_CLOSE_WRITE:
        # file finished being written: (re)parse it
        library.update_file(path, name)
    session.commit()
    session.close()
def init(uri, debug=False):
    """Open the deejayd database at ``uri``, creating or migrating the
    schema as needed, and configure the global Session and DatabaseLock.

    :param uri: SQLAlchemy database URI
    :param debug: echo SQL statements when True
    """
    global DatabaseLock
    repository = os.path.join(os.path.dirname(__file__), "dbmigrate")
    # latest schema version shipped with this release
    last_version = migrate.versioning.api.version(url=uri,
                                                  repository=repository)
    # create engine based on config
    log.debug("Connection to database %s" % uri)
    engine = create_engine(uri, echo=debug)
    # know if table as instantiated
    inspector = Inspector.from_engine(engine)
    if not inspector.get_table_names():
        log.msg("Create database tables...")
        # create table
        import deejayd.db.models
        base = deejayd.db.models.Base
        base.metadata.create_all(engine)
        # init migrate version to the last
        migrate.versioning.api.version_control(url=uri,
                                               repository=repository,
                                               version=last_version)
    else:
        try:
            vers = migrate.versioning.api.db_version(url=uri,
                                                     repository=repository)
        except Exception:
            # database exists but was never version-controlled
            # set db_version to 0
            migrate.versioning.api.version_control(url=uri,
                                                   repository=repository,
                                                   version=0)
            vers = 0
        if vers < last_version:
            log.msg("Upgrade database schema to %d" % last_version)
            migrate.versioning.api.upgrade(url=uri, repository=repository)
    # configure scoped session and init lock
    Session.configure(bind=engine)
    # sqlite allows a single writer, so it gets a real lock;
    # other backends get a no-op FakeLock
    DatabaseLock = uri.startswith("sqlite") and threading.Lock() or FakeLock()