def encode(input_spec, chunksize=2**16):
    """input_spec tells ffmpeg how to find input

    Immediately yields a process handle, and from then on the
    generator gives data chunks (Admittedly the above is fairly
    wonky, but it helps avoid dealing with threading here...)"""
    cmd = ["./ffmpeg"] + input_spec + [
        # Encode with the InterLace encoding standard
        "-r", "25",
        "-vf", 'scale=trunc(oh*a/4)*4:320',  # Force size to multiple of four.
        "-threads", "8",
        "-f", "webm",
        "-c:v", "libvpx",
        "-b:v", "250K",
        # "-crf", "10",
        "-c:a", "libvorbis",
        "-"]
    log(*cmd)
    p = subprocess.Popen(cmd, stdout=subprocess.PIPE, cwd=BASEDIR)
    yield p
    while True:
        chunk = p.stdout.read(chunksize)
        if len(chunk) == 0:
            return
        yield chunk
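# Hedged usage sketch (not part of the original module): one way a caller
# might consume encode(), assuming the first value from the generator is
# the Popen handle and the remaining values are encoded webm byte chunks,
# as the docstring above describes. The helper name and `out_path` are
# hypothetical.
def _example_encode_to_file(input_spec, out_path="/tmp/out.webm"):
    gen = encode(input_spec)
    proc = next(gen)             # first yield: the ffmpeg process handle
    with open(out_path, "wb") as out:
        for chunk in gen:        # remaining yields: encoded data chunks
            out.write(chunk)
    return proc.wait()           # reap ffmpeg once the stream is drained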
def doc_updated_type_uploaded_video(self, db, doc):
    if "upload" in doc.get("_attachments", {}):
        psychotherapist.log("upload processing started!")
        doc["type"] = "processing-video"
        db.save(doc)
        # Call in a thread so that our update calls go through and we
        # can update the stream, &c
        reactor.callInThread(ingest.encode_from_upload, self, db, doc)
def run_forever(interlace):
    try:
        interlace.run_forever()
    except Exception:
        import traceback
        for line in traceback.format_exc().split('\n'):
            psychotherapist.log(line)
        import time
        time.sleep(20)
        sys.exit(1)
def update_stream(self, doc):
    psychotherapist.log("update_stream", self.db.name, doc["_id"])
    if doc["_id"] not in self.docs:
        docstream = couchpotato.CouchWebMResource(self.db, doc)
        self.docs[doc["_id"]] = docstream
        self.putChild(doc["_id"], docstream)
        return
    docstream = self.docs[doc["_id"]]
    docstream.refresh(doc)
def db_updated(self, db_name):
    if db_name.startswith("_"):
        return
    psychotherapist.CouchTherapy.db_updated(self, db_name)
    if db_name not in self.streams.dbs:
        if db_name in self.server and db_name != self.doctors_log:
            reactor.callFromThread(self.streams.add_database, self.server[db_name])
        else:
            psychotherapist.log("EEK! -- db not in server?", db_name)
def _really_put_file_attachment(db, doc, filepath):
    # The error handling on _really_put_attachment doesn't work well
    # for file handles, because if it fails, the seekhead will be at
    # the end of the file and we'll get an empty attachment.
    doc = db[doc["_id"]]
    try:
        db.put_attachment(doc, open(filepath))
    except:
        log("another attachment errrrrr", filepath)
        doc = db[doc["_id"]]
        return _really_put_file_attachment(db, doc, filepath)
def _really_set_field(db, doc, key, value):
    "returns (_id, _rev) if field was set through our doing, (_id, None) otherwise"
    doc = db[doc["_id"]]
    while doc.get(key) != value:
        doc[key] = value
        try:
            return db.save(doc)
        except couchdb.ResourceConflict:
            log("_really_set_field", "ResourceConflict", key)
            doc = db[doc["_id"]]
    return (doc["_id"], None)
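# Hedged usage sketch (assumption, not from the original source): shows how
# a caller might use _really_set_field's return contract -- a real _rev means
# our write landed, None means some other writer already set the value. The
# helper name and the "status"/"done" key/value pair are hypothetical.
def _example_mark_done(db, doc):
    doc_id, rev = _really_set_field(db, doc, "status", "done")
    if rev is None:
        log("status was already 'done'; nothing written for", doc_id)
    return doc_id, rev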
def _really_put_attachment(db, doc, *a, **kw):
    # XXX: This could be annoying for big uploads to oft-edited
    # documents. How to ameliorate? I think Couch allows putting
    # attachments without a _rev -- investigate.
    doc = db[doc["_id"]]
    try:
        db.put_attachment(doc, *a, **kw)
    except couchdb.ResourceConflict:
        return _really_put_attachment(db, doc, *a, **kw)
    except socket.error:
        log("SOCKET ERRRRRR")
        return _really_put_attachment(db, doc, *a, **kw)
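# Hedged usage sketch (names are hypothetical, and the keyword arguments
# assume couchdb-python's put_attachment(doc, content, filename, content_type)
# signature): a bytes/str body is safe to retry through _really_put_attachment,
# since unlike the file-handle case above there is no seek position to lose
# on a failed attempt.
def _example_attach_manifest(db, doc, manifest_bytes):
    _really_put_attachment(db, doc, manifest_bytes,
                           filename="manifest.json",
                           content_type="application/json")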
def add_database(self, db):
    psychotherapist.log("add_database", db.name)
    dbstreams = DatabaseStreams(db)
    self.dbs[db.name] = dbstreams
    self.putChild(db.name, dbstreams)
    # Loop through all docs and add videos
    # XXX: using something like freud.js to keep docs in-memory would
    # not be a bad idea...
    for doc_id in db:
        doc = db[doc_id]
        if doc.get("_attachments", {}).get("cluster-0"):
            dbstreams.update_stream(doc)