class Account(ndb.Model, AccountBase):
    """Datastore-backed user account record.

    Mixes the ndb model with AccountBase; the explicit __metaclass__
    resolves the "metaclass conflict: the metaclass of a derived class
    must be a (non-strict) subclass of the metaclasses of all its bases"
    error — see the "noconflict" module for details.
    """

    __metaclass__ = noconflict.classmaker()

    username = ndb.StringProperty(required=True)
    hashed_password = ndb.StringProperty(required=True)
    email = ndb.StringProperty(required=True)

    @classmethod
    def new(cls, username, hashed_password, email):
        """Build (but do not persist) a new account entity."""
        fields = {
            "username": username,
            "hashed_password": hashed_password,
            "email": email,
        }
        return cls(**fields)

    @classmethod
    def by_username(cls, username):
        """Return the first account with this username, or None."""
        query = cls.query(cls.username == username)
        return query.get()

    @classmethod
    def by_id(cls, _id):
        """Look an account up by its datastore id."""
        return cls.get_by_id(_id)

    def id(self):
        """Datastore id of this entity's key."""
        return self.key.id()

    def save(self):
        """Persist this entity; returns the entity key."""
        return self.put()
class Application(Singleton):
    """Singleton holding shared application settings plus a worker pool."""

    __metaclass__ = classmaker()

    def __init__(self, settings):
        """Fill in missing defaults and attach a thread-pool executor.

        NOTE: *settings* is mutated in place and then kept by reference.
        """
        settings.setdefault("methods", [])
        settings.setdefault("attributes", [])
        # Default pool size used for handlers invoked with unblock=True.
        settings.setdefault("unblock_workers", 5)
        settings["executor"] = ThreadPoolExecutor(
            max_workers=settings["unblock_workers"])
        self.settings = settings

    def register_object(self, o):
        """Attach the configured methods/attributes to *o* and share settings."""
        for method in self.settings["methods"]:
            o.add_method(method)
        for pair in self.settings["attributes"]:
            # pair holds (key, value)
            o.add_attribute(pair[0], pair[1])
        o.o_settings = self.settings
class EncodeBin(Base, gst.Bin):
    # gst.Bin wrapping a vorbis re-encode chain:
    #   queue -> audioconvert -> vorbisenc -> vorbisparse
    # exposed through "sink"/"src" ghost pads. The bin adds itself to the
    # player's pipeline on construction.
    __metaclass__ = classmaker()

    def __init__(self, player, quality):
        # quality: vorbis encoder quality, passed straight to vorbisenc.
        gst.Bin.__init__(self)
        self.__player = player
        self.quality = quality
        self.audioconvert = gst.element_factory_make("audioconvert")
        # threads
        queue = gst.element_factory_make("queue")
        self.vorbisenc = gst.element_factory_make("vorbisenc")
        self.vorbisenc.set_property("quality", self.quality)
        self.vorbisparse = gst.element_factory_make('vorbisparse')
        self.add(queue, self.audioconvert, self.vorbisenc, self.vorbisparse)
        # Ghost the queue's sink pad so upstream elements can link to the bin.
        sink = queue.get_pad("sink")
        self.sink = gst.GhostPad("sink", sink)
        self.add_pad(self.sink)
        gst.element_link_many(queue, self.audioconvert, self.vorbisenc,
                              self.vorbisparse)
        # Ghost the parser's src pad as the bin's output.
        src = self.vorbisparse.get_pad("src")
        self.src = gst.GhostPad("src", src)
        self.add_pad(self.src)
        self.__player.pipe.add(self)
class ImmutableDict(ImmutableDictBase, Mapping):
    """Hashable, read-only mapping backed by a plain dict.

    The hash is derived from the frozen item set and cached after the
    first computation.
    """

    __metaclass__ = classmaker()

    def __init__(self, *args, **kwargs):
        # Accepts exactly what dict() accepts.
        self.__data = dict(*args, **kwargs)

    def __getitem__(self, key):
        return self.__data[key]

    def __iter__(self):
        return iter(self.__data)

    def __len__(self):
        return len(self.__data)

    def __hash__(self):
        # Lazily computed and memoized; requires all values be hashable.
        try:
            return self.__hash
        except AttributeError:
            self.__hash = hash(frozenset(self.__data.iteritems()))
            return self.__hash

    def __repr__(self):
        return "ImmutableDict(%s)" % repr(self.__data)

    def __copy__(self):
        return type(self)(copy(self.__data))

    def __deepcopy__(self, memo):
        return type(self)(deepcopy(self.__data, memo))
class SOAdetailAdmin(admin.ModelAdmin):
    # Admin configuration for SOA_detail rows. The composed metaclass mixes
    # in ModelAdminWithForeignKeyLinksMetaclass so "link_to_soa" renders as
    # a change-page hyperlink in the list view.
    __metaclass__ = classmaker(
        right_metas=(ModelAdminWithForeignKeyLinksMetaclass, ))
    list_display = [
        'sitename', 'link_to_soa', 'site_addr', 'call_sign', 'old_chan',
        'channel', 'freq', 'bw', 'ppp_units', 'rsl_units', 'mod_units',
        'stor_units'
    ]
    search_fields = ['call_sign', 'sitename', 'site_addr', 'city', 'band']
    list_filter = ['soa']
    # Autocomplete-enabled form for the SOA_detail model.
    form = autocomplete_light.modelform_factory(SOA_detail)
    actions = [export_as_csv]
class AnimatedQuaternion(Animable, Quaternion):
    """Quaternion whose w/x/y/z components are animatable signals."""

    __metaclass__ = classmaker()

    # Component descriptors; always handled in this fixed order.
    _COMPONENTS = ("w", "x", "y", "z")

    w = Animatable()
    x = Animatable()
    y = Animatable()
    z = Animatable()

    def set(self, Q):
        """Pin every component to the constant value taken from *Q*."""
        for name in ("w", "x", "y", "z"):
            setattr(self, name, constant(getattr(Q, name)))

    def set_transition(self, dt, extend="constant", method='linear'):
        """Re-animate each component toward its own current value."""
        for name in ("w", "x", "y", "z"):
            setattr(
                self, name,
                animate(getattr(self, name),
                        getattr(self, name),
                        dt=dt,
                        extend=extend,
                        method=method))

    def set_chained_transition(self, quaternions, dts, methods):
        """Chain a sequence of animated segments through *quaternions*.

        *dts* and *methods* may be scalars (broadcast to every segment)
        or sequences matching len(quaternions).
        """
        assert len(quaternions) > 0
        if isinstance(dts, (list, tuple)):
            assert len(dts) == len(quaternions)
        else:
            dts = [dts] * len(quaternions)
        if isinstance(methods, (list, tuple)):
            assert len(methods) == len(quaternions)
        else:
            methods = [methods] * len(quaternions)
        names = ("w", "x", "y", "z")
        segments = dict((name, []) for name in names)
        previous = dict((name, getattr(self, name)) for name in names)
        for quat, dt, method in zip(quaternions, dts, methods):
            for name in names:
                target = getattr(quat, name)
                segments[name].append(
                    animate(previous[name], target, dt=dt, method=method))
                previous[name] = target
        for name in names:
            setattr(self, name, chain(segments[name]))
class AnimatedVector3(Animable, Vector3):
    """Vector3 whose x/y/z components are animatable signals."""

    __metaclass__ = classmaker()

    x = Animatable()
    y = Animatable()
    z = Animatable()

    def __init__(self, v, *args):
        """Accept either a Vector3 or three scalar coordinates."""
        components = tuple(v) if isinstance(v, Vector3) else (v, ) + args
        super(AnimatedVector3, self).__init__(*components)

    def set(self, V):
        """Pin every component to the constant value taken from *V*."""
        for name in ("x", "y", "z"):
            setattr(self, name, constant(getattr(V, name)))

    def set_transition(self, dt, extend='constant', method='linear'):
        """Re-animate each component toward its own current value."""
        for name in ("x", "y", "z"):
            setattr(
                self, name,
                animate(getattr(self, name),
                        getattr(self, name),
                        dt=dt,
                        extend=extend,
                        method=method))

    def set_chained_transition(self, positions, dts, methods):
        """Chain a sequence of animated segments through *positions*.

        *dts* and *methods* may be scalars (broadcast to every segment)
        or sequences matching len(positions).
        """
        assert len(positions) > 0
        if isinstance(dts, (list, tuple)):
            assert len(dts) == len(positions)
        else:
            dts = [dts] * len(positions)
        if isinstance(methods, (list, tuple)):
            assert len(methods) == len(positions)
        else:
            methods = [methods] * len(positions)
        names = ("x", "y", "z")
        segments = dict((name, []) for name in names)
        previous = dict((name, getattr(self, name)) for name in names)
        for pos, dt, method in zip(positions, dts, methods):
            for name in names:
                target = getattr(pos, name)
                segments[name].append(
                    animate(previous[name], target, dt=dt, method=method))
                previous[name] = target
        for name in names:
            setattr(self, name, chain(segments[name]))
class GridFSSource(Base, gst.BaseSrc):
    """gst source element that pulls its data out of MongoDB GridFS.

    A GridOut handle is opened in __init__ and read block by block in
    do_create(); curoffset mirrors the absolute read position so random
    access from gstreamer only seeks when necessary.
    """

    __metaclass__ = classmaker()
    __gsttemplates__ = (
        gst.PadTemplate(
            "src",
            gst.PAD_SRC,
            gst.PAD_ALWAYS,
            gst.caps_new_any()
            # gst.caps_from_string("application/ogg")
            # gst.caps_from_string("audio/x-raw-int, channels=2, endianness=1234, rate=44100, width=16, depth=16, signed=true")
        ), )
    blocksize = 4096
    # Class-level default so __del__ is safe even if __init__ failed
    # before the GridFS lookup completed.
    fd = None

    def __init__(self, file_id):
        super(GridFSSource, self).__init__()
        self.__gobject_init__()
        self.curoffset = 0  # absolute position of the next read, in bytes
        self.file_id = file_id
        fs = gridfs.GridFS(self.db)
        self.fd = fs.get(ObjectId(file_id))

    def do_create(self, offset, size):
        """BaseSrc "create" vfunc: hand the next buffer to gstreamer.

        Returns (gst.FLOW_OK, buffer) while data remains, otherwise
        (gst.FLOW_UNEXPECTED, None) at end of stream.
        """
        if offset != self.curoffset:
            self.fd.seek(offset, 0)
            # BUGFIX: keep our mirror of the file position in sync after a
            # seek; previously curoffset kept its stale value, so it drifted
            # permanently and every subsequent call re-seeked incorrectly.
            self.curoffset = offset
        data = self.fd.read(self.blocksize)
        if data:
            self.curoffset += len(data)
            return gst.FLOW_OK, gst.Buffer(data)
        return gst.FLOW_UNEXPECTED, None

    def __del__(self):
        # fd is None when __init__ raised before opening the GridFS file;
        # guard so teardown never raises on a half-built instance.
        if self.fd is not None:
            self.fd.close()
class PriorityQueue(Singleton):
    # Singleton facade over Queue.PriorityQueue. Entries are [priority,
    # value] pairs; put() uses a fixed "lowest urgency" default priority.
    __metaclass__ = classmaker()

    def __init__(self):
        # INT_MAX for the platform's C int: items queued via put() sort
        # after anything queued with an explicit (smaller) priority.
        self.default_priority = int(2**(struct.Struct('i').size * 8 - 1) - 1)
        self.queue = Queue.PriorityQueue()
        # NOTE(review): this rebinds the *class* attribute "queue" to the
        # singleton instance (the instance attribute above shadows it on
        # self). Presumably so callers can reach the singleton as
        # PriorityQueue.queue — confirm before changing.
        PriorityQueue.queue = self

    def put_with_priority(self, priority, value):
        # Lower priority number == dequeued earlier.
        self.queue.put([priority, value])

    def put(self, value):
        # Enqueue with the lowest urgency (default) priority.
        # now = datetime.datetime.now()
        # self.default_value = int(time.mktime(now.timetuple())*1e3 + now.microsecond/1e3 * 1000)
        self.queue.put([self.default_priority, value])

    def get(self):
        # Blocking dequeue; returns only the value, dropping the priority.
        # NOTE(review): get(True) blocks forever, so Queue.Empty can never
        # be raised here — the except branch is effectively dead code.
        try:
            priority, value = self.queue.get(True)
        except Queue.Empty:
            priority, value = None, None
        return value
class StreamManagement(Base, Singleton):
    """Central command hub for the streaming pipeline.

    Registers itself as a "pipeline" server in MongoDB, wires signal
    handlers for every stream command, and tracks live streamers in
    self.streams keyed by unicode stream id. All coroutine-style
    handlers yield into task() and answer through respond().
    """

    __metaclass__ = classmaker()

    @staticmethod
    def aspects():
        """AOP advice applied to the command handlers.

        - aspect1: normalize the "stream" kwarg to unicode on every call.
        - aspect2: every command except start_streaming requires the
          stream to already exist.
        - aspect3: start_streaming requires the stream to NOT exist yet.
        """

        def stream_to_unicode(*args, **kwargs):
            if "stream" in kwargs:
                kwargs["stream"] = unicode(kwargs["stream"])
            return kwargs

        aspect1 = {"pointcut": ".*", "advise": {"before": stream_to_unicode}}

        def no_such_stream(*args, **kwargs):
            if kwargs["stream"] not in kwargs["self"].streams:
                kwargs["respond"]({"error": "No such stream"})
                return Call.stop
            else:
                return Call.proceed

        # Negative-lookahead pointcut: everything but start_streaming.
        aspect2 = {
            "pointcut": "^(?!((start_streaming))$).*",
            "advise": {
                "before": no_such_stream
            }
        }

        def stream_exists(*args, **kwargs):
            if kwargs["stream"] in kwargs["self"].streams:
                kwargs["respond"]({"error": "Stream exists"})
                return Call.stop
            else:
                return Call.proceed

        aspect3 = {
            "pointcut": "^start_streaming$",
            "advise": {
                "before": stream_exists
            }
        }
        return [aspect1, aspect2, aspect3]

    def __init__(self, port):
        super(StreamManagement, self).__init__()
        # Live streamers by unicode stream id.
        self.streams = {}
        # one command at the time
        self.lock = BoundedSemaphore(value=1)
        local_ip = Helpers.ip.local_ip()
        public_ip = Helpers.ip.public_ip()
        # Upsert this process's pipeline-server record.
        self.db.servers.update(
            {
                "type": "pipeline",
                "local_ip": local_ip,
                "port": port
            }, {
                "level": float(0),
                "type": "pipeline",
                "local_ip": local_ip,
                "public_ip": public_ip,
                "port": port,
                "down": False
            },
            upsert=True)
        server = self.db.servers.find_one(
            {
                "type": "pipeline",
                "local_ip": local_ip,
                "port": port
            }, {
                "_id": 1,
            })
        # db = Db()
        # server = db.conn.Server.find_one({"type": "pipeline", "local_ip": local_ip, "port": port})
        # if server == None:
        #     server = db.conn.Server()
        #
        #     server["level"] = float(0)
        #     server["type"] = "pipeline"
        #     server["local_ip"] = local_ip
        #     server["public_ip"] = public_ip
        #     server["port"] = port
        #     server["down"] = False
        #     server.save()
        self.pipeline_server_id = unicode(server["_id"])
        PipelineLoadBalancer(self.pipeline_server_id).start()
        # self.db = db
        Helpers.globals.set_id_pipeline(server["_id"])
        # Wire every command signal to its handler.
        self.connect(handler=self.dump_dot_file, signal="dump_dot_file")
        self.connect(handler=self.playlist_update, signal="playlist_update")
        self.connect(handler=self.change_selection, signal="change_selection")
        self.connect(handler=self.next, signal="next")
        self.connect(handler=self.is_alive, signal="is_alive")
        self.connect(handler=self.scale_streaming, signal="scale")
        self.connect(handler=self.start_streaming, signal="start")
        self.connect(handler=self.print_playlist, signal="print_playlist")
        self.connect(handler=self.register_updates_observer,
                     signal="register_updates_observer")
        self.connect(handler=self.notify_current_track,
                     signal="notify_current_track")
        self.connect(handler=self.unregister_updates_observer,
                     signal="unregister_updates_observer")
        self.connect(handler=self.update_buffer, signal="update_buffer")
        self.connect(handler=self.start_live, signal="start_live")
        self.connect(handler=self.stop_streaming, signal="stop")
        self.connect(handler=self.rescale_streaming, signal="rescale")
        self.connect(handler=self.__streamer_initialized,
                     signal="streamer_initialized")

    @in_context([])
    def __streamer_initialized(self, streamer, respond):
        # Completes the start_streaming handshake: registers the streamer,
        # releases the command lock acquired in start_streaming, and marks
        # the stream "playing" in the database.
        logging.debug("__streamer_initialized(): New initialized streamer")
        self.streams[unicode(streamer.stream["_id"])] = streamer
        self.lock.release()
        yield task(self.query,
                   self.db.streams.update, {"_id": streamer.stream["_id"]},
                   {"$set": {
                       "status": "playing"
                   }},
                   upsert=False,
                   multi=False)
        respond('OK')

    @in_context(["StreamManagement"])
    def help(self, respond):
        # Emits every "#"-prefixed line of README.TXT (prefix stripped)
        # as the help text.
        pr = ""
        with open('README.TXT', 'r') as content_file:
            lines = content_file.readlines()
            for line in lines:
                if line.startswith("#"):
                    pr += line[1:]
            del lines
        print pr
        respond({"msg": pr})

    @in_context(["StreamManagement"])
    def start_streaming(self, stream, quality, respond):
        # NOTE: the lock acquired here is normally released later by
        # __streamer_initialized(); it is released directly only on error.
        self.lock.acquire()
        stream = yield task(self.query, self.db.streams.find_one,
                            {"_id": ObjectId(stream)}, {
                                "_id": 1,
                                "reencoding": 1,
                                "user_id": 1,
                                "name": 1,
                                "description": 1,
                                "genres": 1,
                                "quality": 1,
                                "default_program_id": 1
                            })
        if stream["reencoding"]:
            result = yield task(
                self.call,
                StartStreaming(self, self.pipeline_server_id, stream,
                               quality).run)
        else:
            result = yield task(
                self.call,
                StartEncodedStreaming(self, self.pipeline_server_id,
                                      stream).run)
        if "error" in result:
            self.lock.release()
            respond(result)

    @in_context(["StreamManagement"])
    def is_alive(self, stream, respond):
        result = yield task(
            self.call,
            IsStreamingAlive(self, stream, streamer=self.streams[stream]).run)
        respond(result)

    @in_context(["StreamManagement"])
    def playlist_update(self, stream, group, respond):
        result = yield task(self.call,
                            self.streams[stream].scheduler.playlist_update,
                            group=group)
        respond(result)

    @in_context(["StreamManagement"])
    def change_selection(self, stream, respond):
        result = yield task(self.call,
                            self.streams[stream].scheduler.change_selection)
        respond(result)

    @in_context(["StreamManagement"])
    def next(self, stream, respond, fade_in=None, fade_out=None):
        # Skip to the next track, optionally cross-fading.
        result = yield task(
            self.call,
            NextTrack(streamer=self.streams[stream],
                      fade_in=fade_in,
                      fade_out=fade_out).run)
        respond(result)

    @in_context(["StreamManagement"])
    def stop_streaming(self, stream, respond):
        # Serialized with start_streaming via the shared command lock.
        self.lock.acquire()
        result = yield task(
            self.call,
            StopStreaming(self, stream, streamer=self.streams[stream]).run)
        self.lock.release()
        respond(result)

    @in_context(["StreamManagement"])
    def scale_streaming(self, stream, respond, quality=None):
        result = yield task(
            self.call,
            ScaleStreaming(stream,
                           quality=quality,
                           streamer=self.streams[stream]).run)
        respond(result)

    @in_context(["StreamManagement"])
    def rescale_streaming(self, stream, respond, stop=True):
        result = yield task(
            self.call,
            RescaleStreaming(stream,
                             streamer=self.streams[stream],
                             stop=stop,
                             stream_servers=len(
                                 self.streams[stream].servers.items())).run)
        respond(result)

    @in_context(["StreamManagement"])
    def start_live(self, stream, appsrc, respond, loop=None):
        result = yield task(
            self.call,
            LiveStreaming(stream,
                          streamer=self.streams[stream],
                          appsrc=appsrc,
                          loop=loop).run)
        respond(result)

    @in_context(["StreamManagement"])
    def register_updates_observer(self, stream, handler, respond):
        result = yield task(
            self.call,
            self.streams[stream].scheduler.register_updates_observer,
            handler=handler)
        respond(result)

    @in_context(["StreamManagement"])
    def notify_current_track(self, stream, respond):
        result = yield task(
            self.call,
            self.streams[stream].scheduler.buffer.notify_current_track)
        respond(result)

    @in_context(["StreamManagement"])
    def unregister_updates_observer(self, stream, handler, respond):
        result = yield task(
            self.call,
            self.streams[stream].scheduler.unregister_updates_observer,
            handler=handler)
        respond(result)

    @in_context(["StreamManagement"])
    def update_buffer(self, stream, buffer, respond):
        result = yield task(self.call,
                            self.streams[stream].scheduler.update_buffer,
                            buffer=buffer,
                            unblock=True)
        respond(result)

    @in_context(["StreamManagement"])
    def dump_dot_file(self, stream, respond):
        result = yield task(self.call,
                            self.streams[stream].dump_dot_file,
                            unblock=True)
        respond({"msg": "OK", "result": result})

    @in_context(["StreamManagement"])
    def print_playlist(self, stream, respond):
        result = self.streams[stream].scheduler.print_playlist()
        respond(result)

    @in_context(["StreamManagement"])
    def run_command(self, command, respond):
        # Parse and execute a free-form terminal command against this hub.
        terminal = Terminal(self)
        result = terminal.parse_and_execute(command)
        respond(result)

    def __results_to_dict(self, results):
        # Materialize an iterable (e.g. a db cursor) into a plain list.
        res_arr = []
        for result in results:
            res_arr.append(result)
        return res_arr
class StreamBin(Base, gst.Bin):
    """gst.Bin decoding one source into the player's adder.

    Chain (decodebin variant):
        source -> decodebin -> audioconvert -> audioresample -> capsfilter
               [-> nodatabinunlinker] -> preroll queue -> Position -> volume
    The volume element's src pad is ghosted as self.src and linked to a
    request pad of the player's adder by link_and_unblock().
    """

    __metaclass__ = classmaker()

    def __init__(self,
                 player,
                 source,
                 originator,
                 decodebin=False,
                 nodatabinunlinker=False):
        # decodebin=True uses gst's generic decodebin; otherwise a project
        # VorbisDecodeBin. nodatabinunlinker inserts a watchdog element
        # that unlinks this bin when no data flows.
        super(StreamBin, self).__init__()
        gst.Bin.__init__(self)
        self.__player = player
        self.__originator = originator
        self.__source = source
        self.linked = False
        self.is_nodatabinunlinker = nodatabinunlinker
        self.state = None
        self.__adder_sink = None
        self.src_blocked = False
        self.emitted_playing_message = False
        self.__decoder_linked = False
        self.__error = None
        self.__error_id = None
        self.loop_id = None
        if decodebin:
            self.__decodebin = gst.element_factory_make("decodebin", None)
            # decodebin exposes pads dynamically; link on new-decoded-pad.
            self.__decodebin.connect("new-decoded-pad",
                                     self.__new_decodebin_pad_cb)
        else:
            self.__decodebin = VorbisDecodeBin()
        # oggdemux ! vorbisdec
        # self.__oggdemux = gst.element_factory_make("oggdemux", None)
        # self.__oggdemux.connect("pad-added", self.demuxer_callback)
        # self.__sourcequeue = gst.element_factory_make("queue", None)
        # self.__vorbisdec = gst.element_factory_make("vorbisdec", None)
        self.__audioconvert = gst.element_factory_make("audioconvert", None)
        if nodatabinunlinker:
            self.__nodatabinunlinker = NoDataBinUnlinker(unlink=self,
                                                         tolerance=1)
        self.__audioresample = gst.element_factory_make("audioresample", None)
        self.__capsfilter = gst.element_factory_make("capsfilter", None)
        self.pos = Position(self.__player.stream)
        self.__volume = gst.element_factory_make("volume", None)
        # Controller drives the volume property for fades.
        self.__volume_control = gst.Controller(self.__volume, "volume")
        self.__volume_control.set_interpolation_mode("volume",
                                                     gst.INTERPOLATE_LINEAR)
        # self.__loudness = Loudness(volume = self.__volume)
        self.__preroll = gst.element_factory_make("queue", None)
        self.__capsfilter.set_property("caps", self.__player.caps)
        self.__preroll.set_property("min-threshold-buffers", 10)
        self.add(self.__source, self.__decodebin, self.__audioconvert,
                 self.__audioresample, self.__capsfilter, self.pos,
                 self.__volume, self.__preroll)
        if nodatabinunlinker:
            self.add(self.__nodatabinunlinker)
        # self.__source.link(self.__decodebin)
        # gst.element_link_many(self.__source, self.__sourcequeue, self.__oggdemux)
        if decodebin:
            # decodebin's src appears later; only link up to it now.
            gst.element_link_many(self.__source, self.__decodebin)
            if nodatabinunlinker:
                gst.element_link_many(self.__audioconvert,
                                      self.__audioresample, self.__capsfilter,
                                      self.__nodatabinunlinker, self.__preroll,
                                      self.pos, self.__volume)
            else:
                gst.element_link_many(self.__audioconvert,
                                      self.__audioresample, self.__capsfilter,
                                      self.__preroll, self.pos, self.__volume)
        else:
            if nodatabinunlinker:
                gst.element_link_many(self.__source, self.__decodebin,
                                      self.__audioconvert,
                                      self.__audioresample, self.__capsfilter,
                                      self.__nodatabinunlinker, self.__preroll,
                                      self.pos, self.__volume)
            else:
                gst.element_link_many(self.__source, self.__decodebin,
                                      self.__audioconvert,
                                      self.__audioresample, self.__capsfilter,
                                      self.__preroll, self.pos, self.__volume)
        preroll_src = self.__volume.get_pad("src")
        self.src = gst.GhostPad("src", preroll_src)
        self.add_pad(self.src)
        # Add a padprobe to the src to catch the EOS and other events
        self.src.add_event_probe(self.__src_event_cb)
        # Share the bus with the player
        self.set_bus(self.__player.pipe.get_bus())

    def unset_volume_control(self):
        # Drop all scheduled volume keyframes (cancels pending fades).
        self.__volume_control.unset_all("volume")

    def demuxer_callback(self, demuxer, pad):
        # Legacy pad-added handler for the commented-out oggdemux chain.
        # NOTE(review): self.__vorbisdec is never created by __init__
        # anymore, so calling this would raise AttributeError.
        print "DEMUX CALLBACK CALLED"
        pad.link(self.__vorbisdec.get_pad("sink"))
        self.__decoder_linked = True

    def get_next_in_loop(self):
        # NOTE(review): this assigns self.loop_ip (sic) but returns
        # self.loop_id, i.e. the value from *before* the advance —
        # looks like a typo for loop_id; confirm intended behavior
        # before changing.
        self.loop_ip = self.loop[(self.loop_position % len(self.loop)) - 1]
        self.loop_position += 1
        return self.loop_id

    def loop(self, loop):
        # Start looping over a list of GridFS file ids.
        # NOTE(review): "self.loop = loop" rebinds the attribute over this
        # very method on the instance, so loop() can only be called once
        # per instance — presumably unintended; verify.
        self.loop_position = 0
        self.loop = loop
        self.loop_id = self.get_next_in_loop()
        source = GridFSSource(self.loop_id)
        self.new_source(source)

    @asynchronous
    def stop_loop(self):
        # NOTE(review): resets self.loop_pos, but the counter used by
        # get_next_in_loop() is self.loop_position — likely a typo.
        self.loop_pos = 0
        self.loop = None
        self.loop_id = None
        if len(self.__player.scheduler.live_bins) == 0:
            result = yield task(self.call,
                                NextTrack(streamer=self.__player).run,
                                unblock=False)

    def pause(self):
        # Detach from the adder (and pause the data watchdog) exactly once.
        self.__player.lock.acquire()
        if self.state != "paused":
            if self.is_nodatabinunlinker:
                self.__nodatabinunlinker.datawatcher.pause()
            self.unlink_and_dispose()
            self.state = "paused"
        self.__player.lock.release()

    def play(self):
        # NOTE(review): the resume logic is entirely commented out — this
        # currently only takes and releases the player lock.
        self.__player.lock.acquire()
        # if self.state != "playing":
        #     self.link_and_unblock()
        #     if self.is_nodatabinunlinker:
        #         self.__nodatabinunlinker.datawatcher.play()
        #     self.state = "playing"
        self.__player.lock.release()

    def reset(self):
        # Bounce the bin through NULL to flush and restart it.
        self.set_state(gst.STATE_NULL)
        self.set_state(gst.STATE_PLAYING)

    def new_source(self, source):
        # Swap the source element in place while holding the player lock.
        self.__player.lock.acquire()
        self.set_state(gst.STATE_NULL)
        gst.element_unlink_many(self.__source, self.__decodebin)
        self.remove(self.__source)
        self.__decodebin.new_source()
        self.__source = source
        self.add(self.__source)
        gst.element_link_many(self.__source, self.__decodebin)
        # self.__source.set_state(gst.STATE_PLAYING)
        self.set_state(gst.STATE_PLAYING)
        # self.__player.pipe.set_state(gst.STATE_PLAYING)
        self.__player.lock.release()

    def start(self):
        logging.debug("StreamBin::start(): Starting stream")
        result = self.link_and_unblock()
        return result

    def get_postion(self):
        # (sic) kept misspelled — callers use this name.
        return self.pos.position
        # return self.__player.pipe.query_position(gst.FORMAT_TIME)[0]

    def get_time_postion(self):
        # (sic) kept misspelled — callers use this name.
        return self.pos.time_position

    def on_position(self, position, callback):
        # Register a callback fired when playback reaches *position*.
        self.pos.on_position(position, callback)

    def set_volume(self, position, volume):
        # Schedule a volume keyframe (0-100 scale) at *position*.
        # NOTE(review): "volume / 100" is integer division under Python 2
        # when volume is an int — presumably a float is expected; verify.
        # #
        # pos = self.__player.pipe.query_position(gst.FORMAT_TIME)[0]-(5*gst.SECOND)
        # if pos < 0:
        #     pos = self.__player.pipe.query_position(gst.FORMAT_TIME)[0]
        # print pos
        self.__volume_control.set("volume", position, volume / 100)
        # self.__volume.set_property("volume", volume/100)

    def get_volume(self):
        # Current volume on the 0-100 scale.
        return float(self.__volume.get_property("volume")) * 100

    def get_source(self):
        return self.__source

    def link_and_unblock(self):
        """Attach this bin's src to a fresh adder request pad and play.

        Returns True on success, False when the adder pad could not be
        obtained or linked, and None when already linked.
        """
        if self.linked:
            return
        logging.debug(
            "StreamBin::link_and_unblock(): Linking and unblocking stream")
        if self.__adder_sink:
            return True
        # self.__player.sink_lock.acquire()
        # self.__player.sink_start()
        # self.__player.sink_lock.release()
        if not self.get_parent():
            logging.debug(
                "StreamBin::link_and_unblock(): Adding stream to player pipeline"
            )
            self.__player.pipe.add(self)
        self.__adder_sink = self.__player.adder.get_request_pad("sink%d")
        if not self.__adder_sink:
            logging.warning(
                "StreamBin::link_and_unblock(): Could not get adder_sink")
            return False
        try:
            self.src.link(self.__adder_sink)
            logging.debug(
                "StreamBin::link_and_unblock(): Stream src linked to adder_sink"
            )
        except:
            self.__adder_sink = None
            logging.warning(
                "StreamBin::link_and_unblock(): Could not link with adder_sink"
            )
            return False
        logging.debug("StreamBin::link_and_unblock(): Play")
        # self.src.set_blocked_async(False, self.__src_unblocked_cb_null)
        self.set_state(gst.STATE_PLAYING)
        self.__player.pipe.set_state(gst.STATE_PLAYING)
        self.linked = True
        self.state = "playing"
        return True

    # def unlink_and_dispose(self):
    #     print self.set_state(gst.STATE_NULL)
    #     print self.src.unlink(self.__adder_sink)
    #     print self.__adder_sink.get_parent().release_request_pad(self.__adder_sink)
    #     print self.__player.pipe.remove(self)
    #     print self.__player.pipe.set_state(gst.STATE_PLAYING)

    def link_and_unblock2(self):
        # Alternate relink path: assumes __adder_sink is still valid.
        self.set_state(gst.STATE_NULL)
        # self.src.unlink(self.__adder_sink)
        # self.__adder_sink = self.__player.adder.get_request_pad("sink%d")
        self.src.link(self.__adder_sink)
        self.set_state(gst.STATE_PLAYING)
        self.__player.pipe.set_state(gst.STATE_PLAYING)

    def unlink_and_dispose2(self):
        # Alternate unlink path: keeps the adder request pad alive.
        self.set_state(gst.STATE_NULL)
        self.src.unlink(self.__adder_sink)
        # self.__adder_sink.get_parent().release_request_pad(self.__adder_sink)
        self.set_state(gst.STATE_PLAYING)
        self.__player.pipe.set_state(gst.STATE_PLAYING)

    def unlink_and_dispose(self):
        """Tear the bin down: NULL state, unlink from the adder, remove
        from the player pipeline. No-op when not linked."""
        if not self.linked:
            return
        logging.debug(
            "StreamBin::unlink_and_dispose(): Unlinking and disposing stream")
        sr = self.set_state(gst.STATE_NULL)
        if sr == gst.STATE_CHANGE_ASYNC:
            logging.debug(
                "StreamBin::unlink_and_dispose(): Setting stream to STATE_NULL in async"
            )
            # Block until the async state change completes.
            self.get_state(gst.CLOCK_TIME_NONE)
        logging.debug("StreamBin::unlink_and_dispose(): STATE_NULL set")
        if self.__adder_sink:
            self.src.unlink(self.__adder_sink)
            self.__adder_sink.get_parent().release_request_pad(
                self.__adder_sink)
            self.__adder_sink = None
        logging.debug("StreamBin::unlink_and_dispose(): Unlinked")
        if self.get_parent() and self.get_parent() == self.__player.pipe:
            self.__player.pipe.remove(self)
        logging.debug("StreamBin::unlink_and_dispose(): Removed from pipeline")
        self.__player.pipe.set_state(gst.STATE_PLAYING)
        self.linked = False
        # logging.debug("StreamBin::unlink_and_dispose(): Acquiring streams lock")
        # self.__player.streams_lock.acquire()
        # self.__player.streams.remove(self)
        # self.__player.streams_lock.release()
        # logging.debug("StreamBin::unlink_and_dispose(): Streams lock released. Removed from streams list.")

    def __new_decodebin_pad_cb(self, dbin, pad, islast):
        # decodebin produced its dynamic src pad: complete the chain.
        pad.link(self.__audioconvert.get_pad("sink"))
        self.__decoder_linked = True

    def __src_blocked_cb(self, pad, blocked):
        # Once the src pad is blocked, drop the preroll threshold and start.
        self.__preroll.set_property("min-threshold-buffers", 0)
        self.start()

    def __src_unblocked_cb_null(self, pad, blocked):
        pass

    @asynchronous
    def __src_event_cb(self, pad, event):
        # End of stream on our ghost src: schedule the next track.
        if event.type == gst.EVENT_EOS:
            result = yield task(self.call,
                                NextTrack(streamer=self.__player).run,
                                unblock=True)

    def destruct(self):
        """ This has to be called from another thread. """
        # NOTE(review): references self.__oggdemux, which __init__ no
        # longer creates (the oggdemux chain is commented out) — this
        # will raise AttributeError as written; confirm before relying
        # on destruct().
        logging.debug("StreamBin::destruct(): Destructing stream")
        self.unlink_and_dispose()
        self.remove_many(self.__source, self.__oggdemux, self.__audioconvert,
                         self.__audioresample, self.__capsfilter,
                         self.__preroll)
        del self.__player
        # del self.uri
        del self.__adder_sink
        del self.src_blocked
        del self.emitted_playing_message
        del self.__source
        del self.__oggdemux
        del self.__audioconvert
        del self.__audioresample
        del self.__capsfilter
        del self.__preroll
        del self.src
class HTML5ModelForm(forms.ModelForm, HTML5Form):
    # Django ModelForm combined with the HTML5Form mixin; the composed
    # metaclass from "noconflict" resolves the metaclass conflict between
    # the two bases.
    __metaclass__ = noconflict.classmaker()
class LatestRsl_v2Admin(admin.ModelAdmin):
    # Admin for the latest RSL (v2) records. The composed metaclass mixes
    # in ModelAdminWithForeignKeyLinksMetaclass so the "link_to_*" columns
    # render as change-page hyperlinks.
    #change_list_template = "admin/change_list_filter_sidebar.html"
    __metaclass__ = classmaker(
        right_metas=(ModelAdminWithForeignKeyLinksMetaclass, ))
    form = LatestRsl_v2Form
    search_fields = [
        '=rslno', 'logbook__controlNo', 'official_receipt__or_no',
        'carrier__companyname', 'equipment__makemodel__make', 'form_serial',
        'evaluator__code_name', 'encoder__code_name', 'signatory__code_name',
        'sitename__street', 'sitename__site', 'sitename__address__city',
        'sitename__address__province', 'sitename__address__regioncode',
        'equipment__serialno', 'equipment__callsign'
    ]
    list_display = [
        'rslno', 'status', 'issued', 'link_to_carrier', 'link_to_sitename',
        'sitename_street', 'sitename_province', 'link_to_logbook',
        'form_serial', 'class_of_station', 'nature_of_service', 'ptsvc',
        'remarks'
    ]
    date_hierarchy = 'issued'
    list_filter = [
        'carrier', 'class_of_station', 'status', DirectorListFilter,
        EngrListFilter, EncoderListFilter
    ]
    fieldsets = [
        ('Logbook Info', {
            'fields': (('logbook'), 'carrier')
        }),
        ('License Info', {
            'classes': ('grp-collapse grp-open', ),
            'fields': (('status', 'issued'),
                       ('rslno', 'form_serial', 'capacity'),
                       ('class_of_station', 'lic_to_operate',
                        'nature_of_service'), 'ptsvc')
        }),
        ('Site', {
            'classes': ('grp-collapse grp-open', ),
            'fields': ('sitename', 'sitename_street',
                       ('sitename_city', 'sitename_province',
                        'sitename_region'),
                       ('sitename_longitude', 'sitename_latitude'))
        }),
        ('Remarks Info', {
            'classes': ('grp-collapse grp-open', ),
            'fields': ('remarks', ('encoder', 'evaluator'), 'signatory')
        })
    ]
    inlines = (
        EquipmentInline,
        Official_ReceiptInline,
    )
    actions = [export_as_csv]
    # when readonly_fields on saving is not possible
    #readonly_fields = ('sitename_street', 'sitename_province', 'sitename_region', 'lic_to_operate', 'sitename_city', 'sitename_latitude', 'sitename_longitude')

    def response_change(self, request, obj):
        # Honor a "?next=" query parameter after saving a change.
        # SECURITY NOTE(review): request.GET['next'] is redirected to
        # without validation — an open-redirect risk if this admin is
        # reachable with attacker-supplied URLs; consider
        # django.utils.http.url_has_allowed_host_and_scheme.
        res = super(LatestRsl_v2Admin, self).response_change(request, obj)
        if "next" in request.GET:
            return HttpResponseRedirect(request.GET['next'])
        else:
            return res

    def response_add(self, request, obj):
        # Same "?next=" handling (and same open-redirect caveat) as
        # response_change above.
        res = super(LatestRsl_v2Admin, self).response_add(request, obj)
        if "next" in request.GET:
            return HttpResponseRedirect(request.GET['next'])
        else:
            return res

    class Media:
        # Extra admin-page assets.
        #js = ['js/latestrsl_v2.js', ]
        css = {
            'all': ['css/latestrsl_v2_css.css'],
        }
class Sequence_Thread_zmq(Sequence_logic, Sequence_comms_zmq, AbstractThread,
                          metaclass=classmaker()):
    """docstring for Sequence_Thread"""

    # sig_aborted = pyqtSignal()
    # Emitted once with the final status message when the sequence ends.
    sig_finished = pyqtSignal(str)
    # Emitted to forward free-form messages to the user.
    sig_message = pyqtSignal(str)
    __name__ = "Sequence_now"

    def __init__(
            self,
            controlsLock=None,
            # comms_downstream,
            # comms_data,
            **kwargs,
    ):
        super().__init__(_ident="sequence", **kwargs)
        self._logger = logging.getLogger("CryoGUI." + __name__ + "." +
                                         self.__class__.__name__)
        # self.devices = device_signals
        # self.comms_data = comms_data
        # self.comms_downstream = comms_downstream
        # Serializes access to instrument controls; a fresh Lock is made
        # when the caller does not supply one.
        self.controlsLock = Lock() if controlsLock is None else controlsLock
        # self.devices["Sequence"]["newconf"].connect(self.storing_thresholds)

    # @ExceptionHandling
    def work(self):
        """run the sequence, emit the finish-line"""
        # print('I will now start to work!')
        # print('data from main:' ,zmqquery(self.zmq_sSeq, 'data'))
        # Inverted pidfile probe: if we can ACQUIRE the "zmqLogger"
        # pidfile, the logger process is NOT running, so there is no data
        # and the sequence must abort. PidFileError means it IS running.
        try:
            with PidFile("zmqLogger"):
                msg = "zmqLogger is not running, no data available, aborting"
                self._logger.error(msg)
                self.sig_finished.emit(msg)
                return
        except PidFileError:
            pass
        try:
            with self.controlsLock:
                fin = self.running()
        except problemAbort as e:
            fin = f"Error occurred, aborting sequence! Error: {e}"
            self._logger.error(fin)
        finally:
            # "fin" is unbound when an exception other than problemAbort
            # escaped self.running(); the NameError fallback reports that.
            try:
                self.sig_finished.emit(fin)
            except NameError:
                self.sig_finished.emit(
                    "An Error occurred! Aborted sequence completely!")
                self._logger.error(
                    "An Error occurred! Aborted sequence completely!")

    def message_to_user(self, message: str) -> None:
        """deliver a message to a user in some way

        default is printing to the command line
        may be overriden!
        """
        # super().message_to_user(message)
        # print(message)
        # self.devices['general']['message_to_user'].emit(message)
        self.sig_message.emit(message)
        self._logger.warning(f"A message to the user: {message}")

    def execute_remark(self, remark: str, **kwargs) -> None:
        """use the given remark

        shoud be overriden in case the remark means anything"""
        # A remark beginning with "scanT" triggers an explicit temperature
        # scan over the numbers embedded in the remark text; IndexError
        # (no numbers found) silently falls through to plain forwarding.
        try:
            if remark.strip()[:5] == "scanT":
                self._logger.debug("scan T explicitly")
                temps = [float(x) for x in mS.searchf_number.findall(remark)]
                self.execute_scan_T(
                    start=temps[0],
                    end=temps[-1],
                    temperatures_forced=temps,
                    Nsteps=None,
                    SweepRate=None,
                    ApproachMode="No O'Shoot",
                    SpacingCode=None,
                    commands=[{
                        "typ": "Wait",
                        "Temp": True,
                        "Field": False,
                        "Position": False,
                        "Chamber": False,
                        "Delay": 60.0,
                        "DisplayText":
                        " Wait for Temperature & 60.0 seconds more",
                    }],
                )
        except IndexError:
            pass
        self.message_to_user(f"remark: {remark}")

    @pyqtSlot()
    def setTempVTIOffset(self, offset):
        # Store the VTI temperature offset for later use by the sequence.
        self.temp_VTI_offset = offset
class EncodedStreamBin(Base, gst.Bin):
    """A gst.Bin wrapping a source element plus a VorbisDemuxBin.

    The demuxed stream is exposed through a "src" ghost pad so the bin can be
    linked into the player's pipeline (at the player's vorbisparse sink pad).
    classmaker() resolves the metaclass conflict between Base and gst.Bin.
    """

    __metaclass__ = classmaker()

    def __init__(self, player, source, originator):
        """Build the source -> demux chain and expose its src as a ghost pad.

        player: owner object providing .pipe, .lock and .vorbisparse
        source: gst source element delivering the encoded data
        originator: stored for bookkeeping; not used elsewhere in this class
        """
        super(EncodedStreamBin, self).__init__()
        gst.Bin.__init__(self)
        self.__player = player
        self.__originator = originator
        # self.uri = uri
        self.source = source
        self.__demux = VorbisDemuxBin()
        self.__adder_sink = None  # player-side sink pad once linked
        self.src_blocked = False
        self.emitted_playing_message = False
        self.__decoder_linked = False
        self.__error = None
        self.__error_id = None
        self.add(self.source, self.__demux)
        gst.element_link_many(self.source, self.__demux)
        preroll_src = self.__demux.get_pad("src")
        self.src = gst.GhostPad("src", preroll_src)
        self.add_pad(self.src)
        # Add a padprobe to the src to catch the EOS and other events
        self.src.add_event_probe(self.__src_event_cb)
        # Share the bus with the player
        # self.set_bus(self.__player.pipe.get_bus())

    def new_source(self, source):
        """Swap in a new source element while holding the player lock.

        The bin is driven to STATE_NULL before relinking and back to
        STATE_PLAYING afterwards.
        """
        self.__player.lock.acquire()
        self.set_state(gst.STATE_NULL)
        # self.unlink_and_dispose()
        gst.element_unlink_many(self.source, self.__demux)
        self.remove(self.source)
        self.__demux.new_source()
        self.source = source
        self.add(self.source)
        gst.element_link_many(self.source, self.__demux)
        self.set_state(gst.STATE_PLAYING)
        self.__player.lock.release()

    def reset(self):
        """Bounce the bin through STATE_NULL -> STATE_PLAYING under the player lock."""
        self.__player.lock.acquire()
        self.set_state(gst.STATE_NULL)
        self.set_state(gst.STATE_PLAYING)
        self.__player.lock.release()

    def link_and_unblock(self):
        """
        This is used only at start
        changing source is done by new_source()

        Returns True when already linked or on success, False on failure.
        """
        logging.debug(
            "StreamBin::link_and_unblock(): Linking and unblocking stream")
        if self.__adder_sink:
            # already linked -- nothing to do
            return True
        # self.__player.sink_lock.acquire()
        # self.__player.sink_start()
        # self.__player.sink_lock.release()
        if not self.get_parent():
            logging.debug(
                "StreamBin::link_and_unblock(): Adding stream to player pipeline"
            )
            self.__player.pipe.add(self)
        # 64 is default quality
        self.__adder_sink = self.__player.vorbisparse.get_pad("sink")
        if not self.__adder_sink:
            logging.warning(
                "StreamBin::link_and_unblock(): Could not get adder_sink")
            return False
        try:
            self.src.link(self.__adder_sink)
            logging.debug(
                "StreamBin::link_and_unblock(): Stream src linked to adder_sink"
            )
        except:
            # NOTE(review): bare except keeps any link failure non-fatal;
            # narrowing it could change behavior, so it is left as-is.
            self.__adder_sink = None
            logging.warning(
                "StreamBin::link_and_unblock(): Could not link with adder_sink"
            )
            return False
        logging.debug("StreamBin::link_and_unblock(): Unblocking src")
        self.src.set_blocked_async(False, self.__src_unblocked_cb_null)
        # self.set_state(gst.STATE_PLAYING)
        # self.__player.pipe.set_state(gst.STATE_PLAYING)
        return True

    def unlink_and_dispose(self):
        """Stop the bin, unlink from the adder sink and remove it from the pipeline."""
        logging.debug(
            "StreamBin::unlink_and_dispose(): Unlinking and disposing stream")
        sr = self.set_state(gst.STATE_NULL)
        if sr == gst.STATE_CHANGE_ASYNC:
            # wait (blocking) until the async state change has completed
            logging.debug(
                "StreamBin::unlink_and_dispose(): Setting stream to STATE_NULL in async"
            )
            self.get_state(gst.CLOCK_TIME_NONE)
        logging.debug("StreamBin::unlink_and_dispose(): STATE_NULL set")
        if self.__adder_sink:
            self.src.unlink(self.__adder_sink)
            # self.__adder_sink.get_parent().release_request_pad(self.__adder_sink)
            self.__adder_sink = None
            logging.debug("StreamBin::unlink_and_dispose(): Unlinked")
        if self.get_parent() and self.get_parent() == self.__player.pipe:
            self.__player.pipe.remove(self)
        logging.debug("StreamBin::unlink_and_dispose(): Removed from pipeline")
        # self.__player.pipe.set_state(gst.STATE_PLAYING)
        # logging.debug("StreamBin::unlink_and_dispose(): Acquiring streams lock")
        # self.__player.streams_lock.acquire()
        # self.__player.streams.remove(self)
        # self.__player.streams_lock.release()
        # logging.debug("StreamBin::unlink_and_dispose(): Streams lock released. Removed from streams list.")

    def __new_decodebin_pad_cb(self, dbin, pad, islast):
        # NOTE(review): self.__audioconvert is never assigned in this class --
        # this callback would raise AttributeError if invoked; confirm whether
        # it is dead code inherited from a decoding variant of this bin.
        pad.link(self.__audioconvert.get_pad("sink"))
        self.__decoder_linked = True

    def __src_blocked_cb(self, pad, blocked):
        # NOTE(review): self.__preroll is never assigned in this class --
        # verify this callback is still registered anywhere.
        self.__preroll.set_property("min-threshold-buffers", 0)
        # self.start()

    def __src_unblocked_cb_null(self, pad, blocked):
        # intentional no-op callback for set_blocked_async()
        pass

    @asynchronous
    def __src_event_cb(self, pad, event):
        # Event probe on the src ghost pad: on EOS, kick off the next track
        # without blocking the streaming thread.
        if event.type == gst.EVENT_EOS:
            result = yield task(self.call, NextTrack(streamer=self.__player).run, unblock=True)

    def destruct(self):
        """
        This has to be called from another thread.
        """
        logging.debug("StreamBin::destruct(): Destructing stream")
        self.unlink_and_dispose()
class DamgaardJurikCiphertext(DamgaardJurikCiphertextBase, Integral):
    """Class representing the ciphertext in Damgaard-Jurik.

    Also represents the homomorphisms of Damgaard-Jurik: adding two
    ciphertexts multiplies them mod n**(s+1) (addition of the plaintexts),
    and multiplying a ciphertext by a plain integer exponentiates it
    (scalar multiplication of the plaintext).

    Fix vs. previous revision: __abs__ and __invert__ are unary operators
    and must not take an extra ``other`` parameter -- with the spurious
    parameter, abs(x) and ~x raised TypeError instead of reaching the
    NotImplemented stub like the other unsupported operators.
    """
    # classmaker() resolves the metaclass conflict between the bases
    # (see the "noconflict" module).
    __metaclass__ = classmaker()

    def __init__(self, c, key, cache=True, bucket_size=5):
        """Constructor:
        c: the ciphertext, represented as an integer type (or bytes)
        key: the DamgaardJurik instance this ciphertext belongs to
        cache: (optional) if True, we cache the powers of the ciphertext;
            this speeds up the square-and-multiply exponentiation used if
            lots of homomorphic manipulation takes place, the default is True
        bucket_size: (optional) only has an effect if cache=True, number of
            bits per bucket in the cache of powers, default 5

        Raises TypeError when c is not an integer/bytes or key is not a
        DamgaardJurik instance.
        """
        if isinstance(c, bytes):
            c = bytes2int(c)
        elif isinstance(c, (Integral, mpz_type)):
            pass
        else:
            raise TypeError('Expected argument c to be an integer')
        if not isinstance(key, DamgaardJurik):
            raise TypeError(
                'Expected argument key to be a DamgaardJurik instance')
        self.key = key
        # recover the expansion parameter s from the magnitude of c: c < n**(s+1)
        s = int(ceil(log(int(c), int(self.key.n)) - 1))
        ns1 = self.key.n**(s + 1)
        if has_gmpy:
            c = mpz(c)
            ns1 = mpz(ns1)
        self.c = c
        self.s = s
        self.ns1 = ns1
        if bucket_size > 8:
            import warnings
            warnings.warn(
                "Setting bucket_size > 8 allows timing attacks based on Python's handling of small integers"
            )
        self.bucket_size = bucket_size
        if cache:
            # cache[i][j] will hold c**(2**(bucket_size*i) * j) % ns1,
            # filled lazily by populate_cache()
            self.cache = [[
                None for _ in xrange((2**self.bucket_size))
            ] for __ in xrange(
                int(ceil(self.ns1.bit_length() / float(self.bucket_size))))]
        else:
            self.cache = None

    @property
    def c(self):
        """The raw ciphertext value (int or mpz)."""
        return self.__c

    @c.setter
    def c(self, value):
        self.__c = value

    @property
    def key(self):
        """The DamgaardJurik key this ciphertext belongs to."""
        return self.__key

    @key.setter
    def key(self, value):
        self.__key = value

    @property
    def s(self):
        """The Damgaard-Jurik expansion parameter."""
        return self.__s

    @s.setter
    def s(self, value):
        self.__s = value

    @property
    def ns1(self):
        """The modulus n**(s+1) used for all arithmetic."""
        return self.__ns1

    @ns1.setter
    def ns1(self, value):
        self.__ns1 = value

    @property
    def cache(self):
        """The table of precomputed powers, or None when caching is disabled."""
        return self.__cache

    @cache.setter
    def cache(self, value):
        self.__cache = value

    def populate_cache(self):
        """When caching of powers is enabled, populate the cache as appropriate.
        If the cache is not enabled, raises RuntimeError

        Idempotent: a second call is a no-op because cache[0][1] is set on
        the first call.
        """
        if self.cache is None:
            raise RuntimeError(
                "Tried to populate the cache of a DamgaardJurikCiphertext instance without a cache"
            )
        elif self.cache[0][1] is None:
            self.cache[0][1] = self.c
            base = self.c
            for i, bucket in enumerate(self.cache):
                if i != 0:
                    # bucket[1] = c**(2**(bucket_size*i)) built from the
                    # previous bucket's last entry times the previous base
                    bucket[1] = self.cache[i - 1][-1]
                    bucket[1] *= base
                    bucket[1] %= self.ns1
                    base = bucket[1]
                # assert base == pow(self.c, 2**(self.bucket_size*i), self.ns1)
                for j in xrange(2, len(bucket)):
                    bucket[j] = bucket[j - 1]
                    bucket[j] *= base
                    bucket[j] %= self.ns1
                    # assert bucket[j] % self.ns1 == pow(self.c, 2**(self.bucket_size*i)*j, self.ns1)

    def wrap(self, other):
        """Convert an integer to a DamgaardJurikCiphertext instance with the
        same arguments as this instance.
        """
        return type(self)(other, self.key, self.cache is not None,
                          self.bucket_size)

    def convert(self, i):
        """Encrypt an integer with the same key as this instance"""
        # it doesn't matter that r is chosen using a bad RNG because it will
        # be combined with our r that is chosen using a good RNG
        return self.key.encrypt(DamgaardJurikPlaintext(i), s=self.s)

    def __repr__(self):
        return 'DamgaardJurikCiphertext(%d, %s, cache=%s, bucket_size=%d)' \
            % (int(self.c), repr(self.key), self.cache is not None,
               self.bucket_size)

    def __str__(self):
        # Python 2 convention: str() yields the big-endian byte encoding
        return int2bytes(self.c)

    def __add__(self, other):
        """Homomorphic addition: multiply ciphertexts mod n**(s+1)."""
        if isinstance(other, DamgaardJurikCiphertext):
            if self.key is not other.key or self.s != other.s or self.ns1 != other.ns1:
                raise ValueError(
                    'Cannot add ciphertexts that belong to different keys')
            return self.wrap(self.c * other.c % self.ns1)
        else:
            # other is a int or long: encrypt it, then add homomorphically
            other = self.convert(other)
            return self + other

    def __radd__(self, other):
        return self + other

    def __sub__(self, other):
        """Homomorphic subtraction: multiply by the modular inverse."""
        if isinstance(other, DamgaardJurikCiphertext):
            if self.key is not other.key or self.s != other.s or self.ns1 != other.ns1:
                raise ValueError(
                    'Cannot subtract ciphertexts that belong to different keys'
                )
            return self.wrap(self.c * invert(other.c, self.ns1) % self.ns1)
        else:
            # other is a int or long
            other = self.convert(other)
            return self - other

    def __rsub__(self, other):
        if isinstance(other, DamgaardJurikCiphertext):
            if self.key is not other.key or self.s != other.s or self.ns1 != other.ns1:
                raise ValueError(
                    'Cannot subtract ciphertexts that belong to different keys'
                )
            return self.wrap(other.c * invert(self.c, self.ns1) % self.ns1)
        else:
            # other is a int or long
            other = self.convert(other)
            return other - self

    def __mul__(self, other):
        """Homomorphic scalar multiplication: exponentiate by a plain integer."""
        if isinstance(other, DamgaardJurikCiphertext):
            raise TypeError(
                "It is nonsense to try to multiply ciphertexts. You can only multiply ciphertexts by normal integers"
            )
        other %= self.ns1
        if self.cache is None:
            return self.wrap(pow(self.c, other, self.ns1))
        else:
            # perform the cache-accelerated exponentiation; the 'garbage'
            # multiplications keep the work per bucket roughly uniform to
            # blunt timing side channels
            self.populate_cache()
            retval = 1
            garbage = 1
            for i, b in ((i, (other >> (i * self.bucket_size)) & (
                    (1 << self.bucket_size) - 1)) for i in xrange(
                        int(ceil(other.bit_length() / float(self.bucket_size))))):
                j = random.randrange(1, len(
                    self.cache[i]))  # TODO: use a better random generator
                if b == 0:
                    garbage *= self.cache[i][j]
                    garbage %= self.ns1
                if b != 0:
                    retval *= self.cache[i][b]
                    retval %= self.ns1
                    garbage = deepcopy(retval)
            return self.wrap(retval)

    def __rmul__(self, other):
        return self * other

    def __div__(self, other):
        """Homomorphic division by a plain integer (multiply by its inverse)."""
        if isinstance(other, DamgaardJurikCiphertext):
            raise TypeError(
                "It is nonsense to try to divide ciphertexts. You can only divide ciphertexts by normal integers"
            )
        return self * invert(other, self.ns1)

    def __truediv__(self, other):
        return self.__div__(other)

    # Operations without a homomorphic meaning are explicitly unsupported.
    def __rdiv__(self, other):
        raise NotImplementedError

    def __rtruediv__(self, other):
        raise NotImplementedError

    def __mod__(self, other):
        raise NotImplementedError

    def __divmod__(self, other):
        raise NotImplementedError

    def __floordiv__(self, other):
        raise NotImplementedError

    def __rmod__(self, other):
        raise NotImplementedError

    def __rdivmod__(self, other):
        raise NotImplementedError

    def __rfloordiv__(self, other):
        raise NotImplementedError

    def __neg__(self):
        """Homomorphic negation: the modular inverse of the ciphertext."""
        return self.wrap(invert(self.c, self.ns1))

    def __pos__(self):
        return self

    def __hash__(self):
        return hash((int(self.c), self.key, int(self.s), int(self.ns1)))

    # Comparisons operate on the raw ciphertext values.
    def __lt__(self, other):
        if isinstance(other, DamgaardJurikCiphertext):
            return self.c < other.c
        else:
            return self.c < other

    def __le__(self, other):
        return not self > other

    def __eq__(self, other):
        if isinstance(other, DamgaardJurikCiphertext):
            return self.c == other.c
        else:
            return self.c == other

    def __ne__(self, other):
        return not self == other

    def __gt__(self, other):
        if isinstance(other, DamgaardJurikCiphertext):
            return self.c > other.c
        else:
            return self.c > other

    def __ge__(self, other):
        return not self < other

    # Numeric conversions required by the Integral ABC.
    def __int__(self):
        return int(self.c)

    def __trunc__(self):
        return int(self)

    def __long__(self):
        return long(int(self))

    def __float__(self):
        return float(int(self))

    def __complex__(self):
        return complex(int(self))

    # Bitwise and in-place operators are deliberately unsupported.
    def __and__(self, other):
        return NotImplemented

    def __xor__(self, other):
        return NotImplemented

    def __or__(self, other):
        return NotImplemented

    def __pow__(self, other):
        return NotImplemented

    def __lshift__(self, other):
        return NotImplemented

    def __rshift__(self, other):
        return NotImplemented

    def __rand__(self, other):
        return NotImplemented

    def __rxor__(self, other):
        return NotImplemented

    def __ror__(self, other):
        return NotImplemented

    def __rpow__(self, other):
        return NotImplemented

    def __rlshift__(self, other):
        return NotImplemented

    def __rrshift__(self, other):
        return NotImplemented

    def __iadd__(self, other):
        return NotImplemented

    def __isub__(self, other):
        return NotImplemented

    def __imul__(self, other):
        return NotImplemented

    def __idiv__(self, other):
        return NotImplemented

    def __itruediv__(self, other):
        return NotImplemented

    def __ifloordiv__(self, other):
        return NotImplemented

    def __imod__(self, other):
        return NotImplemented

    def __ipow__(self, other):
        return NotImplemented

    def __ilshift__(self, other):
        return NotImplemented

    def __irshift__(self, other):
        return NotImplemented

    def __iand__(self, other):
        return NotImplemented

    def __ixor__(self, other):
        return NotImplemented

    def __ior__(self, other):
        return NotImplemented

    # FIX: unary operators take no second argument.
    def __abs__(self):
        return NotImplemented

    def __invert__(self):
        return NotImplemented