def __init__(self, inbox_queue):
    Channel.__init__(self, inbox_queue)
    self.auth = None
    self.api = None
    self.username = '******'
    self.listener = None
    self.stream = None
    self.__load_auth()
    self.__load_config()
def save(self, *args, **kwargs):
    super(TaskRun, self).save(*args, **kwargs)
    # Should send progress (or all fields) to the WebSocket.
    Channel('taskrun-channel').send({
        'taskrun_pk': self.pk,
        'progress': self.progress
    })
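# --- Hedged sketch (not from the original source): a Channels 1.x consumer
# that could receive the 'taskrun-channel' messages sent above. The consumer
# name and the WebSocket group name 'taskrun-updates' are assumptions; only
# the 'taskrun_pk' and 'progress' keys come from the save() method.
import json
from channels import Group

def taskrun_progress_consumer(message):
    # Fan the progress update out to any WebSocket clients watching task runs.
    Group('taskrun-updates').send({
        'text': json.dumps({
            'taskrun_pk': message.content['taskrun_pk'],
            'progress': message.content['progress'],
        })
    })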
def run(self, taskrun):
    time.sleep(10)
    Channel(self.channel).send({
        'taskrun_pk': taskrun.pk,
        'message': self.message
    })

#################################################
# class RedisTask(Task):
#
#     class Meta:
#         verbose_name = "Procedura Redis"
#         verbose_name_plural = "Procedure Redis"
#
#     def run(self, taskrun):
#         r = redis.StrictRedis.from_url('redis://localhost:6379/0')
#         r.publish('task-channel', {'taskrun_pk': taskrun.pk})
#         p = r.pubsub()
#         p.subscribe('my-first-channel', 'my-second-channel', ...)
#         p.psubscribe('my-*', ...)
#         p.psubscribe('django-channels:PUB:prova1')
#         p.get_message()
#         for message in p.listen():
#             print(message)
def _apply_event(self, event):
    # Apply a log event to the DBI context.
    if not isinstance(event, DBIEventBase):
        raise ValueError("_apply_event: event must be of type DBIEventBase")
    if isinstance(event, DBIEventChannelDump):
        return self.__apply_cdo(event)
    logging_event = None
    if isinstance(event, DBIEventChannelNew):
        self.__channels[event.channel_name] = Channel(
            name=event.channel_name,
            channel_source=ChannelSourceLogger(initial=event.sample))
        logging_event = LoggingEventChannelNew(
            channel=copy(self.__channels[event.channel_name]),
            record=event.record_position)
    elif isinstance(event, DBIEventNewSample):
        self.__channels[event.channel_name].producer_set(event.sample)
        logging_event = LoggingEventNewSample(
            channel=copy(self.__channels[event.channel_name]),
            record=event.record_position)
    elif isinstance(event, DBIEventChannelRemove):
        logging_event = LoggingEventChannelRemove(
            channel=copy(self.__channels[event.channel_name]),
            record=event.record_position)
        del self.__channels[event.channel_name]
    else:
        raise ValueError("_apply_event: unknown event %s" %
                         event.__class__.__name__)
    self.__last_logging_event = logging_event
    self.__position = event.record_position
def post(self, request):
    form = MailMeForm(request.POST)
    if form.is_valid():
        Channel('send_email').send({'payload': form.cleaned_data})
        return HttpResponse('Good job!')
    else:
        return render(request, 'mailme.html', {'form': form})
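# --- Hedged sketch (not from the original source): a possible consumer for
# the 'send_email' channel above. The subject line, sender address, and the
# assumption that cleaned_data contains 'email' and 'message' fields are all
# hypothetical; only the 'payload' key is given by the view.
from django.core.mail import send_mail

def send_email_consumer(message):
    payload = message.content['payload']
    # Deliver the form contents outside the request/response cycle.
    send_mail(
        subject='MailMe form submission',           # assumed subject line
        message=payload.get('message', ''),         # assumes a 'message' field
        from_email='noreply@example.com',           # placeholder sender
        recipient_list=[payload.get('email', '')],  # assumes an 'email' field
    )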
def build_channel(channel, channel_kwargs, output, prefix, test=False):
    """ Builds the channel and outputs data """
    # Creates data directory if it doesn't exist
    if not os.path.exists('data'):
        os.makedirs('data')
    # ================== CREATES CHANNEL AND EXPORTS TO FILE ==================
    out_file = 'data/' + prefix + '_data_ALL.json'
    if not os.path.isfile(out_file):
        print("Creating channel...")
        start = time()
        psi = channel(**channel_kwargs)
        end = time() - start
        print("Runtime = %dm %ds\n" % (end / 60, end % 60))
        print("Exporting channel to file...")
        psi.export_channel_to_file(out_file)
    # ======================== TESTS ALL STABS CORRECT ========================
    # (assumes the channel was built in this run, i.e. psi is defined)
    if test:
        psi._test_all_non_trivial_combos_found(print_stabs=True)
        psi._test_combo_stabs_correct()
    # ======= REMOVES STABS WITHOUT SUPPORT ON INPUT AND OUTPUT QUBITS ========
    out_file = 'data/' + prefix + '_data_JOINED.json'
    if not os.path.isfile(out_file):
        print("Removing stabilizers without support on I or O...")
        in_file = 'data/' + prefix + '_data_ALL.json'
        psi = Channel(filename=in_file)
        inputs = list(set(psi._support(psi.X_op)) | set(psi._support(psi.Z_op)))
        psi.update_inputs_and_outputs(inputs=inputs, outputs=[output], join=True)
        psi.export_channel_to_file(out_file)
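# --- Hedged usage sketch (not from the original source): one plausible way
# to drive build_channel. The channel class, its kwargs, the output qubit
# index, and the prefix are all hypothetical; the real constructor may take
# different arguments.
if __name__ == '__main__':
    build_channel(channel=Channel,                   # class used to build psi
                  channel_kwargs={'filename': 'data/my_graph.json'},
                  output=5,                          # hypothetical output qubit
                  prefix='my_channel',
                  test=False)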
def start(self):
    if self.max_run > 0 and self.running_count >= self.max_run:
        raise Exception(build_error_response(RESPERR.TOO_MANY_RUNS).data)
    taskrun = TaskRun.objects.create(task=self)
    try:
        # 'async' is a reserved word in Python 3.7+, so read the field
        # via getattr rather than attribute syntax.
        if getattr(self, 'async'):
            Channel('task-run').send({'taskrun_pk': taskrun.pk})
        else:
            self.downcast.run(taskrun)
    except Exception as exc:
        taskrun.fail(build_exception_response().data)
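# --- Hedged sketch (not from the original source): a possible Channels 1.x
# consumer for the 'task-run' channel used above. The consumer name is an
# assumption; it re-enters the same downcast.run() path as the synchronous
# branch of start().
def task_run_consumer(message):
    taskrun = TaskRun.objects.get(pk=message.content['taskrun_pk'])
    taskrun.task.downcast.run(taskrun)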
def channel_add(self, channel_name, channel_source):
    if self.channel_exists(channel_name):
        raise ChannelAlreadyExists("channel '%s' already exists" % channel_name)
    channel = Channel(name=channel_name, channel_source=channel_source)
    self.__channels[channel_name] = channel
    self.__channel_publisher.new_channel(channel)
    return channel
def __apply_cdo(self, cdo):
    # Apply a channel dump object to the DBI context.
    if not isinstance(cdo, DBIEventChannelDump):
        raise ValueError("__apply_cdo: cdo must be of type DBIEventChannelDump")
    self.__channels.clear()
    for channel_name in cdo.channel_dict:
        self.__channels[channel_name] = Channel(
            name=channel_name,
            channel_source=ChannelSourceLogger(
                initial=cdo.channel_dict[channel_name]))
    self.__position = cdo.record_position
    self.__last_logging_event = LoggingEventMeta(
        record=self.__position, description="Channel dump")
def terminal(request):
    if request.method == 'POST':
        command = request.POST['command']
        server_id = request.POST['server_id']
        print(server_id)
        print(command)
        # Look up the reply channel registered for this server.
        channel_reply = Server.objects.filter(
            id=server_id).values('channel_reply')[0]['channel_reply']
        print('from views ' + channel_reply)
        Channel(channel_reply).send({"text": command})
    context = {'object_list': 'abc'}
    return render(request, 'terminal.html', context)
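# --- Hedged sketch (not from the original source): one way the Server row's
# 'channel_reply' field used above could be populated, by recording the reply
# channel when the server's WebSocket connects. The 'server_id' query-string
# convention is an assumption.
def server_connect(message):
    message.reply_channel.send({'accept': True})
    # Channels 1.x puts the raw query string in message.content.
    server_id = message.content.get('query_string', '').replace('server_id=', '')
    Server.objects.filter(id=server_id).update(
        channel_reply=message.reply_channel.name)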
def analyse_channel(prefix, i, o, max_weight, rel_weights, verbose=False,
                    workers=1, mc_reps=1000):
    """ Performs a full analysis on the specified channel """
    print('\n analyse_channel \n')
    # ======================= GETS MEASUREMENT PATTERNS =======================
    # (higher weight => deeper search for measurement patterns)
    print("Finding measurement patterns...")
    in_file = 'data/' + prefix + '_data_JOINED.json'
    out_file = 'data/' + prefix + '_MW%d_MNT_PATS.json' % (max_weight)
    if not os.path.isfile(out_file):
        psi = Channel(filename=in_file)
        mnt_pats, qubit_key = psi.get_mnt_patterns(
            max_weight=max_weight, rel_weight=rel_weights)
        mnt_pats_key = {'mnt_pats': mnt_pats, 'qubit_key': qubit_key}
        with open(out_file, 'w') as fp:
            json.dump(mnt_pats_key, fp)
    else:
        mnt_pats = json.load(open(out_file, 'r'))
        mnt_pats, qubit_key = mnt_pats['mnt_pats'], mnt_pats['qubit_key']
        # Turns JSON strings back to ints
        mnt_pats = {int(key): value for key, value in mnt_pats.items()}
    out_file = 'data/' + prefix + '_MW%d_LOSS_TOL_RAW.json' % (max_weight)
    if not os.path.isfile(out_file):
        loss_tol = get_loss_tolerance(mnt_pats, qubit_key)
        with open(out_file, 'w') as fp:
            json.dump(loss_tol, fp)
    # ================ IMPORTS RAW DATA AND FINDS ALL LOSS TOLS ===============
    print("Finding all loss tolerances...")
    in_file = 'data/' + prefix + '_MW%d_LOSS_TOL_RAW.json' % (max_weight)
    out_file = 'data/' + prefix + '_MW%d_SPF_LOSS_TOL_ALL.json' % (max_weight)
    if not os.path.isfile(out_file):
        loss_tols = import_loss_tols(in_file, filename=out_file)
    # ================ FINDS GRAPH PATHFINDING LOSS TOLERANCES ================
    print("Finding graph pathfinding loss tolerances...")
    in_file = 'data/' + prefix + '_data_ALL.json'
    out_file = 'data/' + prefix + '_GPF_LOSS_TOL_ALL.json'
    if not os.path.isfile(out_file):
        data = json.load(open(in_file, 'r'))
        edges = data["edges"]
        graph = nx.Graph(edges)
        graph_loss_tols(graph, i, o, filename=out_file)
    # =============== GETS PER NODE LOSS TOLERANCE FOR HEATMAPS ===============
    print("Calculating per-node loss tolerances...")
    in_file = 'data/' + prefix + '_MW%d_SPF_LOSS_TOL_ALL.json' % (max_weight)
    out_file = 'data/' + prefix + '_MW%d_SPF_PER_NODE_TOL.csv' % (max_weight)
    if not os.path.isfile(out_file):
        all_tols = json.load(open(in_file, 'r'))
        per_node_tols = get_per_node_loss_tol(all_tols, filename=out_file)
    # ============== SIMULATES SPF LOSS TOLERANCE VIA MONTE CARLO =============
    print("Simulating SPF loss tolerance...")
    in_file = 'data/' + prefix + '_MW%d_SPF_LOSS_TOL_ALL.json' % (max_weight)
    out_file = 'data/' + prefix + \
        '_MW%d_%dMC_SPF_TEL_RATE.csv' % (max_weight, mc_reps)
    if not os.path.isfile(out_file):
        loss_probs = np.linspace(0, 1, 101)
        spf_loss_tols = json.load(open(in_file, 'r'))
        spf_data = heralded_loss_tel_mc(spf_loss_tols, qubit_key, loss_probs,
                                        mc_reps, filename=out_file,
                                        verbose=verbose)
    # ============== SIMULATES GPF LOSS TOLERANCE VIA MONTE CARLO =============
    print("Simulating GPF loss tolerance...")
    in_file = 'data/' + prefix + '_GPF_LOSS_TOL_ALL.json'
    out_file = 'data/' + prefix + '_%dMC_GPF_TEL_RATE.csv' % (mc_reps)
    if not os.path.isfile(out_file):
        loss_probs = np.linspace(0, 1, 101)
        gpf_loss_tols = json.load(open(in_file, 'r'))
        gpf_data = heralded_loss_tel_mc(gpf_loss_tols, qubit_key, loss_probs,
                                        mc_reps, filename=out_file,
                                        verbose=verbose)
    # ===== CALCULATES PROPORTION OF LOSS CONFIGURATIONS SPF TOLERANT TO ======
    print("Finding SPF configuration loss tolerance...")
    in_file = 'data/' + prefix + '_MW%d_SPF_LOSS_TOL_ALL.json' % (max_weight)
    out_file = 'data/' + prefix + '_MW%d_SPF_CONFIG_TOL.csv' % (max_weight)
    if not os.path.isfile(out_file):
        spf_loss_tols = json.load(open(in_file, 'r'))
        spf_data = get_qubit_no_loss_tolerances(spf_loss_tols, qubit_key,
                                                filename=out_file)
    # ===== CALCULATES PROPORTION OF LOSS CONFIGURATIONS GPF TOLERANT TO ======
    print("Finding GPF configuration loss tolerance...")
    in_file = 'data/' + prefix + '_GPF_LOSS_TOL_ALL.json'
    out_file = 'data/' + prefix + '_GPF_CONFIG_TOL.csv'
    if not os.path.isfile(out_file):
        gpf_loss_tols = json.load(open(in_file, 'r'))
        gpf_data = get_qubit_no_loss_tolerances(gpf_loss_tols, qubit_key,
                                                filename=out_file)
    # ===================== GET UNHERALDED LOSS TOLERANCE =====================
    print("Finding unheralded loss tolerances...")
    out_file = 'data/' + prefix + \
        '_MW%d_%dMC_SPF_UH_MT_TEL_RATE.csv' % (max_weight, mc_reps)
    if not os.path.isfile(out_file):
        # Loads in measurement patterns and converts JSON strings to ints
        in_file = 'data/' + prefix + '_MW%d_MNT_PATS.json' % (max_weight)
        mnt_pats = json.load(open(in_file, 'r'))
        mnt_pats, qubit_key = mnt_pats['mnt_pats'], mnt_pats['qubit_key']
        mnt_pats = {int(key): value for key, value in mnt_pats.items()}
        # Loads in loss tolerances
        in_file = 'data/' + prefix + \
            '_MW%d_SPF_LOSS_TOL_ALL.json' % (max_weight)
        all_tols = json.load(open(in_file, 'r'))
        # Simulate "max-tolerance" measurement strategy performance
        strategy = 'max_tol'
        loss_probs = np.linspace(0, 1, 101)
        unheralded_loss_tel_mc(mnt_pats, all_tols, qubit_key, loss_probs,
                               mc_reps, filename=out_file, workers=workers,
                               strategy=strategy, verbose=verbose)
def ws_connect(message):
    # Accept the connection on the reply channel.
    message.reply_channel.send({'accept': True})
    # Connect to the DB.
    conn = sqlite3.connect('db.sqlite3')
    cur = conn.cursor()
    # Get the path and add the user to Group(path).
    path = message.content['path'].strip('/')
    Group(path).add(message.reply_channel)
    if path == "chat":
        # Limit how many messages are shown.
        rows_limit = 100
        cur.execute("SELECT COUNT() FROM chatapp_chat")
        cur_size = cur.fetchone()[0]
        offset = (cur_size - rows_limit) if (cur_size - rows_limit) > 0 else 0
        t = (rows_limit, offset)
        cur.execute(
            "SELECT message, username, time FROM chatapp_chat LIMIT ? OFFSET ?",
            t)
    else:
        # Get the id of the private chat.
        chat_id = int(path[4:])
        # Set user_on = true and new_message = false in the chat with this chat_id.
        user_id = message.user.id
        t = (1, 0, chat_id, user_id)
        cur.execute(
            "UPDATE chatapp_privat_chat_user SET user_on = ?, new_message = ? "
            "WHERE chat_id = ? AND user_id = ?", t)
        conn.commit()
        # Fetch the messages for this chat_id.
        t = (chat_id,)
        cur.execute(
            "SELECT message, username, time FROM chatapp_privat_chat WHERE chat_id = ?",
            t)
    # Send the messages from the DB.
    results = cur.fetchall()
    for res in results:
        try:
            time = datetime.strptime(res[2].rpartition('.')[0],
                                     "%Y-%m-%d %H:%M:%S")
            time = datetime.strftime(time, "%d.%m.%y %H:%M:%S")
        except Exception:
            time = res[2]
        message.reply_channel.send({
            'text': json.dumps({
                'message': res[0],
                'username': res[1],
                'time': time
            })
        })
    if not message.user.id:
        conn.close()
        return
    # Register the user's reply channel in the DB.
    channel_name = message.reply_channel.name
    user_id = message.user.id
    t = (user_id, channel_name)
    cur.execute(
        "INSERT INTO chatapp_reply_channel (user_id, reply_channel) VALUES (?, ?)",
        t)
    conn.commit()
    # Check for new messages in other chats.
    t = (message.user.id, 1)
    cur.execute(
        "SELECT reply_channel FROM chatapp_privat_chat_user, chatapp_reply_channel "
        "WHERE chatapp_privat_chat_user.user_id = chatapp_reply_channel.user_id "
        "AND chatapp_privat_chat_user.user_id = ? AND new_message = ?", t)
    results = cur.fetchall()
    if results:
        try:
            Channel(results[0][0]).send({
                'text': json.dumps({
                    'message': "New message in another private chat",
                    'username': "******",
                    'time': 0
                })
            })
        except Exception:
            print("ERROR results =", results)
    conn.close()
def __init__(self, inbox_queue):
    Channel.__init__(self, inbox_queue)
    Updater.__init__(self, TELEGRAM_TOKEN)
    self.__load_handlers()
    self.username = '******'
    self.username_tag = '@{}'.format(self.username)
def tenant_multiplexer(stream, reply_channel):
    return TenantMultiplexer(stream, Channel(reply_channel[:]))
# coding: utf-8
from channels.channel import Channel
from django.conf.urls import patterns, url, include
from rest_framework.routers import SimpleRouter

from tasks.views import DoitView, TaskView, TaskViewSet, TaskRunViewSet

router = SimpleRouter()

##### Endpoint list ################################
router.register(r'tasks', TaskViewSet)
router.register(r'taskruns', TaskRunViewSet)

##### URL registration ####################################
urlpatterns = patterns(
    '',
    url(r'^', include(router.urls)),
)

##### Views ################################
urlpatterns += patterns(
    '',
    url(r'^doit/$', DoitView.as_view(), name='doit'),
    url(r'^as_view/$', Channel('as_view').as_view(), name='as_view'),
    url(r'^taskrun$', TaskView.as_view(), name='taskrun'),
)
def get(self, request):
    Channel('slow-channel').send({'task_name': request.path[1:]})
    return HttpResponse("Hello world! You asked for {} with {}".format(
        request.path, request.method))
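# --- Hedged sketch (not from the original source): a possible Channels 1.x
# consumer for the 'slow-channel' used above. The consumer name and the
# placeholder sleep are assumptions; only the 'task_name' key comes from
# the view.
import time

def slow_task_consumer(message):
    # Run the slow work outside the request/response cycle.
    time.sleep(5)  # placeholder workload
    print('Finished slow task: ' + message.content['task_name'])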
def ws_message(message):
    conn = sqlite3.connect('db.sqlite3')
    cur = conn.cursor()
    path = message.content['path'].strip('/')
    # Add the message to the DB.
    if path == "chat":
        t = (message.user.username, message.content['text'], datetime.now())
        cur.execute(
            "INSERT INTO chatapp_chat (username, message, time) VALUES (?, ?, ?)",
            t)
        conn.commit()
    else:
        # Get the private chat id.
        chat_id = int(path[4:])
        t = (chat_id, message.user.username, message.content['text'],
             datetime.now())
        # Add the message to the DB with the correct chat id.
        cur.execute(
            "INSERT INTO chatapp_privat_chat (chat_id, username, message, time) "
            "VALUES (?, ?, ?, ?)", t)
        conn.commit()
        # Set new_message = true for users with user_on = false.
        t = (1, chat_id, 0)
        cur.execute(
            "UPDATE chatapp_privat_chat_user SET new_message = ? "
            "WHERE chat_id = ? AND user_on = ?", t)
        conn.commit()
        # Send a "new message" notification to users with new_message = true.
        t = (chat_id, 1)
        cur.execute(
            "SELECT reply_channel FROM chatapp_privat_chat_user, chatapp_reply_channel "
            "WHERE chatapp_privat_chat_user.user_id = chatapp_reply_channel.user_id "
            "AND chat_id = ? AND new_message = ?", t)
        results = cur.fetchall()
        for res in results:
            try:
                Channel(res[0]).send({
                    'text': json.dumps({
                        'message': "New message in another private chat",
                        'username': "******",
                        'time': 0
                    })
                })
            except Exception:
                print("ERROR Channel name must be a valid unicode string")
    Group(path).send({
        'text': json.dumps({
            'message': message.content['text'],
            'username': message.user.username,
            'time': datetime.strftime(datetime.now(), "%d.%m.%y %H:%M:%S")
        })
    })
    conn.close()
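# --- Hedged sketch (not from the original source): Channels 1.x routing that
# would wire the ws_connect and ws_message consumers up. The module path
# 'chatapp.consumers' is an assumption.
from channels.routing import route
from chatapp.consumers import ws_connect, ws_message

channel_routing = [
    route("websocket.connect", ws_connect),
    route("websocket.receive", ws_message),
]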