Example #1
    def read_ontology_nodes(channel_name, view, request, user):
        channel = Channel(channel_name)
        compendium = CompendiumDatabase.objects.get(id=request['compendium_id'])

        ontology = Ontology.objects.using(compendium.compendium_nick_name).get(id=request['values']['ontology_id'])
        if request['values']['text']:
            query_response = OntologyNode.objects.using(compendium.compendium_nick_name).filter(
                Q(ontology=ontology) &
                Q(original_id__icontains=request['values']['text'])
            )
        else:
            query_response = OntologyNode.objects.using(compendium.compendium_nick_name).filter(
                Q(ontology=ontology)
            )[:100]
        total = query_response.count()
        nodes = []
        for g in query_response:
            gg = g.to_dict()
            gg['valid'] = True
            nodes.append(gg)

        channel.send({
            'text': json.dumps({
                'stream': view,
                'payload': {
                    'request': request,
                    'data': {
                        'nodes': nodes,
                        'total': total
                    }
                }
            })
        })
Example #2
    def read_ontologies(channel_name, view, request, user):
        channel = Channel(channel_name)
        compendium = CompendiumDatabase.objects.get(id=request['compendium_id'])

        start = 0
        end = None
        if request['page_size']:
            start = (request['page'] - 1) * request['page_size']
            end = start + request['page_size']

        order = ''
        if request['ordering'] == 'DESC':
            order = '-'
        query_response = Ontology.objects.using(compendium.compendium_nick_name).filter(Q(name__icontains=request['filter'])) \
            .order_by(order + request['ordering_value'])
        total = query_response.count()
        query_response = query_response[start:end]

        ontologies = [g.to_dict(columns=True) for g in query_response]

        channel.send({
            'text': json.dumps({
                'stream': view,
                'payload': {
                    'request': request,
                    'data': {
                        'ontologies': ontologies,
                        'total': total
                    }
                }
            })
        })
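The start/end/ordering computation at the top of this consumer recurs almost verbatim in most of the paginated readers below. A possible refactoring sketch; the helper name is an assumption, not part of the original code:

    def get_slice_and_ordering(request):
        # Translate the paging fields of the request into a queryset slice.
        start, end = 0, None
        if request['page_size']:
            start = (request['page'] - 1) * request['page_size']
            end = start + request['page_size']
        # 'DESC' maps onto Django's leading-minus ordering syntax.
        order = '-' if request['ordering'] == 'DESC' else ''
        return start, end, order + request['ordering_value']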
Example #3
def message_consumer(message):
    try:
        content = message.content['text']
        data = json.loads(content)
        if data['type'] == 'send_message':
            data = data['data']
            receiver = uuid.UUID(data['receiver'])
            receiver = Account.objects.get(id=receiver)
            channel = ActiveChannel.objects.filter(owner=receiver)
            if channel.exists():
                channel = Channel(channel.get().name)
                channel.send({
                    'text':
                    json.dumps({
                        'type': 'received_message',
                        'data': {
                            'sender': message.channel_session['owner'],
                            'message_type': data['message_type'],
                            'content': data['content'],
                        },
                    })
                })
            else:
                PendingMessage(receiver=receiver,
                               payload=data['content'].encode()).save()
    except Exception:
        traceback.print_exc()
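For context, a consumer like this is only reachable once it is bound to a channel name in a Channels 1.x routing module. A minimal sketch, assuming a channel name of 'chat.receive' (the actual name is not shown in the source):

    from channels.routing import route

    channel_routing = [
        # Deliver messages sent to Channel('chat.receive') to the consumer above.
        route('chat.receive', message_consumer),
    ]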
Example #4
    def read_groups(channel_name, view, request, user):
        channel = Channel(channel_name)

        start = 0
        end = None
        if request['page_size']:
            start = (request['page'] - 1) * request['page_size']
            end = start + request['page_size']
        fields = ['id', 'name']

        order = ''
        if request['ordering'] == 'DESC':
            order = '-'
        query_response = UserGroup.objects.filter(Q(name__contains=request['filter'])) \
            .order_by(order + request['ordering_value'])
        total = query_response.count()
        query_response = query_response[start:end]

        groups = [{k: g.__dict__[k] for k in fields} for g in query_response]

        channel.send({
            'text':
            json.dumps({
                'stream': view,
                'payload': {
                    'request': request,
                    'data': {
                        'groups': groups,
                        'total': total
                    }
                }
            })
        })
Example #5
    def handle(self, *args, **options):
        c = Channel('docker_events', channel_layer=channel_layers['default'])
        cli = docker_cli()
        for container in cli.containers():
            workspaces = list(
                Project.objects.filter(container_id=container["Id"]))
            if len(workspaces):
                workspace = workspaces[0]
                c.send({
                    "user__pk": workspace.user.pk,
                    "status": container.get('State', 'Unknown'),
                    "workspace__pk": workspace.pk,
                })

        events = cli.events()
        for event in events:
            event = json.loads(event)
            skip = False
            for prefix in [u'exec_', u'kill', u'die']:
                if event.get('status', u'exec_default').startswith(prefix):
                    skip = True
                    break
            if not skip:
                workspaces = list(
                    Project.objects.filter(container_id=event["id"]))
                if len(workspaces):
                    workspace = workspaces[0]
                    print(event)
                    c.send({
                        "user__pk": workspace.user.pk,
                        "status": event["status"],
                        "workspace__pk": workspace.pk,
                    })
        events.close()
Example #6
    def read_compendium_types_channel(channel_name, view, request, user):
        channel = Channel(channel_name)

        start = 0
        end = None
        if request['page_size']:
            start = (request['page'] - 1) * request['page_size']
            end = start + request['page_size']
        c_types = []
        order = ''
        if request['ordering'] == 'DESC':
            order = '-'
        query_response = CompendiumType.objects.filter(Q(name__contains=request['filter']) |
                                             Q(description__contains=request['filter'])).order_by(
            order + request['ordering_value'])
        total = query_response.count()
        query_response = query_response[start:end]
        for ct in query_response:
            c_types.append(ct.to_dict())

        channel.send({
            'text': json.dumps({
                'stream': view,
                'payload': {
                    'request': request,
                    'data': {
                        'compendium_types': c_types,
                        'total': total
                    }
                }
            })
        })
Example #7
    def read_experiment_experiment_files(channel_name, view, request, user):
        channel = Channel(channel_name)

        compendium = CompendiumDatabase.objects.get(id=request['compendium_id'])
        experiment = Experiment.objects.using(compendium.compendium_nick_name).get(id=request['values'])
        exp = experiment.to_dict()
        exp['status'] = Status.objects.using(compendium.compendium_nick_name). \
            get(name='entity_script_ready').to_dict()
        try:
            exp['status'] = experiment.assignedfile_set.all()[0].status.to_dict()
        except Exception as e:
            pass
        exp['parsing_details'] = [dict(list(assigned_file.to_dict().items()) +
                                       list({'status': exp['status']}.items()))
                                  for assigned_file in experiment.assignedfile_set.all()]

        channel.send({
            'text': json.dumps({
                'stream': view,
                'payload': {
                    'request': request,
                    'data': {
                        'status': exp['status'],
                        'files': exp['parsing_details']
                    }
                }
            })
        })
Example #8
    def read_experiment(channel_name, view, request, user):
        channel = Channel(channel_name)

        compendium = CompendiumDatabase.objects.get(id=request['compendium_id'])

        Group("compendium_" + str(compendium.id) + "_" + str(request['values'])).add(channel)

        parsing_db = init_parsing(request['compendium_id'], request['values'])
        experiment = Experiment.objects.using(compendium.compendium_nick_name).get(id=request['values'])
        parsing_experiment = ParsingExperiment.objects.using(parsing_db).get(experiment_fk=request['values'])
        n_samples = parsing_experiment.parsingsample_set.all().count()
        platforms = ",".join(list(set(
                [sample.platform.platform_access_id for sample in parsing_experiment.parsingsample_set.all()]
            )))
        status = 'importing' if experiment.status.name == 'experiment_raw_data_importing' else None
        if not status:
            for smp in experiment.sample_set.all():
                if smp.platform.status and smp.platform.status.name == 'platform_importing':
                    status = 'importing'

        channel.send({
            'text': json.dumps({
                'stream': view,
                'payload': {
                    'request': request,
                    'data': {
                        'status': status,
                        'experiment': experiment.to_dict(),
                        'parsing_experiment': parsing_experiment.to_dict(),
                        'platforms': platforms,
                        'n_samples': n_samples
                    }
                }
            })
        })
Example #9
    def read_privileges(channel_name, view, request, user):
        channel = Channel(channel_name)

        if 'values' in request:
            req = json.loads(request['values'])
            group_permissions = UserGroup.objects.get(
                id=req['group_id']).permissions.all()
            db = CompendiumDatabase.objects.get(id=req['compendium_id'])
            selected = [
                gc.codename for gc in group_permissions.filter(
                    content_type__app_label=db.compendium_nick_name)
            ]
            permissions = command.consumers.GroupCompendiumPermission.get_all_permissions(
                selected)
        else:
            permissions = command.consumers.GroupCompendiumPermission.get_all_permissions(
            )

        channel.send({
            'text':
            json.dumps({
                'stream': view,
                'payload': {
                    'request': request,
                    'data': {
                        'privileges': permissions,
                        'total': len(permissions)
                    }
                }
            })
        })
Example #10
        def wrapped(*args, **kwargs):
            reply_channel_name = self.channel_layer.new_channel(
                '{}?'.format(uri))
            payload = {
                'func_path': func_path,
                'uri': uri,
                'args': args,
                'kwargs': kwargs,
                'reply_channel': reply_channel_name,
            }
            channel = Channel('wamp.events')
            channel.send(payload)

            d = Deferred()

            def cleanup(result):
                self.channels.remove(reply_channel_name)
                del self.reply_channels[reply_channel_name]
                self.log.info('result: {}'.format(result['total']))

            d.addCallback(cleanup)
            self.channels.add(reply_channel_name)
            self.reply_channels[reply_channel_name] = d

            yield d
Example #11
    def read_experiments(channel_name, view, request, user):
        channel = Channel(channel_name)
        compendium = CompendiumDatabase.objects.get(id=request['compendium_id'])

        if 'values' in request and request['values']['text']:
            query_response = Experiment.objects.using(compendium.compendium_nick_name).filter(
                Q(experiment_access_id__icontains=request['values']['text'])
            )
        else:
            query_response = Experiment.objects.using(compendium.compendium_nick_name).all()[:100]
        total = query_response.count()
        exps = []
        for e in query_response:
            ex = e.to_dict()
            ex['valid'] = True
            ex['experiment_access_id_extended'] = e.experiment_access_id + ' - ' + e.experiment_name
            exps.append(ex)

        channel.send({
            'text': json.dumps({
                'stream': view,
                'payload': {
                    'request': request,
                    'data': {
                        'experiments': exps,
                        'total': total
                    }
                }
            })
        })
Example #12
    def read_platform_preview(channel_name, view, request, user):
        channel = Channel(channel_name)

        start = 0
        end = None
        if request['page_size']:
            start = (request['page'] - 1) * request['page_size']
            end = start + request['page_size']

        compendium = CompendiumDatabase.objects.get(id=request['compendium_id'])
        parsing_db = init_parsing(request['compendium_id'], request['values'])
        experiment = Experiment.objects.using(compendium.compendium_nick_name).get(id=request['values'])
        parsing_experiment = ParsingExperiment.objects.using(parsing_db).get(experiment_fk=experiment.id)
        order = ''
        if request['ordering'] == 'DESC':
            order = '-'
        platform_ids = list(set([sample.platform_id for sample in parsing_experiment.parsingsample_set.all()]))
        query_response = ParsingPlatform.objects.using(parsing_db). \
            filter(id__in=platform_ids). \
            filter(Q(platform_name__contains=request['filter']) |
                   Q(description__contains=request['filter']) |
                   Q(platform_access_id__contains=request['filter'])
                   ).order_by(order + request['ordering_value'])
        total = query_response.count()
        query_response = query_response[start:end]
        platforms = []
        status = 'importing' if experiment.status.name == 'experiment_raw_data_importing' else None
        for platform in query_response:
            imported_platform = Platform.objects.using(compendium.compendium_nick_name).get(id=platform.platform_fk)
            if not status and imported_platform.status:
                status = 'importing' if imported_platform.status.name == 'platform_importing' else None
            plt = platform.to_dict()
            plt['experiment_id'] = experiment.id
            plt['reporter_platform'] = ''
            plt['is_imported'] = imported_platform.biofeaturereporter_set.count() > 0
            try:
                p_type = PlatformType.objects.using(compendium.compendium_nick_name). \
                    get(name=plt['platform_type'])
                plt['bio_feature_reporter_name'] = p_type.bio_feature_reporter_name
                plt['bio_features_reporter_fields'] = [
                    field.to_dict() for field in p_type.biofeaturereporterfields_set.all()
                ]
            except Exception as e:
                pass
            platforms.append(plt)

        channel.send({
            'text': json.dumps({
                'stream': view,
                'payload': {
                    'request': request,
                    'data': {
                        'platforms': platforms,
                        'status': status,
                        'total': total
                    }
                }
            })
        })
Example #13
    def get_ontology_nodes(channel_name, view, request, user):
        channel = Channel(channel_name)
        compendium = CompendiumDatabase.objects.get(id=request['compendium_id'])

        values = None
        ontology_id = 0
        ontology = None
        if 'values' in request:
            values = json.loads(request['values'])
            ontology_id = values['ontology_id']
            ontology = Ontology.objects.using(compendium.compendium_nick_name).get(id=ontology_id)

        start = 0
        end = None
        if request['page_size']:
            start = (request['page'] - 1) * request['page_size']
            end = start + request['page_size']

        order = ''
        if request['ordering'] == 'DESC':
            order = '-'

        ordering_value = request['ordering_value']
        if ordering_value not in list(OntologyNode().__dict__.keys()):
            ordering_value = 'id'
        query_response = OntologyNode.objects.using(compendium.compendium_nick_name).filter(
            Q(ontology_id=ontology_id) &
            (Q(id__icontains=request['filter']) |
             Q(original_id__icontains=request['filter']) |
             Q(json__icontains=request['filter']))) \
            .order_by(order + ordering_value)
        total = query_response.count()
        query_response = query_response[start:end]

        columns = ontology.json['columns'] if ontology and ontology.json and 'columns' in ontology.json else []
        nodes = []
        for g in query_response:
            gg = g.to_dict()
            for c in columns:
                if c['data_index'] not in g.json:
                    continue
                if type(g.json[c['data_index']]) == list:
                    gg[c['data_index']] = ' '.join(g.json[c['data_index']])
                else:
                    gg[c['data_index']] = g.json[c['data_index']]
            nodes.append(gg)

        channel.send({
            'text': json.dumps({
                'stream': view,
                'payload': {
                    'request': request,
                    'data': {
                        'nodes': nodes,
                        'total': total
                    }
                }
            })
        })
Example #14
def gameplay_continue_consumer(message):
    data = message.content
    username = data['username']
    nonce = data['nonce']
    reply_channel = Channel(data['reply'])
    room_id = cache.get('player-room:' + username)

    if room_id is None:
        reply_channel.send(reply_error(
            'You are not in room',
            nonce=nonce,
            type='gameplay-continue',
        ))
        return

    cont = data.get('continue', None)
    if cont is None:
        reply_channel.send(reply_error(
            'Continue is required',
            nonce=nonce,
            type='gameplay-continue',
        ))
        return

    if cont is False:
        reply_channel.send(response(
            {},
            nonce=nonce,
        ))
        return

    room = cache.get('room:' + room_id)
    continue_count = 0
    for player in room['players']:
        if player['username'] == username:
            if player['continue'] is True:
                reply_channel.send(reply_error(
                    'You already continued',
                    nonce=nonce,
                    type='gameplay-continue',
                ))
                return
            player['continue'] = True
        if player['continue'] is True:
            continue_count += 1

    reply_channel.send(response(
        {},
        nonce=nonce,
    ))
    cache.set('room:' + room_id, room)

    if continue_count == room['options']['player_number']:
        Group(room_id).send(event('gameplay-continue', {}))
        restart_room(room_id)
        Group(room_id).send(event('gameplay-restart', {}))
        Channel('gameplay-start').send({'room_id': room_id})
Example #15
def ws_add(message):
    print('ws_add_terminal', message['path'])
    workspace_id, terminal_id = get_ids(message)
    c = Channel('terminal.connect')
    c.send({
        'reply_channel': message.reply_channel.name,
        'user_id': message.user.id,
        'workspace_id': workspace_id,
        'terminal_id': terminal_id,
    })
Example #17
    def read_platforms(channel_name, view, request, user):
        channel = Channel(channel_name)

        start = 0
        end = None
        compendium = CompendiumDatabase.objects.get(
            id=request['compendium_id'])
        if request['page_size']:
            start = (request['page'] - 1) * request['page_size']
            end = start + request['page_size']

        order = ''
        if request['ordering'] == 'DESC':
            order = '-'
        # platforms
        query_response = Platform.objects.using(compendium.compendium_nick_name). \
            filter(Q(platform_name__icontains=request['filter']) |
                   Q(description__icontains=request['filter']) |
                   Q(platform_access_id__icontains=request['filter'])).order_by(order + request['ordering_value'])
        total = query_response.count()
        query_response = query_response[start:end]

        platforms = []
        for plt in query_response:
            p = plt.to_dict()
            module_name, class_name = '.'.join(plt.data_source.python_class.split('.')[:-1]), \
                plt.data_source.python_class.split('.')[-1]
            python_class = getattr(importlib.import_module(module_name),
                                   class_name)()
            p['platform_accession_base_link'] = python_class.platform_accession_base_link
            #experiments = set()
            #n_samples = 0
            #n_samples_imported = 0
            #for smp in plt.platform.get_queryset().all():
            #    if smp.rawdata_set.count() > 0:
            #        n_samples_imported += 1
            #    experiments.add(smp.experiment_id)
            #p['n_experiments'] = len(experiments)
            #p['n_samples'] = n_samples
            #p['n_samples_imported'] = n_samples_imported
            platforms.append(p)

        channel.send({
            'text':
            json.dumps({
                'stream': view,
                'payload': {
                    'request': request,
                    'data': {
                        'platforms': platforms,
                        'total': total
                    }
                }
            })
        })
Example #18
    def read_users(channel_name, view, request, user):
        channel = Channel(channel_name)

        start = 0
        end = None
        if request['page_size']:
            start = (request['page'] - 1) * request['page_size']
            end = start + request['page_size']
        users = []
        fields = [
            'id', 'username', 'first_name', 'last_name', 'email', 'last_login',
            'date_joined', 'is_active', 'is_superuser'
        ]
        order = ''
        if request['ordering'] == 'DESC':
            order = '-'
        query_response = User.objects.filter(
            Q(username__contains=request['filter'])
            | Q(first_name__contains=request['filter'])
            | Q(last_name__contains=request['filter'])
            | Q(email__contains=request['filter'])).order_by(
                order + request['ordering_value'])
        total = query_response.count()
        query_response = query_response[start:end]
        for u in query_response:
            user = {
                k: u.__dict__[k]
                for k in fields if k != 'last_login' and k != 'date_joined'
            }
            if u.__dict__['last_login']:
                user['last_login'] = u.__dict__['last_login'].strftime(
                    '%Y-%m-%d %H:%M')
            if u.__dict__['date_joined']:
                user['date_joined'] = u.__dict__['date_joined'].strftime(
                    '%Y-%m-%d %H:%M')
            user['user_groups'] = [{
                'id': g.id,
                'name': g.name
            } for g in u.groups.all()]
            users.append(user)

        channel.send({
            'text':
            json.dumps({
                'stream': view,
                'payload': {
                    'request': request,
                    'data': {
                        'users': users,
                        'total': total
                    }
                }
            })
        })
Example #19
    def read_experiment_platform_files(channel_name, view, request, user):
        channel = Channel(channel_name)

        start = 0
        end = None
        if request['page_size']:
            start = (request['page'] - 1) * request['page_size']
            end = start + request['page_size']

        compendium = CompendiumDatabase.objects.get(id=request['compendium_id'])
        experiment = Experiment.objects.using(compendium.compendium_nick_name).get(id=request['values'])
        parsing_db = init_parsing(request['compendium_id'], request['values'])
        order = ''
        if request['ordering'] == 'DESC':
            order = '-'
        query_response = Platform.objects.using(compendium.compendium_nick_name). \
            filter(id__in=[s.platform.id for s in experiment.sample_set.all()]). \
            filter(Q(platform_name__contains=request['filter']) | Q(description__contains=request['filter'])
                   ).order_by(order + request['ordering_value'])
        total = query_response.count()
        query_response = query_response[start:end]
        platforms = []
        for platform in query_response:
            plt = platform.to_dict()
            reporter_platform_id = ParsingPlatform.objects.using(parsing_db).\
                get(platform_fk=platform.id).reporter_platform
            try:
                plt['reporter_platform'] = Platform.objects.using(compendium.compendium_nick_name).\
                    get(id=reporter_platform_id).platform_access_id
            except Exception as e:
                pass
            plt['status'] = Status.objects.using(compendium.compendium_nick_name). \
                get(name='entity_script_ready').to_dict()
            try:
                plt['status'] = platform.assignedfile_set.all()[0].status.to_dict()
            except Exception as e:
                pass
            plt['parsing_details'] = [assigned_file.to_dict() for assigned_file in platform.assignedfile_set.all()]
            platforms.append(plt)

        channel.send({
            'text': json.dumps({
                'stream': view,
                'payload': {
                    'request': request,
                    'data': {
                        'platforms': platforms,
                        'total': total
                    }
                }
            })
        })
Example #20
    def read_compendia_channel(channel_name, view, request, user):
        channel = Channel(channel_name)

        start = 0
        end = None
        if request['page_size']:
            start = (request['page'] - 1) * request['page_size']
            end = start + request['page_size']
        order = ''
        if request['ordering'] == 'DESC':
            order = '-'
        query_response = CompendiumDatabase.objects.filter(Q(compendium_name__contains=request['filter']) |
                                                           Q(compendium_nick_name__contains=request['filter']) |
                                                           Q(description__contains=request['filter']) |
                                                           Q(html_description__contains=request['filter'])).order_by(
            order + request['ordering_value'])
        total = query_response.count()
        query_response = query_response[start:end]

        compendia = []
        importer_mapping_file_types = []
        # Build the ontology file type list once; converting it inside the loop
        # would try to re-convert already-converted entries on later iterations.
        ontology_file_types = [{'file_type': cls.FILE_TYPE_NAME}
                               for cls in BaseParser.get_parser_classes()]

        permitted_db = command.consumers.GroupCompendiumPermission.get_permitted_db(user)
        for db in query_response:
            if not user.is_staff and not user.is_superuser and db.compendium_nick_name not in permitted_db:
                continue
            compendium = db.to_dict()
            try:
                compendium['bio_features_fields'] = [bff.to_dict() for bff in
                                                     BioFeatureFields.objects.using(db.compendium_nick_name).all()]
                importer_mapping_file_types = [{'file_type': cls.FILE_TYPE_NAME} for cls in
                                               importers.importer_mapping[db.compendium_type.bio_feature_name]]
            except Exception as e:
                pass
            compendium['ontology_file_types'] = ontology_file_types
            compendium['bio_feature_file_types'] = importer_mapping_file_types
            compendia.append(compendium)

        channel.send({
            'text': json.dumps({
                'stream': view,
                'payload': {
                    'request': request,
                    'data': {
                        'compendia': compendia,
                        'total': total
                    }
                }
            })
        })
Example #21
    def read_sample_preview(channel_name, view, request, user):
        channel = Channel(channel_name)

        start = 0
        end = None
        if request['page_size']:
            start = (request['page'] - 1) * request['page_size']
            end = start + request['page_size']

        compendium = CompendiumDatabase.objects.get(id=request['compendium_id'])
        parsing_db = init_parsing(request['compendium_id'], request['values'])
        experiment = Experiment.objects.using(compendium.compendium_nick_name).get(id=request['values'])
        parsing_experiment = ParsingExperiment.objects.using(parsing_db).get(experiment_fk=experiment.id)
        order = ''
        if request['ordering'] == 'DESC':
            order = '-'
        query_response = ParsingSample.objects.using(parsing_db). \
            filter(experiment=parsing_experiment). \
            filter(Q(sample_name__contains=request['filter']) |
                   Q(description__contains=request['filter']) |
                   Q(platform__platform_access_id__contains=request['filter'])
                   ).order_by(order + request['ordering_value'])
        total = query_response.count()
        query_response = query_response[start:end]
        samples = []
        status = 'importing' if experiment.status.name == 'experiment_raw_data_importing' else None
        if not status:
            for smp in experiment.sample_set.all():
                if smp.platform.status and smp.platform.status.name == 'platform_importing':
                    status = 'importing'
        for sample in query_response:
            smp = sample.to_dict()
            smp['experiment_id'] = experiment.id
            smp['reporter_platform'] = Platform.objects.using(compendium.compendium_nick_name).\
                    get(id=sample.reporter_platform).to_dict()
            samples.append(smp)

        channel.send({
            'text': json.dumps({
                'stream': view,
                'payload': {
                    'request': request,
                    'data': {
                        'samples': samples,
                        'status': status,
                        'total': total
                    }
                }
            })
        })
Example #22
    def handle(self, *args, **options):
        # Note the channel name is what is defined in
        # the routing.py file and I am explicitly getting
        # the "default" channel (defined in settings.py)
        c = Channel('websocket.receive',
                    channel_layer=channel_layers['default'])
        # I expect no data with this, but a dict is required
        c.send({
            "text": json.dumps({
                "action": "start_mytask",
                "job_name": "download",
            }),
            "reply_channel": "websocket.send!xxxxxxxxxxxx",
        })
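As the comment notes, 'websocket.receive' has to be wired up in routing.py for this message to be delivered. A hedged sketch of what the receiving side could look like; the consumer name, the 'background.download' channel, and the routing entry are assumptions, not taken from the original project:

    import json

    from channels import Channel
    from channels.routing import route

    def ws_receive(message):
        payload = json.loads(message['text'])
        if payload.get('action') == 'start_mytask':
            # Hand the job off to a background channel, keeping the reply channel
            # so the worker can report back over the websocket.
            Channel('background.download').send({
                'job_name': payload['job_name'],
                'reply_channel': message.reply_channel.name,
            })

    channel_routing = [
        route('websocket.receive', ws_receive),
    ]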
Example #23
class ReplyChannel():
    def __init__(self, channel_id):
        self.reply_channel = Channel(channel_id)
    
    def send(self, event, data):
        """
        wraps the default send method with serialization
        """
        response = {
            "event": event,
            "data": data
        }
        self.reply_channel.send({
            "text": json.dumps(response)
        })
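A hypothetical usage of this wrapper inside a Channels 1.x consumer; the consumer name, the event name, and the echoed payload are illustrative assumptions:

    def ws_echo(message):
        # Wrap the per-client reply channel and send a serialized event back.
        reply = ReplyChannel(message.reply_channel.name)
        reply.send('echo', {'text': message.content.get('text', '')})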
Example #24
    def read_bio_feature_anno(channel_name, view, request, user):
        channel = Channel(channel_name)

        start = 0
        end = None
        compendium = CompendiumDatabase.objects.get(
            id=request['compendium_id'])
        if request['page_size']:
            start = (request['page'] - 1) * request['page_size']
            end = start + request['page_size']

        order = ''
        if request['ordering'] == 'DESC':
            order = '-'
        # bio features
        query_response = BioFeature.objects.using(compendium.compendium_nick_name). \
            filter(Q(name__icontains=request['filter']) |
                   Q(description__icontains=request['filter'])).order_by(order + request['ordering_value'])
        total = query_response.count()
        query_response = query_response[start:end]

        bio_feature = []
        for bf in query_response:
            b = bf.to_dict()
            b['annotation'] = []
            for bfa in bf.biofeatureannotation_set.all():
                ontology = bfa.annotation_value.ontology_node.ontology
                node = bfa.annotation_value.ontology_node
                ann = node.to_dict()
                ann['ontology'] = ontology.to_dict()
                ann['columns'] = ontology.json['columns']
                ann['fields'] = node.json
                b['annotation'].append(ann)
            bio_feature.append(b)

        channel.send({
            'text':
            json.dumps({
                'stream': view,
                'payload': {
                    'request': request,
                    'data': {
                        'bio_feature': bio_feature,
                        'total': total
                    }
                }
            })
        })
Example #25
    def read_admin_options(channel_name, view, request, user):
        channel = Channel(channel_name)

        channel.send({
            'text':
            json.dumps({
                'stream': view,
                'payload': {
                    'request': request,
                    'data': {
                        'admin_options':
                        [o.to_dict() for o in AdminOptions.objects.all()]
                    }
                }
            })
        })
Example #26
    def read_script_tree(channel_name, view, request, user):
        channel = Channel(channel_name)

        path = os.path.dirname(parsing_scripts.__file__)
        path_hierarchy = file_system.path_hierarchy(path, base_path=path, name_filter=request['filter']) # get only subdirectories
        path_hierarchy = [d for d in path_hierarchy['children'] if not d['leaf']] # without files
        path_hierarchy.append({'leaf': True, 'path': '/README', 'text': 'README'})
        channel.send({
            'text': json.dumps({
                'stream': view,
                'payload': {
                    'request': request,
                    'data': path_hierarchy
                }
            })
        })
Example #27
def gameplay_start_consumer(message):
    data = message.content
    room_id = data['room_id']

    room = cache.get('room:' + room_id)
    player_number = room['options']['player_number']

    cards = shuffled_card()

    # dealing
    if player_number == 5:
        cards_per_person = 10

    elif player_number == 6:
        cards_per_person = 8

    else:
        # this is unexpected exception, but can be validated before
        return

    for i in range(player_number):
        dealed_card = cards[:cards_per_person]
        room['players'][i]['cards'] = dealed_card

        reply_channel = Channel(room['players'][i]['reply'])
        data = {
            'cards': dealed_card,
        }
        reply_channel.send(event('gameplay-deal', data))

        del cards[:cards_per_person]

    room['game']['floor_cards'] = cards
    room['game']['state'] = RoomState.BIDDING
    room['game']['player_number'] = room['options']['player_number']

    cache.set('room:' + room_id, room)

    # send bidding event
    event_data = {
        'player': {
            'username': room['players'][0]['username'],
        }
    }
    Group(room_id).send(event('gameplay-bidding', event_data))
def room_ready_consumer(message):
    data = message.content
    reply_channel = Channel(data['reply'])
    nonce = data['nonce']
    username = data['username']
    ready = data.get('ready', None)

    if ready is None:
        reply_channel.send(
            reply_error('No ready', nonce=nonce, type='room-ready'))

    player_room_cache_key = 'player-room:' + username

    with cache.lock('lock:' + player_room_cache_key):
        room_id = cache.get(player_room_cache_key)

        if room_id is None:
            reply_channel.send(
                reply_error('You are currently not in the room',
                            nonce=nonce,
                            type='room-ready'))
            return

    room_cache_key = 'room:' + room_id
    with cache.lock('lock:' + room_cache_key):
        room_cache = cache.get(room_cache_key)

        found = False
        for i, player in enumerate(room_cache['players']):
            if player['username'] == username:
                room_cache['players'][i]['ready'] = ready
                found = True
                break

        if not found:
            reply_channel.send(
                reply_error('You are currently not in the room',
                            nonce=nonce,
                            type='room-ready'))
            return

        cache.set(room_cache_key, room_cache)

        response_data = {
            'ready': ready,
        }

        event_data = {
            'player': username,
            'ready': ready,
        }

        reply_channel.send(response(response_data, nonce=nonce))
        Group(room_id).send(event('room-ready', event_data))
Example #29
    def read_experiment_sample_files(channel_name, view, request, user):
        channel = Channel(channel_name)

        start = 0
        end = None
        if request['page_size']:
            start = (request['page'] - 1) * request['page_size']
            end = start + request['page_size']

        compendium = CompendiumDatabase.objects.get(id=request['compendium_id'])
        experiment = Experiment.objects.using(compendium.compendium_nick_name).get(id=request['values'])
        order = ''
        if request['ordering'] == 'DESC':
            order = '-'
        query_response = Sample.objects.using(compendium.compendium_nick_name).\
            filter(experiment=experiment).\
            filter(Q(sample_name__contains=request['filter']) | Q(description__contains=request['filter'])
                ).order_by(order + request['ordering_value'])
        total = query_response.count()
        query_response = query_response[start:end]
        samples = []
        for sample in query_response:
            smp = sample.to_dict()
            smp['status'] = Status.objects.using(compendium.compendium_nick_name).\
                get(name='entity_script_ready').to_dict()
            try:
                smp['status'] = sample.assignedfile_set.all()[0].status.to_dict()
            except Exception as e:
                pass
            smp['parsing_details'] = [assigned_file.to_dict() for assigned_file in sample.assignedfile_set.all()]
            samples.append(smp)

        channel.send({
            'text': json.dumps({
                'stream': view,
                'payload': {
                    'request': request,
                    'data': {
                        'samples': samples,
                        'total': total
                    }
                }
            })
        })
Example #30
    def read_bio_feature_reporter(channel_name, view, request, user):
        channel = Channel(channel_name)

        start = 0
        end = None
        compendium = CompendiumDatabase.objects.get(
            id=request['compendium_id'])
        if request['page_size']:
            start = (request['page'] - 1) * request['page_size']
            end = start + request['page_size']

        order = ''
        if request['ordering'] == 'DESC':
            order = '-'

        platform = Platform.objects.using(
            compendium.compendium_nick_name).get(id=request['values']['id'])
        # bio features
        query_response = BioFeatureReporter.objects.using(compendium.compendium_nick_name). \
            filter(platform=platform).filter(Q(name__icontains=request['filter']) |
                   Q(description__icontains=request['filter'])).order_by(order + request['ordering_value'])
        total = query_response.count()
        query_response = query_response[start:end]

        bio_feature_reporter = []
        for bfr in query_response:
            b = bfr.to_dict()
            for field in bfr.biofeaturereportervalues_set.all():
                b[field.bio_feature_reporter_field.name] = field.value
            bio_feature_reporter.append(b)

        channel.send({
            'text':
            json.dumps({
                'stream': view,
                'payload': {
                    'request': request,
                    'data': {
                        'bio_feature_reporter': bio_feature_reporter,
                        'total': total
                    }
                }
            })
        })
Example #31
    def read_sample_details(channel_name, view, request, user):
        channel = Channel(channel_name)

        start = 0
        end = None
        if request['page_size']:
            start = (request['page'] - 1) * request['page_size']
            end = start + request['page_size']

        compendium = CompendiumDatabase.objects.get(
            id=request['compendium_id'])
        experiment = Experiment.objects.using(
            compendium.compendium_nick_name).get(id=request['values'])
        order = ''
        if request['ordering'] == 'DESC':
            order = '-'
        query_response = Sample.objects.using(compendium.compendium_nick_name). \
            filter(experiment=experiment). \
            filter(Q(sample_name__icontains=request['filter']) |
                   Q(description__icontains=request['filter']) |
                   Q(platform__platform_access_id__icontains=request['filter'])
                   ).order_by(order + request['ordering_value'])
        total = query_response.count()
        query_response = query_response[start:end]
        samples = []

        for sample in query_response:
            smp = sample.to_dict()
            samples.append(smp)

        channel.send({
            'text':
            json.dumps({
                'stream': view,
                'payload': {
                    'request': request,
                    'data': {
                        'samples': samples,
                        'total': total
                    }
                }
            })
        })
def parse_mentions(message):

    try:
        comment = IssueComment.objects.get(pk=message.content.get('pk'))
    except IssueComment.DoesNotExist:
        logger.error('Comment not found: %s', message)
        return

    mentions = comment.get_mentions()
    users = User.objects.filter(username__in=mentions).distinct()
    orgs = Organisation.objects.filter(short_name__in=mentions).distinct()

    user_ids = set()

    channel = Channel('notifications.send_comment_notification')

    # handle user name mentions
    for u in users:
        user_ids.add(u.pk)
        channel.send({
            'comment_pk': comment.pk,
            'user_pk': u.pk,
            'user_mention': True
        })

    # handle org name mentions
    for org in orgs:
        profiles = Profile.objects.filter(user__membership__org=org)\
            .exclude(user_id__in=user_ids)\
            .select_related('user')

        for p in profiles:
            u = p.user
            if u.pk not in user_ids:
                user_ids.add(u.pk)
                channel.send({
                    'comment_pk': comment.pk,
                    'user_pk': u.pk,
                    'organisation_mention': True,
                    'organisation_pk': org.pk,
                })
    return users
def ws_message(message):
    print('terminal_input', message)
    workspace_id, terminal_id = get_ids(message)
    if message['text'].startswith('@@RESIZE@@'):
        data = message['text'].split('@@RESIZE@@')[1].split(':')
        c = Channel('terminal.resize')
        c.send({
            'reply_channel': message.reply_channel.name,
            'user_id': message.user.id,
            'workspace_id': workspace_id,
            'terminal_id': terminal_id,
            'width': data[0],
            'height': data[1]
            })
    else:
        c = Channel('terminal.input')
        c.send({
            'reply_channel': message.reply_channel.name,
            'user_id': message.user.id,
            'workspace_id': workspace_id,
            'terminal_id': terminal_id,
            'input': message['text']
            })
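For reference, the resize branch above parses text frames of the form '@@RESIZE@@<width>:<height>'; everything after the marker is split on ':' into width and height. A hypothetical frame and how it decomposes (the values are illustrative):

    frame = '@@RESIZE@@80:24'
    width, height = frame.split('@@RESIZE@@')[1].split(':')
    # width == '80', height == '24' (still strings; the consumer forwards them as-is)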