Example No. 1
def send_wait_serial(data):
    app.logger.info(f'Sending {data} to PLC')
    if app.use_flask_serial:
        plc_ser.on_send(data + '\r\n')
    else:
        plc_ser.send_data(data)
        socketio.start_background_task(wait_for_start)
Example No. 2
    def on_background_start(self, data):
        _id = session['socket_id']
        try:
            if running_process[_id] is False:
                running_process[_id] = True
                socketio.start_background_task(background_thread, _id, data)
        except KeyError:
            running_process[_id] = True
            socketio.start_background_task(background_thread, _id, data)

        emit('connection_response', {
            'data': 'Connected',
            'id': session['socket_id']
        })
Example No. 3
    def create_game(self, game_id, score_max_distance, max_response_time, leaderboard_answer_count,
                    between_turns_duration):
        app.logger.debug('Starting a new game with the id {game_id}'.format(game_id=game_id))

        # Create the game
        game = Game(game_id, score_max_distance, max_response_time, leaderboard_answer_count, between_turns_duration)

        # Register it
        self.games.append(game)

        # Start the game
        socketio.start_background_task(target=self.run_game, game=game)

        return game
Example No. 4
def test_connect():
    app.data.console_queue.put("connected")
    app.data.console_queue.put(request.sid)
    if app.uithread is None:
        app.uithread = socketio.start_background_task(
            app.UIProcessor.start, current_app._get_current_object())
        app.uithread.start()

    if not app.data.connectionStatus:
        app.data.console_queue.put(
            "Attempting to re-establish connection to controller")
        app.data.serialPort.openConnection()

    socketio.emit("my response", {"data": "Connected", "count": 0})
    address = app.data.hostAddress
    data = json.dumps({"hostAddress": address})
    print(data)
    socketio.emit(
        "message",
        {
            "command": "hostAddress",
            "data": data,
            "dataFormat": "json"
        },
        namespace="/MaslowCNC",
    )
    if app.data.pyInstallUpdateAvailable:
        app.data.ui_queue1.put("Action", "pyinstallUpdate", "on")
Example No. 5
    def reload_status():

        ecoe_configs = Manager.get_ecoe_config_files()

        for ecoe_config in ecoe_configs:
            # 1. Create configuration in app memory
            ecoe = Manager.create_config(ecoe_config)

            # 2. Load objects
            for e_round in ecoe.rounds:
                try:
                    round_status = Manager.load_status_from_file(
                        e_round.status_filename)

                    if len(round_status) > 0:

                        chrono_status = Manager.load_status_from_file(
                            e_round.chrono.status_filename)

                        if len(chrono_status) > 0:
                            e_round.chrono.minutes = chrono_status['minutes']
                            e_round.chrono.seconds = chrono_status['seconds']
                            e_round.chrono.state = chrono_status['state']

                        ecoe.threads.append(
                            socketio.start_background_task(
                                target=e_round.start,
                                state=round_status['state'],
                                current_rerun=round_status['current_rerun'],
                                idx_schedule=round_status[
                                    'current_idx_schedule']))
                except Exception:
                    pass
Example No. 6
def on_connect(self):
    global thread
    with thread_lock:
        if thread is None:
            thread = socketio.start_background_task(
                target=background_thread)
    emit('my_response', {'data': 'Connected', 'count': 0})
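Most of the connect handlers in this collection repeat the same idea: a module-level thread variable guarded by a lock so the background task is started at most once per process. For reference, here is a minimal, self-contained sketch of that pattern, assuming nothing beyond Flask and Flask-SocketIO; the background_thread body and the event names are illustrative and not taken from any example above.

# Minimal, self-contained sketch of the "start one background task on the
# first connect" pattern; names such as background_thread are illustrative.
from threading import Lock

from flask import Flask
from flask_socketio import SocketIO, emit

app = Flask(__name__)
socketio = SocketIO(app)

thread = None
thread_lock = Lock()


def background_thread():
    """Push a counter to every connected client once per second."""
    count = 0
    while True:
        socketio.sleep(1)                      # cooperative sleep from Flask-SocketIO
        count += 1
        socketio.emit('my_response',
                      {'data': 'Server generated event', 'count': count})


@socketio.on('connect')
def on_connect():
    global thread
    with thread_lock:                          # serialize concurrent connects
        if thread is None:                     # only the first connect starts the task
            thread = socketio.start_background_task(background_thread)
    emit('my_response', {'data': 'Connected', 'count': 0})


if __name__ == '__main__':
    socketio.run(app)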
Example No. 7
def test_connect():
    global thread
    print('Client connected')
    if not thread.is_alive():
        thread_stop_event.clear()
        print("Starting Thread")
        thread = socketio.start_background_task(sendValues)
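The example above only restarts its task when the previous one has stopped, using a threading.Event as the stop signal. Below is a minimal sketch of that stop-event handshake, assuming the usual Flask-SocketIO setup; send_values and its payload are placeholders, and the single-thread guard is omitted for brevity.

# Sketch of the stop-event handshake used above; send_values and its payload
# are placeholders, and the "only one thread" guard is left out for brevity.
from threading import Event

from flask import Flask
from flask_socketio import SocketIO

app = Flask(__name__)
socketio = SocketIO(app)

thread_stop_event = Event()


def send_values():
    # Loop until another handler sets the stop event.
    while not thread_stop_event.is_set():
        socketio.emit('values', {'reading': 42})   # placeholder payload
        socketio.sleep(1)


@socketio.on('connect')
def on_connect():
    thread_stop_event.clear()                      # allow the loop to run
    socketio.start_background_task(send_values)


@socketio.on('disconnect')
def on_disconnect():
    thread_stop_event.set()                        # ask the loop to stop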
Example No. 8
    def start(self):
        """
        start to listen to the serial port of the Arduino
        """
        # opening the serial
        if not self.is_open():
            s = self.open_serial()

        # configure the serial
        if self.setpoint:
            self.set_setpoint()
        time.sleep(0.2)
        if self.gain:
            self.set_gain()
        time.sleep(0.2)
        if self.integral:
            self.set_integral()
        time.sleep(0.2)
        if self.diff:
            self.set_differential()

        # starting the listener
        if not self.is_alive():
            self.switch = True
            db.session.commit()
            thread = socketio.start_background_task(target=do_work, id=self.id)
            self.thread_id = thread.ident
            db.session.commit()
            workers.append(thread)
        else:
            print('Already running')
Example No. 9
    def start(self):
        """
        start to listen to the serial port of the Arduino
        """
        # test if everything is open
        if not self.is_open():
            print('No connection')
            return

        # configure the arduino

        if self.setpoint:
            self.set_setpoint()
        time.sleep(0.2)
        if self.gain:
            self.set_gain()
        time.sleep(0.2)
        if self.integral:
            self.set_integral()
        time.sleep(0.2)
        if self.diff:
            self.set_differential()

        # starting the listener
        if not self.is_alive():
            self.switch = True
            db.session.commit()
            thread = socketio.start_background_task(
                target=do_web_work, id=self.id, app=current_app._get_current_object())
            self.thread_str = str(thread.ident)
            db.session.commit()
            workers.append(thread)
        else:
            print('Already running')
Example No. 10
def tool_connect():
    print('Client connected')
    global thread
    with thread_lock:
        if thread is None:
            thread = socketio.start_background_task(target=background_thread)
    emit("server_log", {'data': 'Connect'})
Example No. 11
def solver():
  global current_question, current_game, timeout
  current_question = 0

  if "timer" in current_game:
    timeout = int(current_game["timer"]["timeout"])
    if( timeout >= 0):
        socketio.start_background_task(target=timer)

  try:
    import RPi.GPIO as GPIO
    GPIO.setmode(GPIO.BCM)
  except Exception:
    response = "There was an error setting up GPIO."
    logging.error(response)
    return
  
  logger.info("""Current game is: "{0}" """.format(current_game))
  current_qa = current_game['questions'][current_question]
  logger.info("""Current question is: "{0}" """.format(current_qa['question']))
  logger.info("""Current right answer is: "{0}" """.format(current_qa['answer']))

  #logger.info("timer started")
  #socketio.sleep(5)
  #logger.info("timer expired")
  #socketio.emit('next question', current_question +1, namespace='/test')
  #return

  questions_n = len(current_game['questions'])
  for n in range(0, questions_n):
    pin = current_qa['answer'][n]
    GPIO.setup(int(pin), GPIO.IN)
    
    while game_in_progress: 
      if GPIO.input(int(pin)) == True:
        logger.info("Detected the correct pin.")
        current_question = current_question + 1
        socketio.emit('next question', current_question, namespace='/test')
        break
      else:
        logger.info("Detected the wrong pin.")
        current_question = 0
        socketio.emit('lost', current_question, namespace='/test')
        return
    

  socketio.emit('win', current_question, namespace='/test')
  return
Example No. 12
def registerPatientEvent(message):
    registrationToken = message["registrationToken"]
    windowToken = message["windowToken"]
    global thread
    with thread_lock:
        #if thread is None:
        thread = socketio.start_background_task(background_thread,
                                                registrationToken, windowToken)
Example No. 13
def test_connect():
    global thread
    #TODO change the idea of sending the user's name
    name = tg_module.dia[-1].obj.name
    socketio.emit('info_responder', {'self_name': name}, namespace='/main')
    with locky:
        if thread is None:
            thread = socketio.start_background_task(target=tg_module.resender)
Example No. 14
    def on_connect(self):
        global thread
        with thread_lock:
            if thread is None:
                thread = socketio.start_background_task(active911_thread, \
                            current_app._get_current_object())

        emit('my_response', {'data': 'Connected', 'count': 0})
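Several handlers in this collection pass current_app._get_current_object() into the background task. current_app is a context-bound proxy that stops working once the request that scheduled the task has finished, so the task needs the real application object to push its own context. A minimal sketch, with an illustrative task body:

# Sketch of handing the real application object to a background task so it
# can push its own app context; the polling body is illustrative only.
from flask import Flask, current_app
from flask_socketio import SocketIO

app = Flask(__name__)
socketio = SocketIO(app)


def poll_status(app_obj):
    # The request context is gone by the time this runs, so push a fresh
    # application context from the real app object that was handed over.
    with app_obj.app_context():
        while True:
            socketio.sleep(5)
            app_obj.logger.info('polling...')  # stand-in for the real work


@socketio.on('connect')
def on_connect():
    # current_app is a proxy bound to the active context; unwrap it here,
    # inside the handler, and pass the real object to the task.
    # (The usual "start only once" guard is omitted for brevity.)
    socketio.start_background_task(poll_status, current_app._get_current_object())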
Example No. 15
def test_connect(message):
    global thread
    with thread_lock:
        if thread is None:
            thread = socketio.start_background_task(
                target=background_thread)
            emit('my_request', {'data': 'Connected', 'sid': request.sid})
Example No. 16
def pilab_connect():
    ''' 
    Start the background thread upon connecting
    '''
    global thread
    with thread_lock:
        if thread is None:
            thread = socketio.start_background_task(target=background_thread)
Example No. 17
def test_connect():
    global thread
    with thread_lock:
        if thread is None:
            thread = socketio.start_background_task(target=get_messages)

    emit('my_response', {'data': 'Connected', 'count': 0})
    print('connected')
Example No. 18
def start_job_manager() -> None:
    """
    Starts the main job manager thread for the backend system.

    :return:
    """
    job_thread = socketio.start_background_task(target=_spawn_jobqueue)  # type: Thread
    job_thread.start()
Example No. 19
def test_connect():
    print("----Client Connected------")
    global thread
    ms = {"Message": "Connected to server"}
    with thread_lock:
        if thread is None:
            thread = socketio.start_background_task(task_scheduler)
    emit('get_tasks_response', {"data": json.dumps(ms)})
Example No. 20
def on_connect(self):
    from app import socketio
    global thread
    emit("connected", "connected")
    socketio.sleep(5)
    if thread is None:
        with current_app.app_context():
            thread = socketio.start_background_task(
                background_thread, socketio, request.sid)
Example No. 21
def test_connect():
    # Create the threads once (singleton pattern) and publish messages on a schedule
    global thread1, thread2
    with lock:
        if thread1 is None:
            thread1 = socketio.start_background_task(target=background_task)
            thread2 = socketio.start_background_task(target=background_task2)

    # Unread messages
    if isinstance(current_user.is_anonymous, bool):
        return
    else:
        # Unread private messages
        message = Message.query.filter_by(toId=current_user.id,
                                          hasRead=0).all()
        # feed
        # h = a-b if a>b else a+b
        emit('noreadmsg', {'data': '' if len(message) == 0 else len(message)})
Example No. 22
def start_background(target, secs, resp_id):
    socket_thread = socketio.start_background_task(
        target=target, 
        socketio=socketio, 
        secs=secs,
        resp_id=resp_id
    )

    return socket_thread
Example No. 23
def add_samples_by_csv():
    if request.method == 'OPTIONS':
        return jsonify({'status': 'Success'})

    @copy_current_request_context
    def bg_add_samples_by_csv(sample_file, project_id):
        task = 'add_samples'
        task_notifier = TaskNotifier(task=task, namespace=SOCK_NAMESPACE)
        task_notifier.emit_task_start()

        project = GenotypingProject.query.get(project_id)
        if not project:
            task_notifier.emit_task_failure(message="Genotyping Project Not Found. Restart Application.")

        sample_ids = []
        try:
            task_notifier.emit_task_progress(progress={
                'style': 'indeterminate',
                'total': 1,
                'current_state': 1,
                'message': 'Adding Samples...'
            })
            r = CaseInsensitiveDictReader(sample_file)

            for line in r:
                barcode = line['sample_id']
                sample = Sample.query.filter(Sample.barcode == barcode).first()
                if not sample:
                    task_notifier.emit_task_failure(message=f"Sample with barcode '{barcode}' does not exist.")
                    return
                sample_ids.append(sample.id)
            project.add_samples(sample_ids)
        except KeyError:
            task_notifier.emit_task_failure(message="File Malformed. Header must be 'Sample ID'")
            return

        project.last_updated = datetime.utcnow()

        task_notifier.emit_task_success(message="Successfully Added Samples.")

    sample_file = request.files.getlist('files')[0]
    sample_file_stream = io.StringIO(sample_file.stream.read().decode("utf-8"), newline=None)
    project_id = request.form.get('project_id')
    socketio.start_background_task(bg_add_samples_by_csv, sample_file_stream, project_id)
    return jsonify({'status': 'Success'})
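This handler, like the CSV and plate uploads further down, schedules the work with start_background_task and returns the HTTP response immediately; the inner function is wrapped with copy_current_request_context so request data is still readable when the task runs. A minimal sketch of that shape follows; the route name and the task body are placeholders.

# Sketch of "schedule the work, answer the request immediately": the inner
# function is decorated with copy_current_request_context so request data is
# still readable when it runs in the background. The task body is a placeholder.
from flask import Flask, copy_current_request_context, jsonify, request
from flask_socketio import SocketIO

app = Flask(__name__)
socketio = SocketIO(app)


@app.route('/import', methods=['POST'])
def start_import():
    @copy_current_request_context
    def bg_import(payload):
        # Runs after the response below has been sent, but the decorator
        # copied the request context, so request.form is still available.
        project_id = request.form.get('project_id')
        socketio.emit('import_progress',
                      {'project_id': project_id,
                       'lines': len(payload.splitlines())})

    data = request.files['files'].stream.read().decode('utf-8')
    socketio.start_background_task(bg_import, data)
    return jsonify({'status': 'Success'})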
Example No. 24
def log_connect():
    app.data.console_queue.put("connected to log")
    app.data.console_queue.put(request.sid)
    if app.logstreamerthread is None:
        app.logstreamerthread = socketio.start_background_task(
            app.LogStreamer.start, current_app._get_current_object()
        )
        app.logstreamerthread.start()

    socketio.emit("my response", {"data": "Connected", "count": 0}, namespace="/MaslowCNCLog")
Example No. 25
def test_connect():
    print("connected")
    print(request.sid)
    if app.uithread is None:
        app.uithread = socketio.start_background_task(
            app.UIProcessor.start, current_app._get_current_object()
        )
        app.uithread.start()

    socketio.emit("my response", {"data": "Connected", "count": 0})
Example No. 26
def test_connect():
    time_dict()
    global thread
    if thread is None:
        # Once any client is connected, the background_thread function starts in loop
        thread = socketio.start_background_task(target=background_thread)

    socketio.emit('my_response',
                  {'data': 'Message from server', 'time': time},
                  namespace='/test')
Example No. 27
def unload():
    print('close')
    sid = int(request.headers['Referer'].split('_')[2])
    cid = current_user.id
    room = get_room(sid, cid)
    out_room(room, sid, cid)
    # save message to file
    global thread
    with thread_lock:
        if thread is None:
            thread = socketio.start_background_task(msg_to_file, room)
Example No. 28
def watchdog_connect():
    app.data.console_queue.put("watchdog connected")
    app.data.console_queue.put(request.sid)
    socketio.emit("connect", namespace="/WebMCP")
    if app.mcpthread is None:
        app.data.console_queue.put("going to start mcp thread")
        app.mcpthread = socketio.start_background_task(
            app.data.mcpProcessor.start, current_app._get_current_object())
        app.data.console_queue.put("created mcp thread")
        app.mcpthread.start()
        app.data.console_queue.put("started mcp thread")
Example No. 29
def test_connect(message):
    """
    Push updates to the client asynchronously from a background thread.
    :param message:
    :return:
    """
    global thread
    with thread_lock:
        if thread is None:
            thread = socketio.start_background_task(target=background_thread)
            emit('my_request', {'data': 'Connected', 'sid': request.sid})
Example No. 30
def test_connect():
	global thread
	if thread is None:
		thread = socketio.start_background_task(target=background_thread)
	
	for c in current_user.chats:
		join_room(str(c.chat.id))
		string=readLog(str(c.chat.id))
		for s in string[1:-1]:
			mess = split("\t", s)
			#if mess[2] is not None:
			emit('my_response', {'data': mess[2], 'user': mess[0], 'room': str(c.chat.id), 'date':mess[1]})
Example No. 31
def client_connect():
    global camera_streams_thread
    global display_thread

    host = current_app.config['FTP_HOST']
    user = current_app.config['FTP_USER']
    password = current_app.config['FTP_PASSWORD']

    with thread_lock:
        if display_thread is None:
            ftp_client = FtpClient(host, user, password)
            display_thread = socketio.start_background_task(
                display_images, current_app._get_current_object(), ftp_client)
        if camera_streams_thread is None:
            ftp_client = FtpClient(host, user, password)
            image_downloader = ImageDownloader()
            camera_streams_thread = socketio.start_background_task(
                camera_streams, current_app._get_current_object(), ftp_client,
                image_downloader)

    emit('server_event', {'data': 'Client connected', 'count': 0})
Example No. 32
def connect():
    """new client connected"""

    if current_app.config['child_pid']:
        # already started child process, don't start another
        return

    # create child process attached to a pty we can read from and write to
    (child_pid, fd) = pty.fork()
    if child_pid == 0:
        # this is the child process fork.
        # anything printed here will show up in the pty, including the output
        # of this subprocess
        print('Using pid %i ...' % os.getpid())
        print('Starting subcommand %s ...' % current_app.config['CMD'])
        sys.stdout.flush()
        subprocess.run(current_app.config['CMD'])
        print('Exit the child!')
        sys.stdout.flush()
        #sys.exit(0)
        os._exit(0)  # quit the child process  leaves the process as zombie ...
    else:
        # this is the parent process fork.
        # store child fd and pid
        msg = 'Create a child process pid={}'.format(child_pid)
        current_app.logger.info(msg)

        # copy the data to the app context
        current_app.config['fd'] = fd
        current_app.config['child_pid'] = child_pid
        set_winsize(fd, 50, 50)
        #cmd = " ".join(shlex.quote(c) for c in current_app.config["cmd"])
        cmd = current_app.config['CMD']
        #print('child pid is', child_pid)
        #print(
        #    f'starting background task with command `{cmd}` to continously read '
        #    'and forward pty output to client'
        #)
        socketio.start_background_task(target=read_and_forward_pty_output,
                                       app=current_app._get_current_object())
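The example above shows only the parent side; the read_and_forward_pty_output task it starts is not included. The following is a hedged sketch of what such a reader typically does, assuming the fd stored in app.config by the code above: poll the pty and forward the child's output to the client. The 'pty-output' event name and the buffer size are assumptions, not taken from the example.

# Illustrative reader for the pty example above (not the original code):
# poll the pty file descriptor stored in app.config and forward anything
# the child process wrote to the connected clients.
import os
import select

from flask import Flask
from flask_socketio import SocketIO

flask_app = Flask(__name__)
socketio = SocketIO(flask_app)


def read_and_forward_pty_output(app):
    """Poll the pty and push its output to the browser (illustrative)."""
    max_read_bytes = 1024 * 20
    while True:
        socketio.sleep(0.01)                       # yield to the server loop
        fd = app.config.get('fd')
        if fd:
            readable, _, _ = select.select([fd], [], [], 0)
            if readable:
                output = os.read(fd, max_read_bytes).decode(errors='ignore')
                socketio.emit('pty-output', {'output': output})   # assumed event name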
Example No. 33
def start(self):
    """
    start to listen to the serial port of the Arduino
    """
    print('Starting the listener.')
    if not self.switch:
        self.switch = True
        db.session.commit()
        thread = socketio.start_background_task(target=do_work, cam_id=self.id)
        self.thread_id = thread.ident
        db.session.commit()
        workers.append(thread)
    else:
        print('Already running')
Example No. 34
def upload_samples():
    if request.method == 'OPTIONS':
        return jsonify({'status': 'Success'})

    @copy_current_request_context
    def bg_upload_samples(sample_file):
        task = 'upload_samples'
        task_notifier = TaskNotifier(task=task, namespace=SOCK_NAMESPACE)
        task_notifier.emit_task_start()

        with open(sample_file, 'r') as f:
            try:
                r = CaseInsensitiveDictReader(f)
                entries = list(r)
                id_field = next(_ for _ in ['barcode', 'sample_id', 'id'] if _ in r.fieldnames)
                designation_field = next(_ for _ in ['designation'] if _ in r.fieldnames)
                task_notifier.emit_task_progress(progress={
                    'style': 'determinate',
                    'total': len(entries),
                    'current_state': 1,
                    'message': 'Creating Samples...'
                })
                for idx, entry in enumerate(entries):
                    barcode = entry[id_field].strip()
                    designation = entry.get(designation_field, 'sample').strip()
                    if barcode:
                        sample = Sample(barcode=barcode, designation=designation)
                        db.session.add(sample)
                    task_notifier.emit_task_progress(progress={
                        'style': 'determinate',
                        'total': len(entries),
                        'current_state': idx + 1,
                        'message': 'Creating Samples...'
                    })
            except UnicodeDecodeError:
                task_notifier.emit_task_failure(
                    message="Sample File Malformed. Must be CSV File."
                )
                db.session.rollback()
                return
            except StopIteration:
                task_notifier.emit_task_failure(
                    message="Sample File Malformed. Sample Identifier Must be [('Barcode', 'Sample ID', or 'ID'), "
                            "'Designation']. "
                )
                db.session.rollback()
                return
            except (sqlite3.IntegrityError, sqlalchemy.exc.IntegrityError):
                task_notifier.emit_task_failure(message=f'{id_field} Must Be Unique.')
                db.session.rollback()
                return
            except AssertionError:
                task_notifier.emit_task_failure(
                    message="Sample File Malformed. Designation must be one of ['Negative Control', 'Positive "
                            "Control', 'Sample'] "
                )
                db.session.rollback()
                return
            except Exception as e:
                # print(e)
                task_notifier.emit_task_failure(
                    message="Something Bad Happened Creating Samples... Restart App"
                )
                return
        task_notifier.emit_task_success()

    sample_file = request.files.getlist('files')[0]
    handle, tmpfile_path = tempfile.mkstemp()
    tmpfile = os.fdopen(handle, mode='wb')
    tmpfile.write(sample_file.stream.read())
    tmpfile.close()

    socketio.start_background_task(bg_upload_samples, tmpfile_path)
    return jsonify({'status': 'Success'})
Example No. 35
def add_controls_by_csv():
    if request.method == 'OPTIONS':
        return jsonify({'status': 'Success'})

    @copy_current_request_context
    def bg_add_controls_by_csv(control_file, project_id):
        task = 'add_controls'
        task_notifier = TaskNotifier(task=task, namespace=SOCK_NAMESPACE)
        task_notifier.emit_task_start()

        project = QuantificationBiasEstimatorProject.query.get(project_id)
        if not project:
            task_notifier.emit_task_failure(
                message="Quantification Bias Estimator Project Not Found. Restart Application."
            )
            return

        task_notifier.emit_task_progress(progress={
            'style': 'indeterminate',
            'total': 1,
            'current_state': 1,
            'message': 'Associating Controls...'
        })
        r = CaseInsensitiveDictReader(control_file)
        control_map = {}

        for d in r:
            try:
                barcode = d.pop('barcode')
            except KeyError:
                task_notifier.emit_task_failure(message="CSV file malformed. Barcode column not found.")
                return

            if not barcode:
                task_notifier.emit_task_failure(message='CSV file malformed. Barcode field empty')
                return

            sample_id = Sample.query.filter(Sample.barcode == barcode).value(Sample.id)

            if not sample_id:
                task_notifier.emit_task_failure(message=f'Sample with barcode {barcode} does not exist.')
                return

            sample_already_present = db.session.query(
                ControlSampleAssociation.query
                    .join(ProjectSampleAnnotations)
                    .filter(ProjectSampleAnnotations.project_id == project_id,
                            ProjectSampleAnnotations.sample_id == sample_id)
                    .exists()
            ).scalar()

            if sample_already_present:
                task_notifier.emit_task_failure(message=f'Sample with barcode {barcode} already in project')
                return

            controls_and_props = d.values()
            controls_and_props = list(map(lambda _: _.strip().split(';'), controls_and_props))
            controls = []
            for control_and_prop in controls_and_props:
                if len(control_and_prop) > 1:
                    control, prop = control_and_prop
                    try:
                        prop = float(prop)
                    except ValueError:
                        task_notifier.emit_task_failure(
                            message=f"Failed to associate controls. {control} proportion field malformed."
                        )
                        return
                    try:
                        c = Control.query.filter(Control.barcode == control).filter(
                            Control.bin_estimator_id == project.bin_estimator_id).one()
                    except NoResultFound:
                        task_notifier.emit_task_failure(
                            message=f"Failed to find control {control}. "
                                    f"Make sure it exists for associated Bin Estimator."
                        )
                        return
                    controls.append((c.id, prop))
                    if controls:
                        control_map[sample_id] = controls
        sample_ids = control_map.keys()
        project.add_samples(sample_ids)
        # db.session.flush()
        sample_annotation_ids = project.sample_annotations.values(
            ProjectSampleAnnotations.id, ProjectSampleAnnotations.sample_id
        )
        for sa_id, sample_id in sample_annotation_ids:
            if sample_id in control_map:
                try:
                    project.assign_controls(sa_id, control_map[sample_id])
                except BadProportions:
                    db.session.rollback()
                    task_notifier.emit_task_failure(message="Control proportions do not sum to 1")
                    return
        task_notifier.emit_task_success(message="Successfully Added Samples.")

    control_file = request.files.getlist('files')[0]
    control_file_stream = io.StringIO(control_file.stream.read().decode('utf-8'), newline=None)
    project_id = request.form.get('project_id')
    socketio.start_background_task(bg_add_controls_by_csv, control_file_stream, project_id)
    return jsonify({'status': 'Success'})
Example No. 36
def upload_plate_map():
    @copy_current_request_context
    def bg_upload_plate_map(plate_map_file, plate_id, create_non_existent_samples):
        socketio.sleep()
        task = 'upload_plate_map'
        task_notifier = TaskNotifier(task=task, namespace=SOCK_NAMESPACE, plate_id=plate_id)
        task_notifier.emit_task_start()

        plate = Plate.query.get(plate_id)
        if not plate:
            task_notifier.emit_task_failure(message="No Plate Map Uploaded")
        else:
            plate_map = csv.DictReader(plate_map_file)
            if 'well' not in [_.lower() for _ in plate_map.fieldnames]:
                task_notifier.emit_task_failure(message="Plate Map Malformed.")
                return

            locus_labels = [_.strip() for _ in plate_map.fieldnames if _.lower() not in ['', 'well']]
            clear_plate_map(plate_id)
            stale_tracker = dict()
            plate_map = list(plate_map)
            total_steps = len(plate_map)
            for idx, entry in enumerate(plate_map):
                socketio.sleep()
                well_label = entry['Well'].strip()
                for locus_label in locus_labels:
                    socketio.sleep()
                    sample_barcode = entry[locus_label].strip()

                    if sample_barcode:
                        sample = Sample.query.filter(Sample.barcode == sample_barcode).one_or_none()
                        locus = Locus.query.filter(Locus.label == locus_label).one_or_none()

                        if not sample and create_non_existent_samples:
                            sample = Sample(barcode=sample_barcode, designation='sample')
                            db.session.add(sample)

                        if not sample:
                            task_notifier.emit_task_failure(
                                message=f"Sample with barcode '{sample_barcode}' does not exist."
                            )
                            db.session.rollback()
                            return

                        if not locus:
                            task_notifier.emit_task_failure(
                                message=f"Locus with label '{locus_label}' does not exist."
                            )
                            db.session.rollback()
                            return

                        projects = get_projects_with_samples(sample.id, locus.id)
                        well = plate.wells_dict[well_label]
                        channel = well.channels_dict[locus.color]
                        channel.add_locus(locus.id)
                        channel.add_sample(sample.id)
                        for project in projects:
                            if not channel_in_project(project.id, channel.id):
                                project.add_channel(channel.id)
                                if not (project.id, locus.id,) in stale_tracker:
                                    lp = ProjectLocusParams.query.filter(
                                        ProjectLocusParams.project_id == project.id
                                    ).filter(
                                        ProjectLocusParams.locus_id == locus.id
                                    ).one()

                                    lp.scanning_parameters_stale = True
                                    stale_tracker[(project.id, locus.id)] = True

                        task_notifier.emit_task_progress(
                           progress={
                               'style': 'determinate',
                               'total': total_steps,
                               'current_state': idx + 1,
                               'message': f'Processing {sample_barcode} at {locus_label}...'
                           }
                        )

            task_notifier.emit_task_success(message="Plate Map Loaded Successfully")

    plate_map_file = request.files.getlist('files')[0]
    plate_map_stream = io.StringIO(plate_map_file.stream.read().decode("utf-8"), newline=None)
    create_non_existent_samples = request.form.get('create_non_existent_samples') == 'true'
    plate_id = int(request.form.get('plate_id'))

    socketio.start_background_task(bg_upload_plate_map, plate_map_stream, plate_id, create_non_existent_samples)
    return jsonify({'status': "Success"})
Example No. 37
def upload_plates():
    if request.method == 'OPTIONS':
        return jsonify({'status': 'Success'})

    @copy_current_request_context
    def bg_upload_plates(plate_files, ladder_id):
        extracted_plates = []
        ladder = Ladder.query.get(ladder_id)
        task = 'upload_plate'
        task_notifier = TaskNotifier(task=task, namespace=SOCK_NAMESPACE, ladder_id=ladder_id)

        task_notifier.emit_task_start()

        for idx, plate_zip_file in enumerate(plate_files):
            try:
                with open(plate_zip_file, 'rb') as plate_zip:
                    extracted_plate = ExtractedPlate.from_zip_and_calculate_base_sizes(
                        zip_file=plate_zip,
                        ladder=ladder.base_sizes,
                        color=ladder.color,
                        base_size_precision=ladder.base_size_precision,
                        sq_limit=ladder.sq_limit,
                        filter_parameters=ladder.filter_parameters,
                        scanning_parameters=ladder.scanning_parameters
                    )

                    if not plate_hash_already_exists(extracted_plate.plate_hash):
                        extracted_plates.append(extracted_plate)
                        task_notifier.emit_task_progress(progress={
                                               'style': 'determinate',
                                               'total': len(plate_files) + 2,
                                               'current_state': idx + 1,
                                               'message': f'Parsing {extracted_plate.label}...',
                                           })
                        socketio.sleep()
                    else:
                        task_notifier.emit_task_progress(progress={
                            'style': 'determinate',
                            'total': len(plate_files),
                            'current_state': idx + 1,
                            'message': f"Cannot Process {extracted_plate.label}, Already Exists In Database."
                        })
                        socketio.sleep()
            except Exception as e:
                # print("Exception Not Caught", e)
                task_notifier.emit_task_progress(progress={
                    'style': 'determinate',
                    'total': len(plate_files),
                    'current_state': idx + 1,
                    'message': f"Cannot Process {plate_zip.filename}",
                })
                socketio.sleep()
            finally:
                os.remove(plate_zip_file)

        task_notifier.emit_task_progress(progress={
            'style': 'determinate',
            'total': len(plate_files) + 2,
            'current_state': len(plate_files) + 1,
            'message': f'Saving Plate Data...',
        })
        socketio.sleep()

        submitted_plates = [Plate.from_extracted_plate(plate, ladder) for plate in extracted_plates]
        db.session.commit()

        socketio.sleep()
        task_notifier.emit_task_success(message={'ids': [plate.id for plate in submitted_plates]})
        socketio.sleep()

    plate_zips = request.files.getlist('files')
    files = []
    for f in plate_zips:
        handle, tmpfile_path = tempfile.mkstemp()
        tmpfile = os.fdopen(handle, mode='wb')
        tmpfile.write(f.stream.read())
        tmpfile.close()
        files.append(tmpfile_path)

    ladder_id = request.form.get('ladder_id')
    socketio.start_background_task(bg_upload_plates, files, ladder_id)
    return jsonify({'status': "Success"})