Example #1
def main():
    if len(sys.argv) == 1:
        sys.argv.append('-h')

    args = docopt(__doc__)
    with ILock('gitlinks'):
        execute(args)
Example #2
def run_conversions(args):
    line = None
    with ILock(DB_CONV_LOCK):
        if os.path.exists(DB_CONV_FILE):
            with open(DB_CONV_FILE, "r") as f:
                line = f.readline()
            if line:
                remove_first_line(DB_CONV_FILE)
    if line:
        project, sample, protocol, organism = map(str.strip, line.split('\t'))
        ps = (project, sample, protocol, organism)
        rawfile = get_sample_raw_path(ps)
        log(project, f"Started converting {rawfile}")
        set_status(ps, 'Converting')
        process = subprocess.Popen(CONVERSION_CMD.format(
            infile=rawfile, outdir=get_proj_root(project)),
                                   shell=True,
                                   stdout=subprocess.PIPE)
        process.wait()
        log(project, f'Converted {rawfile}')
        set_status(ps, 'Converted')
        append_list(DB_TANDEM_FILE, [(project, sample, protocol, organism)],
                    DB_TANDEM_LOCK)
        append_list(DB_MASCOT_FILE, [(project, sample, protocol, organism)],
                    DB_MASCOT_LOCK)
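This pattern recurs in Examples #15 and #25 (the consumer side) and Example #12 (the producer side): a plain text file serves as a work queue, and ILock makes the read-then-remove step atomic across processes. A minimal self-contained sketch of the pop step, with illustrative names (the original splits the removal into a separate remove_first_line helper):

import os

from ilock import ILock


def pop_first_line(queue_file, lock_name):
    # Atomically read and remove the first line of a shared queue file.
    with ILock(lock_name):
        if not os.path.exists(queue_file):
            return None
        with open(queue_file, "r") as f:
            lines = f.readlines()
        if not lines:
            return None
        with open(queue_file, "w") as f:
            f.writelines(lines[1:])
        return lines[0]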
Example #3
def new_node(node):
    print("New node appeared: Name: %s" % (node.name, ))
    with ILock("controller"):
        if node.name in valid_nodes:
            nodes[node.name] = node
            program_name = node.name
            with open(conf['files'][program_name], "r") as f:
                program_code = f.read()
            program_args = ""
            program_port = conf['port'][program_name]

            controller.blocking(False).node(node).radio.activate_radio_program(
                {
                    'program_name': program_name,
                    'program_code': program_code,
                    'program_args': program_args,
                    'program_type': 'py',
                    'program_port': program_port
                })
            print("Started program %s" % (program_name, ))

            if node.name in stopped_nodes:
                stopped_nodes.remove(node.name)
        else:
            print("Node %s is not valid" % (node.name, ))
    print("Done configuring node: %s" % (node.name, ))
Example #4
def ace_attorney_anim(config: List[Dict], output_filename: str = "output.mp4"):
    root_filename = output_filename[:-4]  # strip the ".mp4" extension
    audio_filename = output_filename + '.audio.mp3'
    text_filename = root_filename + '.txt'
    if os.path.exists(root_filename):
        shutil.rmtree(root_filename)
    os.mkdir(root_filename)
    with ILock('aa-render'):
        sound_effects = do_video(config, root_filename)
    do_audio(sound_effects, audio_filename)
    videos = []
    with open(text_filename, 'w') as txt:
        for file in os.listdir(root_filename):
            videos.append(file)
        videos.sort(key=lambda item: int(item[:-4]))
        for video in videos:
            txt.write('file ' + root_filename + '/' + video + '\n')
    textInput = ffmpeg.input(text_filename, format='concat')
    audio = ffmpeg.input(audio_filename)
    if os.path.exists(output_filename):
        os.remove(output_filename)
    out = ffmpeg.output(textInput,
                        audio,
                        output_filename,
                        vcodec="copy",
                        acodec="aac",
                        strict="experimental")
    out.run()
    if os.path.exists(root_filename):
        shutil.rmtree(root_filename)
    if os.path.exists(text_filename):
        os.remove(text_filename)
    if os.path.exists(audio_filename):
        os.remove(audio_filename)
Example #5
    def create_env(self) -> Env:
        env_dir = self.env_dirs[0]
        package = env_dir.name
        env_name = f"env_{self.se.stage}"
        env_file = env_dir / f"{env_name}.py"

        module_name = f"{package}.{env_name}"

        # We have to lock this part in case there's other shells concurrently executing this code
        with ILock("envo_lock"):
            self._create_init_files()

            # unload modules
            for m in list(sys.modules.keys()):
                if m.startswith("env_"):
                    sys.modules.pop(m)

            try:
                module = import_from_file(env_file)
                env: Env
                env = module.Env()
                return env
            except ImportError as exc:
                raise EnvoError(
                    f"""Couldn't import "{module_name}" ({exc}).""")
            finally:
                self._delete_init_files()
Example #6
    def exclusive_create(self, data_f):
        with ILock(self.name + '.lock'):
            if self.exist():
                return

            with open(self.name, 'w') as f:
                data = data_f()
                f.write(data)
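The check-then-create idiom above generalizes to a small standalone helper. A sketch under the same assumptions; the names create_once and data_factory are illustrative, not from the source:

import os

from ilock import ILock


def create_once(path, data_factory):
    # Only the first process to acquire the lock writes the file; later
    # callers see that it already exists and return immediately.
    with ILock(path + '.lock'):
        if os.path.exists(path):
            return
        with open(path, 'w') as f:
            f.write(data_factory())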
Example #7
def pytest_configure(config):
    if config.pluginmanager.hasplugin('xdist'):
        try:
            from ilock import ILock
            utils.lock = ILock("oggm_xdist_download_lock_" + getpass.getuser())
            logger.info("ilock locking setup successfully for xdist tests")
        except BaseException:
            logger.warning(
                "could not setup ilock locking for distributed tests")
Example #8
def pytest_configure(config):
    for marker in ["slow", "download", "creds", "internet", "test_env",
                   "graphic"]:
        config.addinivalue_line("markers", marker)
    if config.pluginmanager.hasplugin('xdist'):
        try:
            from ilock import ILock
            utils.lock = ILock("oggm_xdist_download_lock_" + getpass.getuser())
            logger.info("ilock locking setup successfully for xdist tests")
        except BaseException:
            logger.warning("could not setup ilock locking for distributed "
                           "tests")
Example #9
    def detect(self, image=None):

        Height, Width = image.shape[:2]

        img = image.copy()
        img = cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
        img = Image.fromarray(img)

        self.logger.Debug(
            1, '|---------- TPU (input image: {}w*{}h) ----------|'.format(
                Width, Height))

        start = datetime.datetime.now()

        self.logger.Debug(1, 'Waiting for TPU lock before detecting...')
        with ILock('coral_edge_tpu_pyzm'):
            self.logger.Debug(1, 'Got TPU lock for detection...')
            outs = self.model.detect_with_image(
                img,
                threshold=self.options.get('object_min_confidence'),
                keep_aspect_ratio=True,
                relative_coord=False)

        # Elapsed time in milliseconds.
        diff_time = (datetime.datetime.now() - start).total_seconds() * 1000
        self.logger.Debug(
            1, 'Coral TPU detection took: {} milliseconds'.format(diff_time))

        bbox = []
        labels = []
        conf = []

        for out in outs:
            box = out.bounding_box.flatten().astype("int")
            (startX, startY, endX, endY) = box
            bbox.append([
                int(round(startX)),
                int(round(startY)),
                int(round(endX)),
                int(round(endY))
            ])
            labels.append(self.classes[out.label_id])
            conf.append(float(out.score))

        return bbox, labels, conf
Example #10
async def run(self, client):
    for guild in client.guilds:
        print(guild)
        with ILock(str(guild.id)):
            activity_log = read_activity_log(guild.id)
            for member in guild.members:
                if not member.bot:
                    if member.voice:
                        print(f"{member} is connected. Progress saved.")
                        log = get_log(member.voice)
                        if log["type"] == "JOINED":
                            if activity_log[str(member)][-1]["type"] == "ONLINE":
                                activity_log[str(member)][-1]["timestamp"] = time()
                            else:
                                log["type"] = "ONLINE"
                                activity_log[str(member)].append(log)
            save_activity_log(guild.id, activity_log)
Example #11
def log_current_users_activity(client):
    for guild in client.guilds:
        print(guild)
        with ILock(str(guild.id)):
            activity_log = read_activity_log(guild.id)
            for member, logs in activity_log.items():
                log = logs[-1].copy()
                log["type"] = "LEFT"
                activity_log[member].append(log)
            for member in guild.members:
                if not member.bot:
                    if member.voice:
                        print(f"{member} está conectado.")
                        idle = str(member.status) == "idle"
                        log = get_log(member.voice, idle=idle)
                        if str(member) not in activity_log:
                            activity_log[str(member)] = []
                        activity_log[str(member)].append(log)
            save_activity_log(guild.id, activity_log)
Example #12
def run_gimport(args):
    gdata = get_current_table()

    old_samples = read_list(DB_IMPORTED_FILE, DB_IMPORTED_LOCK)
    samples = []
    for k, row in gdata.iterrows():
        project = row[PROJECT_HEADER]
        sample = row[SAMPLE_HEADER]
        protocol = row[PROTOCOL_HEADER]
        organism = row[ORGANISM_HEADER]

        if os.path.exists(
                get_sample_raw_path((project, sample, protocol, organism))):
            samples.append((project, sample, protocol, organism))
        else:
            set_status((project, sample, protocol, organism),
                       "No file found: {}".format(
                           get_sample_raw_path(
                               (project, sample, protocol, organism))))

    # remove already imported samples
    samples = list(filter(lambda x: x not in old_samples, samples))
    append_list(DB_IMPORTED_FILE, samples, DB_IMPORTED_LOCK)

    conv_queue = []
    conv_old = read_list(DB_CONV_FILE, DB_CONV_LOCK)

    for project, sample, protocol, organism in samples:
        root = get_proj_root(project)
        if not os.path.exists(root):
            os.mkdir(root)
            log(project, f"Created root for project {project}")
        if (project, sample, protocol, organism) not in conv_old:
            conv_queue.append((project, sample, protocol, organism))
            set_status((project, sample, protocol, organism),
                       "Waiting for the analysis")
            log(project, f"Sample ID {sample} is waiting for the analysis")

    with ILock(DB_CONV_LOCK):
        with open(DB_CONV_FILE, "a") as f:
            for project, sample, protocol, organism in conv_queue:
                f.writelines(f"{project}\t{sample}\t{protocol}\t{organism}\n")
Example #13
def add_activity_log(member, before, after):
    if not member.bot:
        with ILock(str(member.guild.id)):
            activity_log = read_activity_log(member.guild.id)
            name = str(member)
            if name not in activity_log:
                activity_log[name] = []
            log = get_log(after, idle=str(member.status) == "idle")
            if log["type"] == "JOINED":
                print(f"{member} se conectó a {after.channel.name}")
            else:
                print(f"{member} se enojó")
            new = True
            if activity_log[name]:
                last_log = activity_log[name][-1]
                if last_log["type"] == log["type"]:
                    new = False
            if new:
                print("Agregado")
                activity_log[name].append(log)
            save_activity_log(member.guild.id, activity_log)
Example #14
    def __init__(self, options=None, logger=None):
        Base.__init__(self, logger)
        self.classes = {}
        # Avoid a shared mutable default argument.
        self.options = options if options is not None else {}
        start = datetime.datetime.now()
        self.logger.Debug(1, 'UID:{} EUID:{}'.format(os.getuid(),
                                                     os.geteuid()))
        self.logger.Debug(1, 'Waiting for TPU lock...')
        with ILock('coral_edge_tpu_pyzm'):

            self.logger.Debug(
                1, 'Lock acquired, TPU loading {}'.format(
                    self.options.get('object_weights')))
            self.model = DetectionEngine(self.options.get('object_weights'))

        # Elapsed time in milliseconds.
        diff_time = (datetime.datetime.now() - start).total_seconds() * 1000
        self.logger.Debug(
            1,
            'TPU initialization (loading model from disk) took: {} milliseconds'
            .format(diff_time))
        self.populate_class_labels()
Example #15
def run_tandem(args):
    line = None
    with ILock(DB_TANDEM_LOCK):
        if os.path.exists(DB_TANDEM_FILE):
            with open(DB_TANDEM_FILE, "r") as f:
                line = f.readline()
            if line:
                remove_first_line(DB_TANDEM_FILE)
    if line:
        project, sample, protocol, organism = map(str.strip, line.split('\t'))
        psample = (project, sample, protocol, organism)
        confpath = os.path.join(get_proj_root(project), sample + ".tconf.xml")
        mgfpath = get_sample_mgf_path(psample)

        tandem_db = get_db(organism, TANDEM_DB_HEADER)
        tandem_prefs = get_prefs(protocol, TANDEM_PREFS_HEADER)

        tandemconf = tandem_stub.format(
            defaults_path=tandem_prefs,
            taxonomy_path=TANDEM_TAXONOMY,
            mgf_file=mgfpath,
            output_path=get_sample_tandem_path(psample),
            taxon=tandem_db)
        with open(confpath, "w") as f:
            f.write(tandemconf)
        set_status(psample, "Identification (Tandem) running")
        log(project,
            "Starting X!Tandem: " + TANDEM_CMD.format(infile=confpath))
        process = subprocess.Popen(TANDEM_CMD.format(infile=confpath),
                                   shell=True,
                                   stdout=subprocess.PIPE)
        process.wait()
        log(project,
            f"X!Tandem finished with {process.returncode}, {process.stdout}")
        if os.path.exists(get_sample_mascot_path(psample)):
            amp = 'Mascot&Tandem'
        else:
            amp = 'Tandem'
        set_status(psample, f"Identification ({amp}) finished")
Example #16
    def get_env(self) -> Env:
        env_dir = self.env_dirs[0]
        package = env_dir.name
        env_name = f"env_{self.se.stage}"
        env_file = env_dir / f"{env_name}.py"

        module_name = f"{package}.{env_name}"

        with ILock("envo_lock"):
            self._create_init_files()

            self._unload_modules()

            try:
                module = import_module_from_file(env_file)
                env: Env
                env = module.Env()
                return env
            except ImportError as exc:
                logger.error(f"""Couldn't import "{module_name}" ({exc}).""")
                raise
            finally:
                self._delete_init_files()
Example #17
    def run(self):
        log('dependencies done; run phenotype reconciliation')
        client = util.mongo_client()

        try:
            data_access.update_job_status(str(self.job), util.conn_string,
                                          data_access.IN_PROGRESS,
                                          "Finished Pipelines")

            phenotype = data_access.query_phenotype(int(self.phenotype),
                                                    util.conn_string)
            # log(phenotype)

            db = client[util.mongo_db]

            data_access.update_job_status(str(self.job), util.conn_string,
                                          data_access.IN_PROGRESS,
                                          "Filtering Results")

            stats = phenotype_stats(str(self.job), True)
            intermediate_stats = phenotype_stats(str(self.job), False)
            data_access.update_job_status(
                str(self.job), util.conn_string,
                data_access.STATS + "_INTERMEDIATE_RESULTS",
                str(intermediate_stats["results"]))
            data_access.update_job_status(
                str(self.job), util.conn_string,
                data_access.STATS + "_INTERMEDIATE_SUBJECTS",
                str(intermediate_stats["subjects"]))
            data_access.update_job_status(str(self.job), util.conn_string,
                                          data_access.STATS + "_FINAL_RESULTS",
                                          str(stats["results"]))
            data_access.update_job_status(
                str(self.job), util.conn_string,
                data_access.STATS + "_FINAL_SUBJECTS", str(stats["subjects"]))
            log("writing job stats....")
            log(json.dumps(stats, indent=4))
            # data_access.update_job_status(str(self.job), util.conn_string, data_access.STATS + "_CACHE_QUERY_COUNTS",
            #                               str(util.get_cache_query_count()))
            # data_access.update_job_status(str(self.job), util.conn_string,data_access.STATS + "_CACHE_COMPUTE_COUNTS",
            #                               str(util.get_cache_compute_count()))
            # data_access.update_job_status(str(self.job), util.conn_string, data_access.STATS + "_CACHE_HIT_RATIO",
            #                               str(util.get_cache_hit_ratio()))

            for k in util.properties.keys():
                data_access.update_job_status(str(self.job), util.conn_string,
                                              data_access.PROPERTIES + "_" + k,
                                              util.properties[k])
            with self.output().open('w') as outfile:
                phenotype_helper.write_phenotype_results(
                    db, self.job, phenotype, self.phenotype, self.phenotype)

                # do tuple processing now that all tasks have completed
                succeeded = tuple_processor.process_tuples(
                    db['phenotype_results'], int(self.job))
                if not succeeded:
                    log('*** ERROR: tuple processing failed ***')

                # force all mongo writes to complete by calling fsync on the admin db, then releasing the lock

                wrote_docs = False
                for tries in range(1, _MAX_ATTEMPTS):

                    try:
                        with ILock(_LOCK_NAME, timeout=_LOCK_WAIT_SECS):

                            # only a SINGLE ClarityNLP process can execute this code at any time

                            # force writes to disk by locking the Mongo admin database
                            log('*** Job {0}: FORCING MONGO WRITES ***'.format(
                                self.job))

                            admin_db = client['admin']
                            fsync_result = admin_db.command('fsync', lock=True)
                            assert 1 == fsync_result['lockCount']
                            unlock_result = admin_db.command('fsyncUnlock')
                            assert 0 == unlock_result['lockCount']

                            log('*** Job {0}: ALL MONGO WRITES COMPLETED ***'.
                                format(self.job))

                            wrote_docs = True

                    except ILockException:
                        # timed out before acquiring the lock, will try again
                        pass

                    if wrote_docs:
                        break

                if not wrote_docs:
                    log('Job {0} failed to lock the Mongo admin database.'.
                        format(self.job))

                data_access.update_job_status(str(self.job), util.conn_string,
                                              data_access.COMPLETED,
                                              "Job completed successfully")
                outfile.write("DONE!")
                outfile.write('\n')

            log("job {} done!".format(self.job))
        except BulkWriteError as bwe:
            log(bwe.details)
            data_access.update_job_status(str(self.job), util.conn_string,
                                          data_access.WARNING,
                                          str(bwe.details))
        except Exception as ex:
            traceback.print_exc(file=sys.stdout)
            data_access.update_job_status(str(self.job), util.conn_string,
                                          data_access.FAILURE, str(ex))
            log(ex)
        finally:
            client.close()
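The fsync block above uses the full locking vocabulary: ILock with a timeout, ILockException on expiry, and a bounded retry loop. Distilled into a standalone sketch (the lock name, the constants, and the critical_section callable are illustrative):

from ilock import ILock, ILockException

MAX_ATTEMPTS = 5      # illustrative bound
LOCK_WAIT_SECS = 10   # illustrative timeout


def run_exclusively(critical_section):
    # Retry until the inter-process lock is acquired or attempts run out.
    for _ in range(MAX_ATTEMPTS):
        try:
            with ILock('my_lock', timeout=LOCK_WAIT_SECS):
                critical_section()
                return True
        except ILockException:
            # Timed out before acquiring the lock; try again.
            continue
    return False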
Example #18
def get_vr1tx_split1_uplink():
    with ILock("controller"):
        return values['vr1tx-split1']['iq_rxrate'] * 32
Example #19
def get_vr2tx_uplink():
    with ILock("controller"):
        return values['vr2tx']['iq_rxrate'] * 32
Example #20
def get_usrp_vr2_uplink():
    with ILock("controller"):
        return values['usrp']['vr2_iq_rxrate'] * 32
Example #21
    def send_all(*args):
        try:
            with ILock('send_all_notifications_lock', 0.01):
                # Although more appropriate in the calling function, sleeping
                # here ensures that a second process times out on the ILock
                # and closes its thread.
                time.sleep(10)
                from django.contrib.sites.models import Site
                from django.contrib.auth import get_user_model
                from django.core.mail import mail_admins
                from pinax.notifications.models import NoticeQueueBatch
                from pinax.notifications.signals import emitted_notices
                from pinax.notifications import models as notification

                batches, sent, sent_actual = 0, 0, 0
                start_time = time.time()

                try:
                    for queued_batch in NoticeQueueBatch.objects.all():
                        notices = pickle.loads(
                            base64.b64decode(queued_batch.pickled_data))
                        for user, label, extra_context, sender in notices:
                            try:
                                user = get_user_model().objects.get(pk=user)
                                logger.info(
                                    "emitting notice {0} to {1}".format(
                                        label, user))
                                # call this once per user to be atomic and allow for logging to
                                # accurately show how long each takes.
                                if notification.send_now([user], label,
                                                         extra_context,
                                                         sender):
                                    sent_actual += 1
                            except get_user_model().DoesNotExist:
                                # Ignore deleted users, just warn about them
                                logger.info(
                                    "not emitting notice {0} to user {1} since it does not exist"
                                    .format(label, user))
                            sent += 1
                        queued_batch.delete()
                        batches += 1
                    emitted_notices.send(sender=NoticeQueueBatch,
                                         batches=batches,
                                         sent=sent,
                                         sent_actual=sent_actual,
                                         run_time="%.2f seconds" %
                                         (time.time() - start_time))
                except Exception:  # pylint: disable-msg=W0703
                    # get the exception
                    _, e, _ = sys.exc_info()
                    # email people
                    current_site = Site.objects.get_current()
                    subject = "[{0} emit_notices] {1}".format(
                        current_site.name, e)
                    message = "\n".join(
                        traceback.format_exception(
                            *sys.exc_info())  # pylint: disable-msg=W0142
                    )
                    mail_admins(subject, message, fail_silently=True)
                    # log it as critical
                    logger.error("Exception: {0}".format(e))

                if sent > 0:
                    logger.info("{0} batches, {1} sent".format(
                        batches,
                        sent,
                    ))
                    logger.info("done in {0:.2f} seconds".format(time.time() -
                                                                 start_time))
                return True
        except Exception as e:
            logger.error("send_all Exception (thread #%s): %s",
                         threading.get_ident(), e)
            return False
Example #22
    def login_sign_on(self, url, cache_file=".session.cache", force_level=0):

        from ilock import ILock
        from getpass import getpass
        from os import remove, path
        import requests  # assumed: `s` below is used like a requests.Session

        cache = None
        cache_file = path.abspath(cache_file)
        cache_lock_id = b64encode(cache_file.encode('utf-8')).decode()
        cache_time = self.file_mtime(cache_file)

        with ILock(cache_lock_id):

            if force_level == 1 and cache_time != self.file_mtime(cache_file):
                force_level = 0

            if force_level == 2:
                remove(cache_file)

            if path.isfile(cache_file):

                logging.debug('%s found', cache_file)
                with open(cache_file, 'r') as f:
                    cache = json.loads(f.read())

            else:

                logging.debug('%s not found', cache_file)

            if force_level > 0 or cache is None or 'cookies' not in cache:

                if cache is not None and 'secret' in cache:
                    secret = b64decode(cache['secret'].encode()).decode()
                    username, password = secret.split('/')
                    password = b64decode(password.encode()).decode()
                else:
                    # The source listing masks these lines with "******";
                    # reconstructed from the surviving fragments.
                    username = input("Username: ")
                    password = getpass("Password: ")

                # The creation of the HTTP session `s` and the initial GET
                # producing `r1` were also lost in the masking; this is a
                # plausible reconstruction.
                with requests.Session() as s:

                    r1 = s.get(url,
                               timeout=self.DEFAULT_TIMEOUT_SECONDS,
                               verify=False,
                               allow_redirects=True)
                    r1.raise_for_status()

                    if username == 'guest':  # the guest test here is a guess
                        logging.debug("%s is guest account." % username)

                        root = self.html_root(r1)
                        link = root.find(
                            ".//{http://www.w3.org/1999/xhtml}a[@id='zocial-guest']"
                        )
                        guest_url = self.split_url(r1.url)[0] + self.split_url(
                            link.get('href'))[1]
                        logging.debug(guest_url)
                        r1 = s.get(guest_url,
                                   timeout=10,
                                   verify=False,
                                   allow_redirects=True)
                        r1.raise_for_status()

                    else:
                        logging.debug("%s is a regular account." % username)

                    action, form_data = self.read_form(r1)

                    form_data['username'] = username
                    form_data['password'] = password

                    r2 = s.post(url=action,
                                data=form_data,
                                timeout=self.DEFAULT_TIMEOUT_SECONDS,
                                allow_redirects=True)
                    r2.raise_for_status()
                    action, form_data = self.read_form(r2)

                    r3 = s.post(url=action,
                                data=form_data,
                                timeout=self.DEFAULT_TIMEOUT_SECONDS,
                                allow_redirects=True)

                    cache = {
                        'secret':
                        b64encode((username + '/' + b64encode(
                            password.encode()).decode()).encode()).decode(),
                        'location':
                        url,
                        'cookies': {c.name: c.value
                                    for c in s.cookies}
                    }

                    with open(cache_file, 'w') as f:
                        f.write(json.dumps(cache))

            return cache['cookies']
Example #23
numberOfIterationsMax = 5
cwd = os.getcwd()

path = "/home/homeassistant/.homeassistant/www"
os.chdir(path)

logging.basicConfig(
    filename='debug_ebus',
    filemode='a',
    format='%(asctime)s,%(msecs)d %(name)s %(levelname)s %(message)s',
    datefmt='%Y-%m-%d %H:%M:%S',
    level=logging.ERROR)

logging.debug("start directory")
logging.debug(cwd)
with ILock('ebus', timeout=200):
    #read temperature measured by thermostat
    #time.sleep(3)
    logging.debug("read ebus started")
    cp = subprocess.run(["ebusctl read z1RoomTemp"],
                        shell=True,
                        stdout=subprocess.PIPE)
    logging.debug("read RoomTemp 1")
    logging.debug(cp)
    cp_string = cp.stdout.decode('utf-8')
    busread = cp_string[0:5]
    if busread == 'error':
        # cp = subprocess.run(["ebusd -f --scanconfig >/dev/null 2>&1"],shell=True,stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        subprocess.Popen(["nohup", "ebusd", "-f", "--scanconfig"])
        print('fubar')
        time.sleep(5)
Example #24
def make_app():
    with ILock('sandwalker-init-lock'):
        app = create_app()
    return app
Example #25
def run_mascot(args):
    line = None
    with ILock(DB_MASCOT_LOCK):
        if os.path.exists(DB_MASCOT_FILE):
            with open(DB_MASCOT_FILE, "r") as f:
                line = f.readline()
            if line:
                remove_first_line(DB_MASCOT_FILE)
    if not line:
        return

    project, sample, protocol, organism = map(str.strip, line.split('\t'))
    psample = (project, sample, protocol, organism)
    mgfpath = get_sample_mgf_path(psample)
    datpath = get_sample_mascot_path(psample)
    set_status(psample, "Identification (Mascot) running")
    mascot_db = get_db(organism, MASCOT_DB_HEADER)
    mascot_prefs = get_prefs(protocol, MASCOT_PREFS_HEADER)
    pars = get_default_mascot_pars(mascot_prefs)

    pars['DB'] = mascot_db
    pars['COM'] = 'msauto_prot1: ' + '/'.join(psample)

    session = mascot_login(MASCOT_CGI, 'mascotadmin', 'R251260z')
    if session:
        log(project, f"Logged in {MASCOT_CGI}")

    pars1 = {}
    # pars1['MASCOT_SESSION'] = cookies['MASCOT_SESSION']
    # pars1['MASCOT_USERID'] = cookies['MASCOT_USERID']
    # pars1['MASCOT_USERNAME'] = cookies['MASCOT_USERNAME']
    sendurl = MASCOT_CGI + '/nph-mascot.exe?1'
    with open(mgfpath, 'rb') as f:
        response = session.post(sendurl, files={'FILE': f}, data=pars)
    log(project, "Mascot response was: " + response.content.decode())
    if response.ok:
        error_result = re.match('Sorry, your search could not be performed',
                                response.content.decode())
        if error_result:
            log(project, 'Search failed')
        #http://mascot.ripcm.com/mascot/cgi/master_results_2.pl?file=../data/20210420/F052415.dat
        match = re.search(
            r'master_results_2\.pl\?file=.*data/(?P<date>\d+)/(?P<file>F\d+\.dat)',
            response.content.decode())
        date, file = match.group('date'), match.group('file')

        mascot_xcgi = MASCOT_CGI.replace('cgi', 'x-cgi')
        get_url = mascot_xcgi + f'/ms-status.exe?Autorefresh=false&Show=RESULTFILE&DateDir={date}&ResJob={file}'
        log(project, f'Downloading file {get_url}')

        with session.get(get_url, stream=True) as r:
            r.raise_for_status()
            with open(datpath, 'wb') as f:
                for chunk in r.iter_content(chunk_size=8192):
                    f.write(chunk)
        log(project, 'Downloaded')
        if os.path.exists(get_sample_tandem_path(psample)):
            amp = 'Mascot&Tandem'
        else:
            amp = 'Mascot'
        set_status(psample, f'Identification ({amp}) finished')

    else:
        log(project, "Bad response")
Example #26
def get_usrp_vr1_downlink():
    with ILock("controller"):
        return values['usrp']['vr1_iq_txrate'] * 32
Example #27
    os.remove(filename)
    os.remove("subs.ass")
    shutil.move("tmp.mkv", filename)


if __name__ == "__main__":
    os.chdir(os.path.dirname(os.path.abspath(__file__)))

    parser = argparse.ArgumentParser()
    parser.add_argument("--only", default=None)
    parser.add_argument("--style", default="style.ass")
    parser.add_argument("input")

    args = parser.parse_args()

    print("acquiring lock")
    with ILock("subs"):
        if os.path.isdir(args.input):
            print("directory", args.input)
            for filename in os.listdir(args.input):
                if os.path.splitext(filename)[1].lower() != ".mkv": continue
                path = os.path.join(args.input, filename)
                if os.path.isfile(path):
                    apply(path, args.only, args.style)
        else:
            if os.path.splitext(args.input)[1].lower() == ".mkv":
                apply(args.input, args.only, args.style)
            else:
                print("input isnt mkv")
Example #28
def wrapper(*args, **kwargs):
    with ILock(lockname):
        return func(*args, **kwargs)
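Only the inner wrapper appears in the listing; the enclosing decorator factory that supplies lockname and func is elided. A plausible reconstruction as a self-contained sketch (the factory name with_ilock is assumed):

import functools

from ilock import ILock


def with_ilock(lockname):
    # Decorator factory: serialize all calls to the wrapped function
    # across processes via a named inter-process lock.
    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            with ILock(lockname):
                return func(*args, **kwargs)
        return wrapper
    return decorator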
Example #29
def save_codes(codes):
    with ILock('codes_write'):
        with open('codes.yml', 'w') as outfile:
            yaml.dump(codes, outfile, default_flow_style=False)
Example #30
def get_vr2tx_downlink():
    with ILock("controller"):
        return values['vr2tx']['tx_iq_rate'] * 32