Example #1
 def eval_best_model(self, epoch):
     if self.metrics['val_loss_avg'][-1] < self.best_loss:
         self.best_loss = self.metrics['val_loss_avg'][-1]
         print(
             'New best model at epoch {:0=3d} with val_loss {:.4f}'.format(
                 epoch, self.best_loss))
         utils.flush()
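None of these excerpts include the flush helper itself. A minimal sketch of what a no-argument utils.flush() is assumed to do here (force buffered output to appear immediately) might look like the following; the message-printing flush('...') and path/query-taking variants used in later examples are separate, project-specific helpers.

import sys

def flush():
    # Assumed behaviour: push any buffered output to the terminal right away,
    # so log lines appear in order even when stdout/stderr are redirected.
    sys.stdout.flush()
    sys.stderr.flush()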
Example #2
    def maybe_flush(self):
        global prev_host

        new_host = self.headers.get("Host", "")
        if prev_host != new_host:
            utils.flush()
            prev_host = new_host
Example #4
def set_function(bot: Bot, update: Update):
    print(TAG(), "called set: ", update.message)
    chat_id = update.message.chat_id
    args = update.message.text.split(" ")
    if len(args) < 3:  # need at least a name and a URL
        bot.send_message(chat_id=chat_id,
                         text="Invalid message. Command pattern is:")
        bot.send_message(chat_id=chat_id, text="/set name URL [CSS SELECTOR]")
        return

    name, url = args[1], args[2]
    watcher = Watcher(name, url, update)
    if len(args) > 3:
        watcher.selector = " ".join(args[3:])
        watcher.type = Selector.CSS
    else:
        watcher.type = Selector.NONE
    ok = watcher_manager.add_watcher(chat_id, watcher)
    if ok:
        bot.send_message(
            chat_id=chat_id,
            text="Notifier {0} correctly created! (SELECTOR: '{1}' ({2}))".
            format(watcher.name, watcher.selector, watcher.type))
        print("{0}: watcher {1} created.".format(chat_id, name))
    else:
        bot.send_message(
            chat_id=chat_id,
            text="Notifier {0} already exists. Please delete it".format(name))

    flush()
Example #5
def log_execute(args, handler=None, env=None, cwd=None, shell=False):
    utils.flush()
    p = subprocess.Popen(args,
                         stdout=subprocess.PIPE,
                         stderr=subprocess.STDOUT,
                         shell=shell,
                         env=expand_variables(env),
                         cwd=cwd)

    reader = _TailThread(fileInput=p.stdout,
                         handler=handler,
                         fromBegining=True)
    rc = None
    try:
        reader.start()

        # Wait until subprocess is done
        rc = p.wait()

    finally:
        # Wait until we've processed all output
        reader.stop(False)
        if reader.is_alive(): reader.join()

        utils.flush()
    return rc
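A hypothetical call site for log_execute, assuming handler is any callable that receives one line of captured output at a time (_TailThread and expand_variables are not shown in this excerpt):

import sys

def echo_line(line):
    # Hypothetical handler: print each captured output line as it arrives
    # (the exact line type depends on _TailThread, which is not shown).
    print(line)

rc = log_execute([sys.executable, "-c", "print('hello from subprocess')"],
                 handler=echo_line)
print("exit code:", rc)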
Example #6
def clear_function(bot: Bot, update):
    print(TAG(), "called clear: ", update.message)
    chat_id = update.message.chat_id
    watcher_manager.clear_watcher(chat_id)
    bot.send_message(chat_id=chat_id, text="All notifiers are deleted")

    flush()
Example #7
def start_function(bot, update):
    print(TAG(), "called start: ", update.message)
    chat_id = update.message.chat_id
    args = update.message.text.split(" ")
    if len(args) > 1:
        name = args[1]
        ok = watcher_manager.start_watcher(chat_id, name)
        if ok == "started":
            bot.send_message(
                chat_id=chat_id,
                text="The notifier {0} now is running".format(name))
        elif ok == "already":
            bot.send_message(
                chat_id=chat_id,
                text="The notifier {0} is already running".format(name))
        else:
            bot.send_message(
                chat_id=chat_id,
                text="The notifier {0} doesn't exist".format(name))
    else:
        bot.send_message(
            chat_id=chat_id,
            text="Insert the notifier name that you want to start")

    flush()
Example #8
def create_summary_accs():
    flush('Creating main accounts...')

    query = db.session.query(
        db.Hesaplar.ana_hesap, db.Hesaplar.lead_code, *[
            func.sum(getattr(db.Hesaplar,
                             '{}'.format(period))).label('{}'.format(period))
            for period in db.periodss
        ]).group_by('ana_hesap').all()

    for k in query:
        unmapped = None

        if k.lead_code == 'Unmapped':
            unmapped = db.session.query(db.Hesaplar).filter_by(
                len=3, ana_hesap=k.ana_hesap).first() or db.session.query(
                    db.Hesaplar).filter_by(ana_hesap=k.ana_hesap).first()

        source = db.session.query(
            db.Lead).filter_by(account=k.ana_hesap).first()

        main_source = unmapped or source
        name = main_source.name

        t = db.Hesaplar(number=k.ana_hesap,
                        ana_hesap=k.ana_hesap,
                        name=name,
                        lead_code=main_source.lead_code,
                        len=3)

        # copy each period's balance onto the summary row, then persist it once
        for q in db.periodss:
            setattr(t, q, getattr(k, q))

        db.session.add(t)
        db.session.commit()
Example #9
def backup():
    while True:
        time.sleep(60)
        print(TAG(), "thread_backup:", "START BACKUP ROUTINE")
        watcher_manager.save_watchers()
        print(TAG(), "thread_backup:", "BACKUP COMPLETED")

        flush()
Example #10
def print_output(name, src, toStdErr):
    try:
        while not finished and src is not None:
            out(toStdErr, src.readline())
            flush(toStdErr)
        src.close()
    except:
        pass
Example #11
def fix_mainaccs():
    flush('Changing the names of main accounts...')
    hesaplar = db.session.query(db.Hesaplar).filter_by(len=3)
    for k in hesaplar.all():
        item = db.session.query(db.Lead).filter_by(account=k.ana_hesap).first()
        if item:
            k.name = item.account_name

    db.session.commit()
Example #12
    def prepare_mapping():
        s = excel_file.sheet_by_name("Mapping")

        for x in range(1, s.nrows):
            db.session.add(
                db.Lead(account=s.cell(x, 0).value,
                        account_name=s.cell(x, 1).value,
                        lead_code=s.cell(x, 2).value,
                        name=s.cell(x, 3).value))
        flush('Parsing mappings...')
        db.session.commit()
Example #13
    def define_variables():
        flush('Defining Company names and audit periods...')
        sheet = excel_file.sheet_by_name('Instruction')

        db.tanimlar['company'] = sheet.cell_value(6, 2)
        try:
            db.tanimlar['optional'] = sheet.cell_value(7, 2) or "NO"
        except IndexError:
            flush('There is no optional column...')

        for sheet in excel_file.sheets()[2:]:
            db.periodss.append(sheet.name)
Example #14
def list_function(bot: Bot, update):
    print(TAG(), "called list: ", update.message)
    chat_id = update.message.chat_id
    watchers = watcher_manager.get_watchers(chat_id)
    if len(watchers) > 0:
        text = ""
        for watcher in watchers:
            text += str(watcher) + "\n"
    else:
        text = "No notifiers available"
    bot.send_message(chat_id=chat_id, text=text)

    flush()
Example #15
def thread_function(watchers_manager: WatcherManager):
    LOG = "thread_watchers:"
    print(TAG(), LOG, "started")
    while True:
        # acquire lock
        watchers_manager.watchers_lock.acquire()
        print(TAG(), LOG, "start updating watchers")
        try:
            # start selenium browser
            browser = webdriver.PhantomJS(executable_path=PATH_PHANTOM)
            # for each watcher
            for user_watchers in watchers_manager.watchers.values():
                for watcher in user_watchers:
                    if watcher.isRunning:
                        browser.get(watcher.url)
                        if watcher.type == Watcher.Selector.CSS:
                            elements = browser.find_elements_by_css_selector(
                                watcher.selector)
                            text = "".join(
                                [element.text for element in elements])
                        else:
                            text = browser.find_element_by_css_selector(
                                'body').text

                        if watcher.old_text is None:
                            watcher.old_text = text
                        else:
                            if watcher.old_text != text:
                                watcher.old_text = text
                                watcher.update.message.\
                                    reply_text("Notifier {0} has seen new changes! Go to see them:\n{1}"
                                               .format(watcher.name, watcher.url))
                                print(
                                    TAG(), LOG,
                                    "updated watcher {0}: change saved!".
                                    format(watcher.name))
                            else:
                                print(
                                    TAG(), LOG,
                                    "watcher {0} checked -> no changes".format(
                                        watcher.name))
            browser.close()
            print(TAG(), LOG, "checked every running watcher")
        except Exception as e:
            print(LOG, e, file=sys.stderr)
        finally:
            # release lock
            watchers_manager.watchers_lock.release()
            # wait for next iteration
        flush()
        time.sleep(TIMER)
Example #16
def delete_zeros(exceptions=None):
    if exceptions is None:
        exceptions = ['900']

    flush('Deleting accounts with zero balances in all periods...')
    query = db.session.query(
        db.Hesaplar).filter_by(**{k: 0
                                  for k in db.periodss}).all()
    for item in query:
        if item.number[:3] in exceptions:
            continue
        db.session.delete(item)

    db.session.commit()
Example #17
def execute_forward(build_cfg):

    if build_cfg.forwarding_dir() is not None:
        build_script = build_cfg.build_script()
        utils.flush()
        build_script.forward_buildresults()
        utils.flush()
        with open(os.path.join(build_cfg.genroot_dir(), 'forward.path'),
                  "w") as f:
            f.write(build_cfg.forwarding_destination())

        log.info("build result forwarding succeeded")
    else:
        log.info(
            "build result forwarding step skipped because of missing option -F"
        )
Example #18
        def exec_phase(name, method):
            if name == 'PRELUDE':
                log.info('='*100)
            log.info('= {0:96} ='.format('entering xmake ' + name + ' phase'))
            log.info('='*100)

            try:
                method(build_cfg)
                notify_phase_ended(name, build_cfg)
                utils.flush()
            except SystemExit:
                raise
            except BaseException:
                raise
            log.info('end of xmake ' + name + ' phase')
            log.info('='*100)
Example #19
def handle_bootstrapper(xmake_inst_dir,argv):
    f=join(xmake_inst_dir,'BOOTSTRAPPER_VERSION')
    #log.info("looking for "+f)
    if len(argv)>1 and argv[1]=='--select-bootstrapper':
        select_bootstrapper(f,argv)
        sys.exit(0)

    if isfile(f):
        log.info("determining bootstrapper")
        v=get_first_line(f,'cannot read '+f)
        if v!=None:
            (v,l)=load_latest(v)
            if (l!=None):
                cmd=[sys.executable, join(l,'xmake','bootstrap.py'),'--bootstrap']
                cmd.extend(argv[1:])
                flush()
                rc=subprocess.call(cmd)
                sys.exit(rc)
Example #20
 def summarize_metrics(self, matrix, prefix):
     for key in sorted(self.metrics_epoch.keys()):
         self.metrics['{}_{}'.format(key, 'avg')].append(
             self.metrics_epoch[key].avg)
         self.metrics['{}_{}'.format(key, 'std')].append(
             self.metrics_epoch[key].std)
         print('epoch{:0=3d}_{}{:.4f}'.format(
             self.metrics['epochs'][-1], '{}_{}'.format(key, 'avg'),
             self.metrics['{}_{}'.format(key, 'avg')][-1]))
         print('epoch{:0=3d}_{}{:.4f}'.format(
             self.metrics['epochs'][-1], '{}_{}'.format(key, 'std'),
             self.metrics['{}_{}'.format(key, 'std')][-1]))
     print(matrix)
     self.metrics['{}_acc'.format(prefix)].append(metrics.accuracy(matrix))
     print('epoch{:0=3d}_{}{:.4f}'.format(
         self.metrics['epochs'][-1], '{}_acc'.format(prefix),
         self.metrics['{}_acc'.format(prefix)][-1]))
     utils.flush()
Example #21
def find_bds():
    flush('Determining breakdowns...')
    accounts = db.session.query(db.Hesaplar).all()
    for k in accounts:
        if db.tanimlar['optional'] != 'NO' and sum_var is False:
            if len(k.number) > 3:
                k.bd = True
        elif db.tanimlar['optional'] != 'NO' and sum_var is True:
            if len(db.session.query(db.Hesaplar).filter(and_(db.Hesaplar.number.startswith(k.number),
                                                             db.Hesaplar.optional == k.optional)).all()) <= 1 \
                    and len(k.number) > 3:
                k.bd = True
        else:
            if len(
                    db.session.query(db.Hesaplar).filter(
                        db.Hesaplar.number.startswith(k.number)).all()) <= 1:
                k.bd = True

    db.session.commit()
    flush('Breakdowns are set!')
Example #22
def del_function(bot: Bot, update):
    print(TAG(), "called del: ", update.message)
    chat_id = update.message.chat_id
    args = update.message.text.split(" ")
    if len(args) < 2:
        bot.send_message(chat_id=chat_id,
                         text="Invalid message. Command pattern is:")
        bot.send_message(chat_id=chat_id, text="/del notifierName")
        return

    name = args[1]
    ok = watcher_manager.delete_watcher(chat_id, name)
    if ok:
        bot.send_message(chat_id=chat_id,
                         text="Notifier {0} deleted!".format(name))
    else:
        bot.send_message(chat_id=chat_id,
                         text="Notifier {0} not found.".format(name))

    flush()
Example #23
    def run_training(self):
        self.smaller_validation_loss = float('inf')
        for epoch in range(self.number_of_epochs):
            print("\nPROGRESS: {0:02.2f}%\n".format(
                epoch / self.number_of_epochs * 50),
                  flush=True)
            self.train(epoch)

            flush(self.logger)
            if self.update_learning_rate:
                self.update_lr(epoch)

        print("\nPROGRESS: {0:02.2f}%\n".format(
            (epoch + 0.9) / self.number_of_epochs * 50),
              flush=True)

        self.evaluate_learned_policy(self.number_of_epochs *
                                     self.training_steps,
                                     save_results=True,
                                     number_of_epochs=1,
                                     number_of_steps=100000)
        self.best_policy.dump_network(self.cloned_network_path)
Example #24
def output_timeseries_chis(
    myfilename_prefix,
    myreslist,
    colnames,
    nsims=6,
    nchi=6,
):
    #print "shape of matrix to ouput:"+str(mymatrix.shape)+"\n"
    min_num_angles = min(myreslist[0].numangles)
    timeseries_chis_matrix = zeros(
        (nsims, len(myreslist) * nchi, min_num_angles), float64)  #initialize
    #self.angles = zeros((self.nchi,num_sims,max_angles),float64)         # the dihedral angles

    for res_ind1, myres1 in zip(range(len(myreslist)), myreslist):
        print "\n#### Working on residue %s (%s):" % (
            myres1.num, myres1.name), utils.flush()
        for mychi1 in range(myres1.nchi):
            #print "%s chi: %d/%d" % (myres1.name,int(myres1.num),mychi1+1)
            #print "res_ind1: "+str(res_ind1)
            #rint "mychi1: "+str(mychi1)
            #print "nchi: " +str(nchi)
            #print "min_num_angles: "+str(min_num_angles)
            #print "res_ind1 * nchi + mychi1: "+str(res_ind1 * nchi + mychi1)
            #print "myres1.angles: "
            #print myres1.angles
            #print "angle entries: "
            #print myres1.angles[mychi1, :, :min_num_angles]
            timeseries_chis_matrix[:, res_ind1 * nchi +
                                   mychi1, :] = myres1.angles[
                                       mychi1, :, :min_num_angles]
    my_file_list = []
    for mysim in range(nsims):
        myfile = open(myfilename_prefix + "_" + str(mysim) + ".txt", 'w')
        for col_num, col_name in zip(range(len(colnames)), colnames):
            for col_chi in range(nchi):
                myfile.write(col_name + "_" + str(col_chi) + " ")
            myfile.write("\n")

        for myrow in range(min_num_angles):
            for col_num, col_name in zip(range(len(colnames)), colnames):
                for col_chi in range(nchi):
                    myfile.write(
                        str(timeseries_chis_matrix[mysim,
                                                   col_num * nchi + col_chi,
                                                   myrow]))
                    myfile.write(" ")
            myfile.write("\n")
        myfile.close()

    return timeseries_chis_matrix
Example #25
def deploy_circuitpython_code():
    print("========== DEPLOYING CODE ==========")
    # Wait for the circuitpython drive to show up.
    time.sleep(5)
    cpy_drive = utils.find_drive_by_name("CIRCUITPY")

    utils.clean_pycache(FIRMWARE_DIR)
    utils.clean_pycache(EXAMPLES_DIR)

    os.makedirs(os.path.join(cpy_drive, "lib"), exist_ok=True)

    for src, dst in FILES_TO_DEPLOY.items():
        if src.startswith("https://"):
            http_src, zip_path = src.rsplit(':', 1)

            zip_data = io.BytesIO(requests.get(http_src).content)

            with zipfile.ZipFile(zip_data, "r") as zipfh:
                file_data = zipfh.read(zip_path)

            dst = os.path.join(dst, os.path.basename(zip_path))
            with open(os.path.join(cpy_drive, dst), "wb") as fh:
                fh.write(file_data)

        else:
            if os.path.isdir(src):
                dst = os.path.join(cpy_drive, dst, os.path.basename(src))
                if os.path.exists(dst):
                    shutil.rmtree(dst)
                shutil.copytree(src, dst)
            else:
                shutil.copy(src, os.path.join(cpy_drive, dst))

        print(f"Copied {src} to {dst}")

    utils.flush(cpy_drive)
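Here utils.flush() takes the drive path, so it presumably forces pending filesystem writes out to the CIRCUITPY device rather than flushing stdout. A minimal sketch under that assumption (the real helper is not shown in this excerpt):

import os

def flush(drive_path):
    # Assumed behaviour: make sure the copied files are physically written to
    # the device before it is unplugged or reset. os.sync() flushes all mounts
    # on POSIX systems; drive_path is kept for a more targeted implementation.
    if hasattr(os, "sync"):
        os.sync()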
Example #26
def numerical_gradient_vectorized_v1(iobj, x):
    check_vector4_vectorized(x)
    if TEST_ON:
        print "numerical_gradient1",
        flush()
        count = x.shape[0]
        g = np.zeros((count, 4))
        for i in range(x.shape[0]):
            v = x[i, 0:4]
            # inefficient: not vectorised
            g[i, :] = numerical_gradient(iobj, v, is_vectorized=True)
        assert not np.any(np.isnan(g), axis=None)

    print "numerical_gradient2",
    flush()
    g2 = numerical_gradient_vectorized_v2(iobj, x.copy())
    if TEST_ON:
        assert np.allclose(g, g2)
    print "done"
    flush()
    return g2
Example #27
async def main():
    await asyncio.gather(request("First"), request("Second"), request("Third"))
    flush()
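The request coroutine is not part of this excerpt; a self-contained sketch of the same gather-then-flush pattern, with a stub request standing in for the real one:

import asyncio
import sys

async def request(label):
    # Stub for illustration only: simulate a short I/O-bound task.
    await asyncio.sleep(0.1)
    print(label, "finished")

async def main():
    await asyncio.gather(request("First"), request("Second"), request("Third"))
    sys.stdout.flush()  # assuming flush() ultimately flushes stdout

asyncio.run(main())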
Example #28
def parse_excel_file(file):
    excel_file = open_workbook(file, encoding_override='utf-8')
    flush('Opening excel file...')

    def prepare_mapping():
        s = excel_file.sheet_by_name("Mapping")

        for x in range(1, s.nrows):
            db.session.add(
                db.Lead(account=s.cell(x, 0).value,
                        account_name=s.cell(x, 1).value,
                        lead_code=s.cell(x, 2).value,
                        name=s.cell(x, 3).value))
        flush('Parsing mappings...')
        db.session.commit()

    def define_variables():
        flush('Defining Company names and audit periods...')
        sheet = excel_file.sheet_by_name('Instruction')

        db.tanimlar['company'] = sheet.cell_value(6, 2)
        try:
            db.tanimlar['optional'] = sheet.cell_value(7, 2) or "NO"
        except IndexError:
            flush('There is no optional column...')

        for sheet in excel_file.sheets()[2:]:
            db.periodss.append(sheet.name)

    # donus = dict()
    define_variables()
    db.Hesaplar = db.make_hesaplar()
    db.create_db()
    db.len_periods = len(db.periodss)
    prepare_mapping()

    def find_seperator():
        from collections import defaultdict
        pointer = defaultdict(int)
        stop = 0
        for item in excel_file.sheet_by_index(excel_file.nsheets - 1).col(0):
            if stop == 5:
                return max(pointer.keys(), key=lambda q: pointer[q])
            if not str(item.value).isalnum():
                for char in str(item.value):
                    if not char.isalnum():
                        pointer[char] += 1
                        stop += 1

    for i in range(2, excel_file.nsheets):
        sheet = excel_file.sheet_by_index(i)
        key = sheet.name

        flush('Parsing %s...' % key)

        headers = [col.value.strip() for col in sheet.row(0)]

        desc_col = 0
        bal_col = 0
        opt_col = 999

        for y in range(len(headers)):
            if headers[y].lower() in bakiye:
                bal_col = y
            if headers[y].lower() in hesap_adi:
                desc_col = y
            if headers[y] == db.tanimlar['optional']:
                opt_col = y

        for row in range(1, sheet.nrows):
            temp = {}
            # TODO: for items read as floats, the thousands separator could be detected, the value converted to str, and the separator replaced.

            for col in range(sheet.ncols):
                if col == 0:
                    temp["number"] = str(sheet.cell_value(
                        row, col))  # there is a bug here that still needs fixing
                elif col == bal_col:
                    temp[key] = sheet.cell_value(row, col)
                elif col == opt_col:
                    temp['optional'] = sheet.cell_value(row, col)
                elif col == desc_col:
                    temp['name'] = sheet.cell_value(row, col)

            param = 'optional' if db.tanimlar['optional'] != 'NO' else 'NO'

            if param == 'NO':
                gecici = db.session.query(
                    db.Hesaplar).filter_by(number=temp['number']).first()
            else:
                hink = db.session.query(db.Hesaplar).filter_by(
                    number=temp['number'], **{
                        param: temp[param]
                    }).first()
                gecici = None
                if hink is not None:
                    if int(getattr(hink, key)) == 0:
                        gecici = hink

            ana_hesap = temp['number'][:3]
            lead_cod = None

            if db.session.query(
                    db.Lead).filter_by(account=ana_hesap).first() is not None:
                lead_cod = db.session.query(
                    db.Lead).filter_by(account=ana_hesap).first().lead_code

            if gecici is not None:
                setattr(gecici, key, temp[key])
                flush(temp['number'] + ' updated...', wait=0.0001, code='blue')
            else:
                db.session.add(
                    db.Hesaplar(**temp,
                                ana_hesap=ana_hesap,
                                lead_code=lead_cod,
                                len=len(temp['number'])))
                flush(temp['number'] + ' added...', wait=0.0001, code='blue')

        db.session.commit()
        flush('Period %s parsed...' % key)
Example #29
            fake_samples = fake_samples[0,:]
            print("==> (iters: "+str(iter_)+") Fake Samples Summary; mu "+str(mu)+"; sigma "+str(sigma)+" <===")
            a1 = str(fake_samples.shape)
            print("==> Shape: "+a1)
            
            print("==> Vec: "+str(fake_samples))
            
            print("==> Values Greater than mu="+str(mu)+"; n="+str(np.sum(fake_samples > mu)))

            
            a2 = str(np.mean(fake_samples))
            print("==> Mean: "+a2)
            
            a3 = str(np.std(fake_samples))
            print("==> STD: "+a3)
            utils.flush(img_folder)
            # generate_image(sess, batch_data, iter_)
            
        if (np.mod(iter_, FREQ) == 0) or (iter_+1 == ITERS):

            session_saver.save(sess, 'model/my_gan.ckpt')

        utils.tick()
Example #30
def execute_build(build_cfg):
    # set configured tool resolutions

    if not build_cfg.suppress_variant_handling():
        p = build_cfg.variant_cosy()
        cosy = CoSy(p)
        log.info('using variant coordinate system ' + p)
        log.info('  coordinate dimensions: ' + str(cosy.get_dimensions()))
        for d in cosy.get_dimensions():
            log.info('   dimension ' + d + ': ' + str(cosy.get_dimension(d)))
        coords = build_cfg.variant_coords()
        # projection of variant coordinates according to actual coordinate system
        for c in list(coords.keys()):  # copy keys so entries can be deleted while iterating
            if c not in cosy.get_dimensions():
                log.warning("invalid dimension '" + c + "' -> adjusted")
                del coords[c]
        cosy.check_coords(coords)
        build_cfg._variant_vector = cosy.variant_coord_vector(coords)

    ct = build_cfg.configured_tools()
    if len(ct) != 0:
        tools = build_cfg.tools()
        for n in ct.keys():
            tid = ct[n].toolid()
            ct[n]._inst_dir = tools[tid][ct[n].version()]

    #as a fallback, use built-in vmake build plugin
    if not build_cfg.skip_build():
        if build_cfg._externalplugin_setup:
            log.info('-' * 100)
            log.info('| {0:15} {1:80} |'.format('Build plugin:',
                                                build_cfg._build_script_name))
            log.info('| {0:96} |'.format('-' * 96))
            log.info('| {0:15} {1:80} |'.format(
                'version:', build_cfg._build_script_version))
            try:
                log.info('| {0:15} {1:80} |'.format(
                    'description:',
                    build_cfg._externalplugin_setup.get_description()))
            except AttributeError:
                log.info('| {0:15} {1:80} |'.format('description:', ''))
            log.info('|{0:98}|'.format(''))
            try:
                log.info('| {0:15} {1:80} |'.format(
                    'author:', build_cfg._externalplugin_setup.get_author()))
            except AttributeError:
                log.info('| {0:15} {1:80} |'.format('author:', ''))
            try:
                log.info('| {0:15} {1:80} |'.format(
                    'contact:',
                    build_cfg._externalplugin_setup.get_contact_email()))
            except AttributeError:
                log.info('| {0:15} {1:80} |'.format('contact:', ''))
            log.info('|{0:98}|'.format(''))
            log.info('-' * 100)
        build_script = build_cfg.build_script()
        utils.flush()
        build_script.run()
        utils.flush()
        log.info("build succeeded")
    else:
        log.info("build step skipped because of explicitly given option -B")
Example #31
   def __init__(self,myname,mynum,mychain,xvg_resnum,basedir,num_sims,max_angles,xvgorpdb,binwidth,sigalpha=1,
                permutations=0,phipsi=0,backbone_only=0,adaptive_partitioning=0,which_runs=None,pair_runs=None,bootstrap_choose=3,
                calc_variance=False, all_angle_info=None, xvg_chidir = "/dihedrals/g_chi/", skip=1, skip_over_steps=0, last_step=None, calc_mutinf_between_sims="yes", max_num_chis=99,
                sequential_res_num = 0, pdbfile = None, xtcfile = None, output_timeseries = "no", minmax=None, bailout_early = False, lagtime_interval = None, markov_samples = 250, num_convergence_points=1):
          
      global master_angles_matrix
      global test_reslist
      global xtc_coords 
      global last_good_numangles # last good value for number of dihedrals
      global NumChis, NumChis_Safe
      self.name = myname
      self.num = mynum
      self.chain = mychain
      self.xvg_basedir = basedir
      self.xvg_chidir = xvg_chidir
      self.xvg_resnum = xvg_resnum
      self.sequential_res_num = sequential_res_num
      self.backbone_only, self.phipsi = backbone_only, phipsi
      self.max_num_chis = max_num_chis
      self.markov_samples = markov_samples
      coarse_discretize = None
      split_main_side = None

      # we will look at mutual information convergence by taking linear subsets of the data instead of bootstraps, but use the bootstraps data structures and machinery. The averages over bootstraps then won't be meaningful
      # however the highest number bootstrap will contain the desired data -- this could be fixed later at the bottom of the code if desired
      # I also had to change some things in routines above that this code references in order to change numangles_bootstrap. We will essentially look at convergence by only looking at subsets of the data
      # in the weaves below, numangles will vary with 


      if(phipsi >= 0): 
             try:
                    self.nchi = self.get_num_chis(myname) * (1 - backbone_only) + phipsi * self.has_phipsi(myname)
             except:
                    NumChis = NumChis_Safe #don't use Ser/Thr hydroxyls for pdb trajectories
                    self.nchi = NumChis[myname] * (1 - backbone_only) + phipsi * self.has_phipsi(myname)
      elif(phipsi == -2):
             split_main_side = True
             if(self.chain == "S"):
                    self.nchi =  self.get_num_chis(myname)
             else:
                    self.nchi = 2 * self.has_phipsi(myname)
      elif(phipsi == -3):
             self.nchi = 3 #C-alpha x, y, z
      elif(phipsi == -4):
             print "doing analysis of stress data"
             self.nchi = 1 # just phi as a placeholder for a single variable
      else:             #coarse discretize phi/psi into 4 bins: alpha, beta, turn, other
             self.nchi = self.get_num_chis(myname) * (1 - backbone_only) + 1 * self.has_phipsi(myname)
             coarse_discretize = 1
             phipsi = 1
      if(xtcfile != None):
          self.nchi = 3 # x, y, z
      self.symmetry = ones((self.nchi),int16)
      self.numangles = zeros((num_sims),int32)
      self.num_sims = num_sims
      self.which_runs = array(which_runs)
      which_runs = self.which_runs
      #which_runs=array(self.which_runs)
      self.pair_runs = pair_runs
      self.permutations= permutations
      self.calc_mutinf_between_sims = calc_mutinf_between_sims
      if(bootstrap_choose == 0):
        bootstrap_choose = num_sims
      #print "bootstrap set size: "+str(bootstrap_choose)+"\n"
      #print "num_sims: "+str(num_sims)+"\n"
      #print self.which_runs
      #print "\n number of bootstrap sets: "+str(len(self.which_runs))+"\n"

      #check for free memory at least 15%
      #check_for_free_mem()
      
      #allocate stuff
      bootstrap_sets = self.which_runs.shape[0]

      #check num convergence points
      if num_convergence_points > 1:
             assert(num_convergence_points == bootstrap_sets)

      self.entropy =  zeros((bootstrap_sets,self.nchi), float64)
      self.entropy2 =  zeros((bootstrap_sets,self.nchi), float64) #entropy w/fewer bins
      self.entropy3 =  zeros((bootstrap_sets,self.nchi), float64) #entropy w/fewer bins
      self.entropy4 =  zeros((bootstrap_sets,self.nchi), float64) #entropy adaptive
      self.var_ent =  zeros((bootstrap_sets,self.nchi), float64)
      self.numangles_bootstrap = zeros((bootstrap_sets),int32)
      print "\n#### Residue: "+self.name+" "+self.num+" "+self.chain+" torsions: "+str(self.nchi), utils.flush()
      binwidth = float(binwidth)
      bins = arange(0,360, binwidth) #  bin edges global variable
      nbins=len(bins) # number of bins
      nbins_cor = int(nbins * FEWER_COR_BTW_BINS);
      self.nbins=nbins
      self.nbins_cor=nbins_cor
      sqrt_num_sims=sqrt(num_sims)
      self.chi_pop_hist=zeros((bootstrap_sets, self.nchi,nbins),float64)
      self.chi_counts=zeros((bootstrap_sets, self.nchi, nbins), float64) #since these can be weighted in advanced sampling
      #self.chi_var_pop=zeros((bootstrap_sets, self.nchi,nbins),float64)
      self.chi_pop_hist_sequential=zeros((num_sims, self.nchi, nbins_cor), float64)
      num_histogram_sizes_to_try = 2  # we could try more and pick the optimal size
      self.chi_counts_sequential=zeros((num_sims, self.nchi, nbins_cor), float64) #half bin size
      self.chi_counts_sequential_varying_bin_size=zeros((num_histogram_sizes_to_try, num_sims, self.nchi, int(nbins*(num_histogram_sizes_to_try/2)) ), float64) #varying bin size
      self.angles_input = zeros((self.nchi,num_sims,max_angles),float64)         # the dihedral angles, with a bigger array than will be needed later

      #self.sorted_angles = zeros((self.nchi,num_sims,max_angles),float64) # the dihedral angles sorted
      self.ent_hist_left_breaks = zeros((self.nchi, nbins * MULT_1D_BINS + 1),float64)
      self.adaptive_hist_left_breaks = zeros((bootstrap_sets, nbins + 1),float64) #nbins plus one to define the right side of the last bin
      self.adaptive_hist_left_breaks_sequential = zeros(( num_sims, nbins_cor + 1 ),float64)  #nbins_cor plus one to define the right side of the last bin
      self.adaptive_hist_binwidths = zeros((bootstrap_sets, nbins ),float64) 
      self.adaptive_hist_binwidths_sequential = zeros(( num_sims, nbins_cor ),float64)
      self.ent_hist_binwidths = zeros((bootstrap_sets, self.nchi, nbins * MULT_1D_BINS),float64)
      self.ent_from_sum_log_nn_dists = zeros((bootstrap_sets, self.nchi, MAX_NEAREST_NEIGHBORS),float64)
      self.minmax = zeros((2,self.nchi))
      self.minmax[1,:] += 1 #to avoid zero in divide in expand_contract angles



      if(phipsi >= 0): 
             self.nchi = self.get_num_chis(myname) * (1 - backbone_only) + phipsi * self.has_phipsi(myname)
      elif(phipsi == -2):
             split_main_side = True
             if(self.chain == "S"):
                    self.nchi =  self.get_num_chis(myname)
             else:
                    self.nchi = 2 * self.has_phipsi(myname)
      elif(phipsi == -3):
             self.nchi = 3 #C-alpha x, y, z
      elif(phipsi == -4):
             print "doing analysis of stress data"
             self.nchi = 1 # just phi as a placeholder for a single variable
      else:             #coarse discretize phi/psi into 4 bins: alpha, beta, turn, other
             self.nchi = self.get_num_chis(myname) * (1 - backbone_only) + 1 * self.has_phipsi(myname)
             coarse_discretize = 1
             phipsi = 1
      if(xtcfile != None):
          self.nchi = 3 # x, y, z
      self.symmetry = ones((self.nchi),int16)
      self.numangles = zeros((num_sims),int32)
      self.num_sims = num_sims
      self.which_runs = array(which_runs)
      which_runs = self.which_runs
      #which_runs=array(self.which_runs)
      self.pair_runs = pair_runs
      self.permutations= permutations
      self.calc_mutinf_between_sims = calc_mutinf_between_sims
      if(bootstrap_choose == 0):
        bootstrap_choose = num_sims
      #print "bootstrap set size: "+str(bootstrap_choose)+"\n"
      #print "num_sims: "+str(num_sims)+"\n"
      #print self.which_runs
      #print "\n number of bootstrap sets: "+str(len(self.which_runs))+"\n"

      #check for free memory at least 15%
      #check_for_free_mem()
      
      #allocate stuff
      bootstrap_sets = self.which_runs.shape[0]

      #check num convergence points
      if num_convergence_points > 1:
             assert(num_convergence_points == bootstrap_sets)

      
      self.numangles_bootstrap = zeros((bootstrap_sets),int32)
      print "\n#### Residue: "+self.name+" "+self.num+" "+self.chain+" torsions: "+str(self.nchi), utils.flush()

      if(xvgorpdb == "xvg"):
         self._load_xvg_data(basedir, num_sims, max_angles, xvg_chidir, skip,skip_over_steps,last_step, coarse_discretize, split_main_side)
      if(xvgorpdb == "pdb"):
         self._load_pdb_data(all_angle_info, max_angles)
      if(xvgorpdb == "xtc"):
         self.load_xtc_data(basedir, num_sims, max_angles, xvg_chidir, skip, skip_over_steps, pdbfile, xtcfile)

      #resize angles array to get rid of trailing zeros, use minimum number
      print "weights"
      print self.weights

      #print "resizing angles array, and creating arrays for adaptive partitioning" 
      min_num_angles = int(min(self.numangles))
      max_angles = int(min_num_angles)
      if(min_num_angles > 0):
             last_good_numangles = min_num_angles
      self.angles = zeros((self.nchi, num_sims, min_num_angles))
      angles_autocorrelation = zeros((self.nchi, bootstrap_sets, min_num_angles), float64)
      #bins_autocorrelation =   zeros((self.nchi, bootstrap_sets, min_num_angles), float64)
      self.boot_sorted_angles = zeros((self.nchi,bootstrap_sets,bootstrap_choose*max_angles),float64)
      self.boot_ranked_angles = zeros((self.nchi,bootstrap_sets,bootstrap_choose*max_angles),int32) 
      self.boot_weights = zeros((bootstrap_sets,bootstrap_choose*max_angles),float64) 
      self.rank_order_angles = zeros((self.nchi,num_sims,max_angles),int32) # the dihedral angles
                                                                        # rank-ordered with respect to all sims together
      self.rank_order_angles_sequential = zeros((self.nchi,num_sims,max_angles),int32) # the dihedral angles
                                                                        # rank-ordered for each sim separately
                                                                        # for mutinf between sims
      #max_num_angles = int(max(self.numangles))
      max_num_angles = int(min(self.numangles)) #to avoid bugs
      

      #counts_marginal=zeros((bootstrap_sets,self.nchi,nbins),float32) # normalized number of counts per bin, 


      #print "initialized angles_new array"
      self.numangles[:] = min(self.numangles)
      #print "new numangles"
      #print self.numangles
      for mychi in range(self.nchi):
             for num_sim in range(num_sims):
                    self.angles[mychi,num_sim,:min_num_angles] = self.angles_input[mychi,num_sim,:min_num_angles]
      #print "done copying angles over"
      del self.angles_input #clear up memory space
      #print self.angles
      
      
      

      if ((self.name in ("GLY", "ALA")) and (phipsi == 0 or phipsi == -2)): 
           # First prepare chi pop hist and chi pop hist sequential needed for mutual information -- just dump everything into one bin, 
           # giving entropy zero, which should also give MI zero, but serve as a placeholder in the mutual information matrix
           if(last_good_numangles > 0):  
                  self.numangles[:] = last_good_numangles # dummy value  
           self.numangles_bootstrap[:] = bootstrap_choose *  int(min(self.numangles))
           
                  
           
           return   #if the side chains don't have torsion angles, drop out

      self.sorted_angles = zeros((self.nchi, sum(self.numangles)),float64)
      
      if(xvgorpdb == "xvg" or (xvgorpdb == "pdb" and phipsi != -3)): #if not using C-alphas from pdb
             self.correct_and_shift_angles(num_sims,bootstrap_sets,bootstrap_choose, coarse_discretize)
      elif(xvgorpdb == "xtc" or (xvgorpdb == "pdb" and phipsi == -3)) : #if using xtc cartesians or pdb C-alphas 
             self.correct_and_shift_carts(num_sims,bootstrap_sets,bootstrap_choose, num_convergence_points)
             
      if(minmax == None):
             print "getting min/max values"
             mymin = zeros(self.nchi)
             mymax = zeros(self.nchi)
             for mychi in range(self.nchi):
                    mymin[mychi] =  min((self.angles[mychi,:,:min(self.numangles)]).flatten())
                    mymax[mychi] =  max((self.angles[mychi,:,:min(self.numangles)]).flatten())
             print "__init__ mymin: "
             print mymin
             print "__init__ mymax: "
             print mymax
             self.minmax[0, :] = mymin
             self.minmax[1, :] = mymax
             for mychi in range(self.nchi):
                    if(self.minmax[1,mychi] - self.minmax[0,mychi] <= 0):
                           self.minmax[1,mychi] = self.minmax[0,mychi] + 1
             print self.minmax
      else:
             self.minmax = minmax
             self.expand_contract_data(num_sims,bootstrap_sets,bootstrap_choose)
      
      if(master_angles_matrix == None):
          master_angles_matrix=zeros((run_params.num_sims, len(test_reslist), min_num_angles),float64) #nchi is 1 for stress analysis

      
      master_angles_matrix[:,sequential_res_num,:] = self.angles[0,:,:]
      print "master angles matrix: "
      print master_angles_matrix[0]
      del self.angles #cleanup
      del self.rank_order_angles
      del self.rank_order_angles_sequential
      del self.sorted_angles
      del self.boot_sorted_angles
      del self.boot_ranked_angles
Example #32
        print(TAG(), "thread_backup:", "START BACKUP ROUTINE")
        watcher_manager.save_watchers()
        print(TAG(), "thread_backup:", "BACKUP COMPLETED")

        flush()


updater = Updater(token=TOKEN)
updater.dispatcher.add_handler(CommandHandler('set', set_function))
updater.dispatcher.add_handler(CommandHandler('del', del_function))
updater.dispatcher.add_handler(CommandHandler('clear', clear_function))
updater.dispatcher.add_handler(CommandHandler('list', list_function))
updater.dispatcher.add_handler(CommandHandler('start', start_function))
updater.dispatcher.add_handler(CommandHandler('stop', stop_function))

# set watchers backup
t = Thread(target=backup)
t.start()

# start watchers thread
watcher_manager.start()

# start telegram
updater.start_polling()
print(TAG(), "Bot started!")

flush()

# start idle
updater.idle()
Example #33
#!/usr/bin/python
# -*- coding: utf-8 -*-
from itertools import islice
import neo4j

import utils

if __name__ == '__main__':
    driver = neo4j.GraphDatabase.driver(uri='bolt://35.204.164.2:7687', auth=('neo4j', 'cdert_433450'))
    query = utils.clear()
    # stage 1
    utils.flush(query, driver=driver)

    # stage 2
    with open(r'C:\Users\user\PycharmProjects\untitled1\wikidata\data.json') as f:
        buffer = f.read()
    driver.close()
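In this example flush() receives a Cypher query and a driver, so it presumably executes the statement rather than flushing a stream. A hedged sketch of such a helper using the neo4j driver API (the real utils.clear()/utils.flush() are not shown):

def flush(query, driver=None):
    # Assumed behaviour: run the given Cypher statement and let the
    # auto-commit transaction complete before returning.
    with driver.session() as session:
        session.run(query)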
Example #34
def output_timeseries_chis(myfilename_prefix,myreslist,colnames, nsims = 6, nchi=6, ):
   #print "shape of matrix to ouput:"+str(mymatrix.shape)+"\n"
   min_num_angles = min(myreslist[0].numangles)
   timeseries_chis_matrix = zeros((nsims, len(myreslist) * nchi, min_num_angles), float64) #initialize
   #self.angles = zeros((self.nchi,num_sims,max_angles),float64)         # the dihedral angles

   for res_ind1, myres1 in zip(range(len(myreslist)), myreslist):
      print "\n#### Working on residue %s (%s):" % (myres1.num, myres1.name) , utils.flush()
      for mychi1 in range(myres1.nchi):
         #print "%s chi: %d/%d" % (myres1.name,int(myres1.num),mychi1+1)
         #print "res_ind1: "+str(res_ind1)
         #print "mychi1: "+str(mychi1)
         #print "nchi: " +str(nchi)
         #print "min_num_angles: "+str(min_num_angles)
         #print "res_ind1 * nchi + mychi1: "+str(res_ind1 * nchi + mychi1)
         #print "myres1.angles: "
         #print myres1.angles
         #print "angle entries: "
         #print myres1.angles[mychi1, :, :min_num_angles]
         timeseries_chis_matrix[:, res_ind1 * nchi + mychi1, :] = myres1.angles[mychi1, :, :min_num_angles]
   my_file_list = []                           
   for mysim in range(nsims):
          myfile = open(myfilename_prefix + "_" + str(mysim) + ".txt",'w')
          for col_num, col_name in zip(range(len(colnames)), colnames):
                 for col_chi in range(nchi):
                        myfile.write(col_name + "_" +str(col_chi) + " ")
                 myfile.write("\n")
   
          for myrow in range(min_num_angles):
                 for col_num, col_name in zip(range(len(colnames)), colnames):
                        for col_chi in range(nchi):  
                               myfile.write(str(timeseries_chis_matrix[mysim,col_num * nchi + col_chi, myrow]))
                               myfile.write(" ")
                 myfile.write("\n")
          myfile.close()

   return timeseries_chis_matrix
Example #35
                info = engineInfo.read_info_file(engine_path)
                executor = executors.make_executor(info)

                for config_name in options.configs:
                    config = configs.getConfig(config_name, info)
                    if config.omit():
                        continue

                    try:
                        results = executor.run(benchmark, config)
                        if not results:
                            continue
                    except Exception as e:
                        log('Failed to run ' + engine_path + ' - ' + benchmark.version + ' - ' + config_name + '!')
                        log('Exception: ' +  repr(e))
                        import traceback
                        traceback.print_exc()
                        continue

                    mode = submitter.mode(info["engine_type"], config_name)
                    submitter.add_tests(results, benchmark.suite, benchmark.version, mode)

                    # Try to preserve order of logs.
                    utils.flush()

        if not options.session:
            submitter.finish()

        log("my work is done here!")
        utils.flush()