Exemplo n.º 1
0
async def restart(ctx):
    """Restart the bot, optionally applying a pending update first.

    If ``update_bot(True)`` reports an update, asks the invoking user
    (y/n, 10 s timeout) whether to download it on restart; answering 'y'
    writes 'update' to quit.txt so the external wrapper applies it.
    Always records the channel id in restart.txt (presumably so the
    restarted bot can announce itself there), kills any tracked
    subprocess, and hard-exits the process.
    """
    def check(msg):
        # Accept only an explicit y/n reply; ignore everything else.
        if not msg:
            return False
        return msg.content.lower().strip() in ('y', 'n')

    channel = ctx.message.channel
    latest = update_bot(True)
    if latest:
        await bot.send_message(channel, bot.bot_prefix + 'There is an update available for the bot. Download and apply the update on restart? (y/n)')
        reply = await bot.wait_for_message(timeout=10, author=ctx.message.author, check=check)
        # Remember where the command came from for the post-restart notice.
        # (Handle renamed from `re` to avoid shadowing the stdlib module.)
        with open('restart.txt', 'w', encoding="utf8") as restart_file:
            restart_file.write(str(channel.id))
        if not reply or reply.content.lower().strip() == 'n':
            print('Restarting...')
            await bot.send_message(channel, bot.bot_prefix + 'Restarting...')
        else:
            await bot.send_message(channel, content=None, embed=latest)
            # Signal the wrapper script to perform the update on relaunch.
            with open('quit.txt', 'w', encoding="utf8") as quit_file:
                quit_file.write('update')
            print('Downloading update and restarting...')
            await bot.send_message(channel, bot.bot_prefix + 'Downloading update and restarting (check your console to see the progress)...')

    else:
        print('Restarting...')
        with open('restart.txt', 'w', encoding="utf8") as restart_file:
            restart_file.write(str(channel.id))
        await bot.send_message(channel, bot.bot_prefix + 'Restarting...')

    if bot.subpro:
        bot.subpro.kill()
    # Hard exit: the external wrapper is expected to relaunch the bot.
    os._exit(0)
Exemplo n.º 2
0
async def update(ctx, msg: str = None):
    """Update the bot if there is an update available.

    ``msg == 'show'`` only displays the update embed without applying it;
    any other value (or None) downloads the update and restarts the bot
    by writing quit.txt/restart.txt and hard-exiting.
    """
    show_only = (msg == 'show')
    latest = update_bot(False) if show_only else update_bot(True)
    if not latest:
        await ctx.send(bot.bot_prefix + 'The bot is up to date.')
        return

    if show_only:
        try:
            await ctx.send(content=None, embed=latest)
        except Exception:
            # Embed send may fail (e.g. missing permissions); nothing else to do.
            pass
        return

    if embed_perms(ctx.message):
        try:
            await ctx.send(content=None, embed=latest)
        except Exception:
            pass
    await ctx.send(
        bot.bot_prefix +
        'There is an update available. Downloading update and restarting (check your console to see the progress)...'
    )
    # Signal the wrapper to apply the update, and remember the channel so
    # the restarted bot can report back there.
    with open('quit.txt', 'w', encoding="utf8") as quit_file:
        quit_file.write('update')
    with open('restart.txt', 'w', encoding="utf8") as restart_file:
        restart_file.write(str(ctx.message.channel.id))
    if bot.subpro:
        bot.subpro.kill()
    os._exit(0)
Exemplo n.º 3
0
    def test_new(self):
        """Write two red-formatted RE regions to a fresh worksheet, then close."""
        frame = cell.add_worksheet("frame0")
        # Local renamed from `re` to avoid shadowing the stdlib re module.
        region = RE(1, 2, 0, cell=cell)
        region.reFormat.set_fg_color("red")
        region.write(frame)

        region = RE(3, 7, cell=cell)
        region.reFormat.set_fg_color("red")
        region.write(frame)
        print(region)
        cell.close()
Exemplo n.º 4
0
def simple_upload(remote_info, data):
    """Upload generated files to specified host using rsync.

    Builds an rsync command that copies each entry of ``data['to_copy']``
    (file or directory) from ``data['directory']`` to
    ``user@host:dir`` taken from ``remote_info``. stdout/stderr of rsync
    are appended to rsync_transfer.out/.err under ``remote_info['log_dir']``
    (default: current directory). Raises subprocess.CalledProcessError if
    rsync exits non-zero.
    """
    include = ['--include=*/']
    for fcopy in data['to_copy']:
        include.extend(["--include", "{}**/*".format(fcopy)])
        include.append("--include={}".format(fcopy))
    # By including both these patterns we get the entire directory
    # if a directory is given, or a single file if a single file is
    # given.

    cl = ["rsync",
          "--checksum",
          "--recursive",
          "--links",
          "-D",
          "--partial",
          "--progress",
          "--prune-empty-dirs"
          ]

    # file / dir inclusion specification
    cl.extend(include)
    cl.append("--exclude=*")

    # source and target
    cl.extend([
          # source
          data["directory"],
          # target
          "{store_user}@{store_host}:{store_dir}".format(**remote_info)
         ])

    logdir = remote_info.get("log_dir", os.getcwd())
    rsync_out = os.path.join(logdir, "rsync_transfer.out")
    rsync_err = os.path.join(logdir, "rsync_transfer.err")
    # Context managers ensure both log handles are closed even when rsync
    # fails (the original leaked them on error), and the stderr handle is
    # no longer named `re`, which shadowed the stdlib module.
    with open(rsync_out, 'a') as out_log, open(rsync_err, 'a') as err_log:
        out_log.write("-----------\n{}\n".format(" ".join(cl)))
        err_log.write("-----------\n{}\n".format(" ".join(cl)))
        out_log.flush()
        err_log.flush()
        try:
            subprocess.check_call(cl, stdout=out_log, stderr=err_log)
        except subprocess.CalledProcessError as e:
            # `except X, e` was Python-2-only syntax; `as e` works everywhere.
            logger2.error("rsync transfer of {} FAILED with (exit code {}). "
                          "Please check log files {:s} and {:s}".format(data["directory"],
                                                                        str(e.returncode),
                                                                        rsync_out,
                                                                        rsync_err))
            raise
Exemplo n.º 5
0
def process_second_read(*args, **kwargs):
    """Processing to be performed after all reads have been sequenced.

    Expects ``args[0:2]`` to be (run directory, config dict). When the
    ``casava`` kwarg is truthy (and ``no_casava_processing`` is not),
    runs bcl->fastq conversion/demultiplexing with Casava 1.8+, merges
    the demux results, and — if ``config['mfs_dir']`` is set — rsyncs the
    Unaligned folder to MooseFS, logging rsync output to
    rsync_transfer.out/.err.
    """
    dname, config = args[0:2]
    logger2.info("The instrument has finished dumping on directory %s" % dname)

    utils.touch_indicator_file(os.path.join(dname, "second_read_processing_started.txt"))
    _update_reported(config["msg_db"], dname)
    fastq_dir = None

    # Do bcl -> fastq conversion and demultiplexing using Casava1.8+
    if kwargs.get("casava", False) and not kwargs.get("no_casava_processing", False):
        logger2.info("Generating fastq.gz files for {:s}".format(dname))
        _generate_fastq_with_casava(dname, config)
        # Merge demultiplexing results into a single Unaligned folder
        utils.merge_demux_results(dname)
        # Move the demultiplexing results to MooseFS, if configured.
        # (`has_key` was Python-2-only; `in` works on both.)
        if 'mfs_dir' in config:
            fc_id = os.path.basename(dname)
            cl = ["rsync",
                  "--checksum",
                  "--recursive",
                  "--links",
                  "-D",
                  "--partial",
                  "--progress",
                  "--prune-empty-dirs",
                  os.path.join(dname, 'Unaligned'),
                  os.path.join(config.get('mfs_dir'), fc_id)
                  ]
            logger2.info("Synching Unaligned folder to MooseFS for run {}".format(fc_id))
            # NOTE(review): os.getcwd() is absolute, so os.path.join discards
            # config['log_dir'] entirely — this looks like a bug (a relative
            # subdirectory was probably intended). Kept as-is; confirm intent.
            logdir = os.path.join(config.get('log_dir'), os.getcwd())
            rsync_out = os.path.join(logdir, "rsync_transfer.out")
            rsync_err = os.path.join(logdir, "rsync_transfer.err")

            # Handle renamed from `re` to avoid shadowing the stdlib module.
            with open(rsync_out, 'a') as out_log, open(rsync_err, 'a') as err_log:
                try:
                    out_log.write("-----------\n{}\n".format(" ".join(cl)))
                    err_log.write("-----------\n{}\n".format(" ".join(cl)))
                    subprocess.check_call(cl, stdout=out_log, stderr=err_log)
                except subprocess.CalledProcessError:
                    # `except X, e` was Python-2-only syntax; the bound
                    # exception was unused, so it is dropped here.
                    logger2.error("rsync transfer of Unaligned results FAILED")
Exemplo n.º 6
0
import re

# Split an arithmetic string into number/operator chunks, round-trip them
# through a file, and print the reconstructed string.
a = "2+1+3+3123123"
# Each match is "digits followed by optional +/- signs"; the pattern can
# match empty, so findall also yields a trailing '' at end of string.
r = re.findall(r"\d{0,}[\+\-]{0,}", a)
print(r)
# Handle renamed from `re` (which shadowed the regex module just used above);
# a single joined write replaces the per-chunk write loop.
with open("re.txt", "w") as out:
    out.write("".join(r))
with open("re.txt", "r") as fh:
    r = fh.read()
print(r)
Exemplo n.º 7
0
def denotate(s: str = 'test'):
    """Delexicalize ("denotate") the *_<s>.qu question file and *_<s>.lon
    logic file for the current `sub`, writing new <s>.qu and <s>.lon files
    (plus *_<s>0.lon with the kept original logic lines).

    Relies on module-level names defined elsewhere in this file: `path`,
    `sub`, `get_fields`, `_chech_sketch`, and `gfile`. Only entries whose
    logic line passes `_chech_sketch` are written; the final assert checks
    both outputs got the same number of lines.
    """

    count1, count2 = 0, 0
    all_fields, _ = get_fields()
    all_fields = all_fields[sub]

    ta_file = os.path.join(path, '%s_%s.ta' % (sub, s))
    qu_file = os.path.join(path, '%s_%s.qu' % (sub, s))
    #newly generated question file
    question_file = os.path.join(path, '%s.qu' % s)
    lon_file = os.path.join(path, '%s_%s.lon' % (sub, s))
    #original lon file with qualified lon
    lon_file0 = os.path.join(path, '%s_%s0.lon' % (sub, s))

    # NOTE: the output handle is named `re`, shadowing the stdlib re module
    # inside this block.
    with gfile.GFile(ta_file, mode='r') as t, gfile.GFile(
            qu_file, mode='r') as q, gfile.GFile(
                question_file, mode='w') as re, gfile.GFile(
                    lon_file0, mode='w') as lon0, gfile.GFile(lon_file,
                                                              mode='r') as lon:
        templates = t.readlines()
        questions = q.readlines()
        lons = lon.readlines()
        assert len(templates) == len(questions)
        for template, question, lon in zip(templates, questions, lons):
            t_tokens = template.split()
            q_tokens = question.split()
            assert len(t_tokens) == len(q_tokens)
            new = ''
            for t_token, q_token in zip(t_tokens, q_tokens):
                if t_token == '<nan>' or t_token == '<count>':
                    # Pass-through slots keep the original question token.
                    new += q_token
                else:
                    # Template tokens appear to be colon-separated triples;
                    # emit a tag from the kind letter and the third field.
                    words = t_token.split(':')
                    new += ('<' + words[0][1] + words[2] + '>')
                    if words[0][1] == 'f':
                        #if words[0][1]=='f' or words[0][1]=='v':
                        new += ' '
                        new += q_token
                        new += ' '
                        new += '<eof>'
                new += ' '
            new += '<eos> '

            # Append the column vocabulary: <c0> field0 <eoc> <c1> field1 ...
            for i, f in enumerate(all_fields):
                new += '<c' + str(i) + '> ' + f + ' <eoc> '

            if _chech_sketch(lon):
                re.write(new + '\n')
                lon0.write(lon)
                count1 += 1

    print('question file done.')

    lox_file = os.path.join(path, '%s_%s.lox' % (sub, s))
    lon_file = os.path.join(path, '%s_%s.lon' % (sub, s))
    #newly generated logic file
    lo_file = os.path.join(path, '%s.lon' % s)
    with gfile.GFile(lox_file, mode='r') as lox, gfile.GFile(
            lon_file, mode='r') as lon, gfile.GFile(lo_file, mode='w') as re:
        loxs = lox.readlines()
        lons = lon.readlines()
        n = len(lons)
        error = 0

        assert len(lons) == len(loxs)
        #newline is redenotdated file
        for lox, lon in zip(loxs, lons):
            lo_tokens = lox.split()
            lon_tokens = lon.split()

            # NOTE(review): `template` and `question` here are leftovers from
            # the last iteration of the previous loop; these three lines look
            # like dead code — confirm before removing.
            t_tokens = template.split()
            q_tokens = question.split()
            assert len(t_tokens) == len(q_tokens)

            new = ''
            for idx, (lo_token,
                      lon_token) in enumerate(zip(lo_tokens, lon_tokens)):
                if ':' in lo_token and len(lo_token.split(':')) == 3:
                    words = lo_token.split(':')
                    if False and words[0][1] == 'f':
                        # Unreachable branch (guarded by `False and ...`);
                        # presumably a disabled variant kept for reference.
                        new += ('<' + words[0][1] + words[2] + '>')
                        new += ' '
                        new += lon_token
                        new += ' '
                        new += '<eof>'
                    else:
                        new += ('<' + words[0][1] + words[2] + '>')
                elif lo_token == '<count>':
                    new += lon_token
                elif lo_token == 'true':
                    # NOTE(review): `ws` is computed but never used; the next
                    # line reads `words` from an earlier token instead —
                    # probably `ws[2]` was intended. Confirm before changing.
                    ws = lo_tokens[idx - 2].split(':')
                    new += ('<v' + words[2] + '>')
                else:
                    new += lo_token
                new += ' '

            if _chech_sketch(lon):
                re.write(new + '\n')
                count2 += 1

    assert count1 == count2
    print('logic file done.')
Exemplo n.º 8
0
import pandas as pd
from collections import Counter
import re
import os
import json

# Ensemble MRC answers: load every per-model result JSON, then for each
# question id take the most common non-empty answer across models.
res_list = []
file_path = '/home/none404/hm/baidu_qa/MRC_result/MRC/'
file_list = os.listdir(file_path)
for file_name in file_list:
    with open(os.path.join(file_path, file_name), 'r') as f:
        res_list.append(json.load(f))

c = 0  # number of ids where every model returned an empty answer
sub_dict = {}
# NOTE(review): iterates the keys of the *last* loaded dict, assuming all
# result files share the same key set — confirm.
for qid in res_list[-1].keys():
    # `qid` replaces the original `id`, which shadowed the builtin.
    # Filtering with a comprehension replaces the O(n^2)
    # `while '' in ...: remove('')` loop.
    answers = [d[qid] for d in res_list]
    answers = [a for a in answers if a != '']
    if not answers:
        c += 1
        answer = ''
    else:
        answer = Counter(answers).most_common()[0][0]
    sub_dict[qid] = answer

# Output handle renamed from `re`, which shadowed the stdlib module
# imported at the top of this snippet.
with open('/home/none404/hm/baidu_qa/ensemble_result/' + 'sub_result_file.json', 'w') as out:
    out.write(json.dumps(sub_dict, ensure_ascii=False, indent=4))
Exemplo n.º 9
0
    # Compile body
    body = data.replace("{name}", name )\
      .replace("{fname}", fname )\
      .replace("{lname}", lname )\
      .replace("{email}", email)\
      .replace("{date}",datetime.datetime.today().strftime("%d/%m/%Y"))\
      .replace("{b64email}",base64.b64encode(email))\
      .replace("{b64remail}",base64.b64encode(email)[::-1])

    if re.search("{randomint}", body):
        ri = random.randint(1, 9999999)
        print "Random integer: " + email + " : " + str(ri)
        body = body.replace("{randomint}", str(ri))
        randomints = True
        fp = open(intsfile, "a")
        re.write(email + ":" + str(ri))
        fp.close()

    msg.attach(MIMEText(body, "html"))
    if args.text:
        msg.attach(MIMEText(html2text.html2text(body), 'plain'))

    # Find any embedded images and attach
    attachments = re.findall('src="cid:([^"]+)"', body)
    for attachment in attachments:
        fp = open(attachment, "rb")
        img = MIMEImage(fp.read())
        fp.close()
        img.add_header('Content-ID', attachment)
        msg.attach(img)