Example #1
def ffunc(w0, w1, keyyielder):
    # Round function: run both halves through gfunc, then mix them with a
    # pseudo-Hadamard-style transform plus round-key words, reduced modulo
    # c.twosquaresixteen (presumably 2**16).
    t0 = gfunc(w0, keyyielder)
    t1 = gfunc(w1, keyyielder)
    f0 = (t0 + 2 * t1 +
          c.concat(keyyielder(), keyyielder())) % c.twosquaresixteen
    f1 = (2 * t0 + t1 +
          c.concat(keyyielder(), keyyielder())) % c.twosquaresixteen
    return [f0, f1]
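
The key schedule behind keyyielder never appears in these examples; every call site treats it as a zero-argument callable returning the next key byte. A minimal sketch under that assumption (the cycling schedule is hypothetical, not the original):

import itertools

def make_keyyielder(key_bytes):
    # Hypothetical key schedule: each call returns the next key byte,
    # cycling through the key material indefinitely (assumed behavior).
    it = itertools.cycle(key_bytes)
    return lambda: next(it)

keyyielder = make_keyyielder([0x3A, 0x94, 0xD6, 0x0F])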
Example #2
def pool_run(target, argss):
    # Requires module-level imports: multiprocessing, Queue (Python 2), time.
    procs = []
    ques = []
    for args in argss:
        # Each process gets its own queue, prepended to its argument list.
        ques.append(multiprocessing.JoinableQueue())
        procs.append(multiprocessing.Process(
            target=target, args=common.concat([[ques[-1]], args])))
    for p in procs:
        p.daemon = True
        p.start()
    while True:
        logger.error("######### pool_run loop begin")
        for que in ques:
            logger.error("######### Checking QUEUE")
            try:
                # Drain everything currently in the queue without blocking.
                while True:
                    logger.error("######### Checking QUEUE while loop")
                    logstd.info("gotque={0}".format(que.get(block=False)))
                    que.task_done()
            except Queue.Empty:
                pass
        logger.error("######### sleep BEGIN")
        time.sleep(1)
        logger.error("######### sleep END")
        # Join and drop processes that have exited; stop when none remain.
        deadps = [i for (i, p) in enumerate(procs) if not p.is_alive()]
        logger.error("######### deadps = {0}".format(deadps))
        for i in deadps:
            logger.error("######### joining deadps[{0}]".format(i))
            procs[i].join()
        procs = [p for (i, p) in enumerate(procs) if i not in deadps]
        if not procs:
            break
        logger.error("######### pool_run loop end")
    return
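
Because pool_run prepends each process's JoinableQueue to its argument list, the target must take the queue as its first parameter. A minimal usage sketch with a hypothetical worker:

def worker(que, name, count):
    # Hypothetical target: receives the queue pool_run prepends, then its args.
    for i in range(count):
        que.put("{0}:{1}".format(name, i))

pool_run(worker, [("alpha", 3), ("beta", 5)])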
Example #3
def compress_web(out_file, input_fnames, level=DEFAULT_LEVEL, verbose=False):
    """
    Compresses files using Google's Closure web service. It has caveats:
    no big wads of JS, and it has a quota.
    """

    out_file = out_file or OUT_FILE

    p('Compressing %s ...' % out_file, verbose)
    s = StringIO.StringIO()

    # Crude way to avoid compressing the entire thing at once: send the
    # filenames to the service in batches of `parts`.
    # "What if I have big files!?" you say. Well, make them smaller.
    parts = 4
    for i in range(0, len(input_fnames), parts):
        compressed = compress(common.concat(input_fnames[i:i + parts]),
                              COMPILATION_LEVELS[level])
        s.write(compressed)

    s = s.getvalue()
    p('Compressed to %d' % len(s), verbose)

    with open(out_file, 'w') as out:
        out.write(s)
    return 0
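
The batching is easier to see in isolation; the filenames below are made up:

# How the range(0, n, parts) loop slices the input list into batches.
fnames = ['a.js', 'b.js', 'c.js', 'd.js', 'e.js']
parts = 4
batches = [fnames[i:i + parts] for i in range(0, len(fnames), parts)]
# batches == [['a.js', 'b.js', 'c.js', 'd.js'], ['e.js']]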
Example #4
def gfunc(w, keyyielder):
    # Split the 16-bit word into its two bytes, then run four rounds of a
    # byte-wide Feistel-style mix through the fvalue S-box lookup.
    g1 = c.getbyteAt(w, 1)
    g2 = c.getbyteAt(w, 0)
    g3 = c.fvalue(g2 ^ keyyielder()) ^ g1
    g4 = c.fvalue(g3 ^ keyyielder()) ^ g2
    g5 = c.fvalue(g4 ^ keyyielder()) ^ g3
    g6 = c.fvalue(g5 ^ keyyielder()) ^ g4
    return c.concat(g5, g6)
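
The c helper module is never shown. Judging purely from the call sites in Examples #1, #4, and #6, getbyteAt and concat plausibly behave as below; these definitions are inferred assumptions, not the actual module:

def getbyteAt(word, idx, nbytes=1):
    # Assumed: extract unit `idx` of width `nbytes` bytes (0 = least significant).
    return (word >> (8 * nbytes * idx)) & ((1 << (8 * nbytes)) - 1)

def concat(hi, lo, nbytes=1):
    # Assumed: join two `nbytes`-wide units, `hi` in the high half.
    return (hi << (8 * nbytes)) | lo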
Example #5
def parse(rdict, backlog, pipe):
    cols = ""
    # First pass: replay any backlog lines.
    for rawline in backlog:
        line = rawline.strip()
        if not line:
            continue
        ev = rdict[line]
        if not ev or "junk" in ev[1].groupdict():
            continue
        if "cols" in ev[1].groupdict():
            # Remember the most recent header row; it names the columns.
            cols = ev[1].groupdict()["cols"]
            continue
        if "vals" in ev[1].groupdict():
            # Pair each column name with its value and stamp the record time.
            yield dict(common.concat([
                zip(
                    cols.strip().split(),
                    ev[1].groupdict()["vals"].strip().split()
                ),
                [("time", common.gentime() if not ev[1].groupdict()["time"]
                  else ev[1].groupdict()["time"])]
            ]))
    # Second pass: stream live lines from the pipe.
    while True:
        rawline = pipe.stdout.readline()
        if not rawline:
            # EOF on stdout: stop unless poll() reports a nonzero returncode.
            if not pipe.poll():
                return
            else:
                continue
        line = rawline.strip()
        logger.debug("getline=[{0}]".format(
            line.replace("\n", "\\n").replace("\t", "\\t")))
        if not line:
            continue
        ev = rdict[line]
        if not ev or "junk" in ev[1].groupdict():
            continue
        if "cols" in ev[1].groupdict():
            cols = ev[1].groupdict()["cols"]
            continue
        if "vals" in ev[1].groupdict():
            yield dict(common.concat([
                zip(
                    cols.strip().split(),
                    ev[1].groupdict()["vals"].strip().split()
                ),
                [("time", common.gentime() if not ev[1].groupdict()["time"]
                  else ev[1].groupdict()["time"])]
            ]))
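
common.regex_dict is not shown either. The way parse indexes it (rdict[line] yields either a falsy value or a pair whose second element is a regex match) suggests an interface like the following; this is a guess, not the real implementation:

import re

class RegexDict(object):
    # Hypothetical stand-in for common.regex_dict: indexing with a text line
    # returns (value, match) for the first matching pattern, else None.
    def __init__(self, patterns):
        self._items = [(re.compile(p), v) for p, v in patterns.items()]

    def __getitem__(self, line):
        for rx, value in self._items:
            m = rx.match(line)
            if m:
                return (value, m)
        return None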
Example #6
def __encrypt__(p, k, keyyielder):
    # Pre-whiten the plaintext, then split it into four 16-bit words.
    w = c.whitening(p, k)
    arr = [
        c.getbyteAt(w, 3, 2),
        c.getbyteAt(w, 2, 2),
        c.getbyteAt(w, 1, 2),
        c.getbyteAt(w, 0, 2)
    ]
    for rnd in range(c.numround):
        arr = roundfunc(arr, keyyielder)
    # Recombine with the final half-swap, then post-whiten.
    y = c.concat(c.concat(arr[2], arr[3], 2), c.concat(arr[0], arr[1], 2), 4)
    ci = c.whitening(y, k)
    return [
        c.getbyteAt(ci, 3, 2),
        c.getbyteAt(ci, 2, 2),
        c.getbyteAt(ci, 1, 2),
        c.getbyteAt(ci, 0, 2)
    ]
Example #7
def main(opts):
    if not opts["args"]:
        sys.exit()

    pool_run(collector_loop, [(
        CMDS[arg].format(interval=opts["interval"]) if arg in CMDS else arg,  # cmd
        common.regex_dict(dict(common.concat(
            [d.items() for d in [pars_vmstat.rdict, pars_sar.rdict]]))),  # rdict
        "{0}.{1}".format(arg, common.nowstr("%Y%m%dT%H%M%S")).replace(" ", "_")  # logname
    ) for arg in opts["args"]])
Example #8
def compress_local(out_file, input_fnames, type=TYPE_JS, verbose=False, level=None):
    """
    Concatenates the input files and compresses them locally, writing the
    result to out_file.
    """

    out_file = out_file or ('compressed.' + TYPE_JS)
    p('Compressing %s ...' % out_file, verbose)

    s = common.concat(input_fnames)
    s = compress(s, type)
    with open(out_file, 'w') as out:
        out.write(s)
Example #9
def _expand_tuple(left, tuple_):
    from itertools import repeat
    from pipes import Worker
    from common import concat

    def find_left_end(node):
        # Walk down the .left chain to the leftmost node of the pipeline.
        while hasattr(node, 'left'):
            node = node.left
        return node

    iters = []
    for it in tuple_:
        leftmost = find_left_end(it)

        if isinstance(leftmost, Worker):
            # A worker on the left side is unbound, so we automatically
            # bind it to 'left'.
            iters.append(left | concat(it))
        elif not hasattr(it, '__iter__'):
            # A non-iterable is a literal value; repeat it indefinitely.
            iters.append(repeat(it))
        else:
            # We already have an iterator we can 'execute'.
            iters.append(it)
    return iters
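
A toy illustration of the two mechanics above, with made-up classes: find_left_end walks the .left chain to the pipeline's leftmost node, and repeat turns a literal into an endless iterator that can be zipped against real streams:

from itertools import islice, repeat

class Node(object):
    # Made-up pipeline node; only the .left attribute matters here.
    def __init__(self, left):
        self.left = left

def find_left_end(node):
    while hasattr(node, 'left'):
        node = node.left
    return node

print(find_left_end(Node(Node("leaf"))))  # -> leaf
print(list(islice(repeat(42), 3)))        # -> [42, 42, 42]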
Example #10
def build_lanenet2(input_shape,
                   input_shape1=[128, 128, 3],
                   input_shape2=[64, 64, 3],
                   input_shape3=[32, 32, 3],
                   input_shape4=[16, 16, 3],
                   one_hot_label=False):
    print(input_shape, 'Input size shape ~')

    inputs1 = Input((input_shape))
    inputs2 = Input((input_shape))
    n_filters = 64

    inputs_waves1 = Input((input_shape1))
    inputs_waves2 = Input((input_shape2))
    inputs_waves3 = Input((input_shape3))
    inputs_waves4 = Input((input_shape4))
    #wavelet1, wavelet2, wavelet3, wavelet4 = C.lanenet_wavelet(inputs)

    res_1_1 = resnet_block(inputs1, n_filters)
    skip_1_1 = downBlock(res_1_1)
    skip_1_1 = C.concat(skip_1_1, inputs_waves1)

    res_2_1 = resnet_block(inputs2, n_filters)
    skip_2_1 = downBlock(res_2_1)

    res_1_2 = resnet_block(skip_1_1, 2 * n_filters, blocks=1)
    skip_1_2 = downBlock(res_1_2)
    skip_1_2 = C.concat(skip_1_2, inputs_waves2)

    res_2_2 = resnet_block(skip_2_1, 2 * n_filters, blocks=1)
    skip_2_2 = downBlock(res_2_2)

    res_1_3 = resnet_block(skip_1_2, 4 * n_filters, blocks=1)
    skip_1_3 = downBlock(res_1_3)
    skip_1_3 = C.concat(skip_1_3, inputs_waves3)

    res_2_3 = resnet_block(skip_2_2, 4 * n_filters, blocks=1)
    skip_2_3 = downBlock(res_2_3)

    res_1_4 = resnet_block(skip_1_3, 8 * n_filters)
    skip_1_4 = downBlock(res_1_4)
    skip_1_4 = C.concat(skip_1_4, inputs_waves4)

    res_2_4 = resnet_block(skip_2_3, 8 * n_filters)
    skip_2_4 = downBlock(res_2_4)

    res_1_5 = resnet_block(skip_1_4, 8 * n_filters)
    skip_1_5 = downBlock(res_1_5)
    # skip_1_4 = C.concat(skip_1_4, inputs_waves4)

    res_2_5 = resnet_block(skip_2_4, 8 * n_filters)
    skip_2_5 = downBlock(res_2_5)

    bridge_merge = add(skip_1_5, skip_2_5)
    bridge = resnet_block(bridge_merge, 16 * n_filters)

    # Reference Architecture

    up_4 = upBlock(bridge, 8 * n_filters)
    merge_4 = concat(up_4, skip_1_4, skip_2_4, third=True)
    dec_4 = resnet_block(merge_4, 8 * n_filters)

    up_3 = upBlock(dec_4, 4 * n_filters)
    merge_3 = concat(up_3, skip_1_3, skip_2_3, third=True)
    dec_3 = resnet_block(merge_3, 4 * n_filters)

    up_2 = upBlock(dec_3, 2 * n_filters)
    merge_2 = concat(up_2, skip_1_2, skip_2_2, third=True)
    dec_2 = resnet_block(merge_2, 2 * n_filters)

    up_1 = upBlock(dec_2, n_filters)
    merge_1 = concat(up_1, skip_1_1, skip_2_1, third=True)
    dec_1 = resnet_block(merge_1, n_filters)
    # dec_1 = add(res_1_1, res_2_1, dec_1)

    up_final = upBlock(dec_1, n_filters)

    net = up_final
    #merge1 = C.conv_block(merge1, n_filters)
    if one_hot_label:
        net = Conv2D(2, kernel_size=1, activation='relu',
                     padding='same')(net)
        net = Reshape((2, input_shape[0] * input_shape[1]))(net)
        net = Permute((2, 1))(net)
        net = Activation('softmax')(net)
    else:
        net = Conv2D(1,
                     kernel_size=1,
                     strides=1,
                     activation='sigmoid',
                     padding='same')(net)
    model = Model(inputs=[
        inputs1, inputs2, inputs_waves1, inputs_waves2, inputs_waves3,
        inputs_waves4
    ],
                  outputs=net)

    # model = Model(inputs=[inputs1, inputs2], outputs=net)

    return model
Example #11
            continue
        if "vals" in ev[1].groupdict():
            yield dict(common.concat([
                zip(
                    cols.strip().split(),
                    ev[1].groupdict()["vals"].strip().split()
                ),
                [("time", common.gentime() if not ev[1].groupdict()["time"]
                  else ev[1].groupdict()["time"])]
            ]))

rdict = {
    #    Linux 2.6.32-431.17.1.el6.x86_64 (jpn-zaq50)    04/17/15        _x86_64_        (4 CPU)
    #15:27:24        CPU      %usr     %nice      %sys   %iowait    %steal      %irq     %soft    %guest     %idle
    #15:27:26        all     36.56      0.00     24.25      0.00      0.00      0.00      0.00      1.13     38.07
    r"^\s*(?P<uname>Linux\s+\S+)\s+(?P<host>\S+)\s+(?P<date>\S+)\s+\S+\s+\((?P<numcpu>\d+)\s+CPU\)\s*$": (parse, __name__),
    r"^(?P<time>\d{2}:\d{2}:\d{2}) (?P<noon>AM|PM){0,1}\s+(?P<cols>(\S*[^\s0-9\.\-\+]\S*\s+){0,}\S*[^\s0-9\.\-\+]\S*)\s*$": (parse, __name__),
    r"^(?P<time>\d{2}:\d{2}:\d{2}) (?P<noon>AM|PM){0,1}\s+(?P<vals>(\S+\s+){0,}[0-9\.\-\+]+)\s*$": (parse, __name__),
    r"^(?P<junk>Average:.*)\s*$": (parse, __name__),
}
if __name__ == "__main__":
    import json
    import subprocess
    from logging import basicConfig
    basicConfig(level=10)
    rdict = common.regex_dict(dict(common.concat([d.items() for d in [rdict]])))
    c = subprocess.Popen("sar -A 2 2", shell=True, stdout=subprocess.PIPE,
                         stderr=subprocess.PIPE, stdin=subprocess.PIPE)
    gen = parse(rdict, [], c)
    for dat in gen:
        print(json.dumps(dat))
Example #12
def build_lanenet_att(input_shape,
                      input_shape1=[128, 128, 3],
                      input_shape2=[64, 64, 3],
                      input_shape3=[32, 32, 3],
                      input_shape4=[16, 16, 3],
                      one_hot_label=False):
    print(input_shape, 'Input size shape ~')
    inputs = Input((input_shape))
    n_filters = 64

    inputs_waves1 = Input((input_shape1))
    inputs_waves2 = Input((input_shape2))
    inputs_waves3 = Input((input_shape3))
    inputs_waves4 = Input((input_shape4))
    #wavelet1, wavelet2, wavelet3, wavelet4 = C.lanenet_wavelet(inputs)

    enc1 = C.encoder_block(inputs, n_filters, blocks=2)
    skip1 = C.pool(enc1)
    _skip1 = C.concat(skip1, inputs_waves1)

    enc2 = C.encoder_block(_skip1, 2 * n_filters, blocks=2)
    skip2 = C.pool(enc2)
    _skip2 = C.concat(skip2, inputs_waves2)

    enc3 = C.encoder_block(_skip2, 4 * n_filters)
    skip3 = C.pool(enc3)
    _skip3 = C.concat(skip3, inputs_waves3)

    enc4 = C.encoder_block(_skip3, 8 * n_filters)
    skip4 = C.pool(enc4)
    _skip4 = C.concat(skip4, inputs_waves4)

    enc5 = C.encoder_block(_skip4, 8 * n_filters)
    skip5 = C.pool(enc5)

    bridge = C.encoder_block(skip5, 32 * n_filters, blocks=2)
    #bridge = C.encoder_block(bridge, 16 * n_filters)
    up4 = C.desconv(bridge, 8 * n_filters)
    att4 = attention(up4, skip4, 8 * n_filters)
    merge4 = C.add(up4, att4)
    merge4 = C.conv_block(merge4, 8 * n_filters, 1)

    up3 = C.desconv(merge4, 4 * n_filters)
    att3 = attention(up3, skip3, 4 * n_filters)
    merge3 = C.add(up3, att3)
    merge3 = C.conv_block(merge3, 4 * n_filters, 1)

    up2 = C.desconv(merge3, 2 * n_filters)
    att2 = attention(up2, skip2, 2 * n_filters)
    merge2 = C.add(up2, att2)
    merge2 = C.conv_block(merge2, 2 * n_filters, 1)

    up1 = C.desconv(merge2, n_filters)
    att1 = attention(up1, skip1, n_filters)
    net = C.add(up1, att1)
    net = C.conv_block(net, n_filters, 1)

    net = C.desconv(net, 1)
    #merge1 = C.conv_block(merge1, n_filters)
    if one_hot_label:
        net = Conv2D(2, kernel_size=1, activation='relu',
                     padding='same')(net)
        net = Reshape((2, input_shape[0] * input_shape[1]))(net)
        net = Permute((2, 1))(net)
        net = Activation('softmax')(net)
    else:
        net = Conv2D(1, kernel_size=1, activation='sigmoid')(net)
    model = Model(inputs=[
        inputs, inputs_waves1, inputs_waves2, inputs_waves3, inputs_waves4
    ],
                  outputs=net)

    return model
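
Given the wavelet defaults (128/64/32/16), the main input is presumably 256x256x3 so that each pooled skip lines up with its wavelet input. A usage sketch under that assumption; the compile settings are illustrative, not from the source:

model = build_lanenet_att((256, 256, 3))
model.compile(optimizer='adam', loss='binary_crossentropy')
model.summary()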