Example #1
def ConfusionMatrix(logits, labels, dataset, filename, text_file):
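    # C holds raw prediction counts (rows: ground truth, columns: prediction); CM adds an
    # extra row/column for per-class precision/recall and overall accuracy (%) in the corner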
    C = np.zeros((len(dataset.tacticName), len(dataset.tacticName)))
    CM = np.zeros((len(dataset.tacticName) + 1, len(dataset.tacticName) + 1))
    ''' convert tactic from network order to dataset order '''
    reorder = np.concatenate(dataset.C5k_CLASS).ravel().tolist()
    orderTactic = [dataset.tacticName[i] for i in reorder]
    #orderTactic = ['F23','EV','HK','PD','RB','WS','SP','WW','PT','WV']
    tacticNum = len(orderTactic)
    for bagIdx in range(len(labels)):
        gt = np.argmax(labels[bagIdx])
        pred = np.argmax(logits[bagIdx])
        #pred = logits[bagIdx]
        C[gt, pred] = C[gt, pred] + 1

    gtC = np.sum(C, axis=1)
    predC = np.sum(C, axis=0)
    TP = np.diag(C)
    precision = TP / predC

    recall = TP / gtC
    CM[0:tacticNum, 0:tacticNum] = C
    CM[tacticNum, 0:tacticNum] = precision
    CM[0:tacticNum, tacticNum] = recall.T
    CM[tacticNum, tacticNum] = np.sum(TP) / np.sum(C) * 100
    rowIdx = orderTactic + ['Precision']
    colIdx = orderTactic + ['Recall']
    df = pd.DataFrame(CM, index=rowIdx, columns=colIdx)

    utils.printLog(text_file, df.round(3).to_string())
    #print(df.round(3))
    #text_file.write(df.round(3).to_string())
    #print(C)
    #text_file.write(np.array2string(C))
    if filename is not None:
        df.round(3).to_csv(filename, na_rep='NaN')
Example #2
    def run(self):
        utils.printLog('SERVER', ('listening on port %s' % self.port))
        addr = usocket.getaddrinfo('0.0.0.0', self.port)[0][-1]
        socket = usocket.socket()
        socket.bind(addr)
        socket.listen(1)

        while True:
            cl, addr = socket.accept()
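            # bound reads from this client to 2.5 seconds so a stalled connection
            # cannot block the single-threaded accept loop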
            cl.settimeout(2.5)
            self.process(cl, addr)
Example #3
def runBlast(fastaFile, blastFile, handle=sys.stderr):
    """Run (multithreaded when possible) BLAST on fastaFile and write the tabular (outfmt 6) result to blastFile."""
    i0 = time.clock()
    nbCPU = multiprocessing.cpu_count()
    n = max(1, nbCPU - 2)  # leave two cores free, but always use at least one thread
    tag = random.randint(100000, 1000000)
    dbFile = fastaFile + "_" + str(tag) + ".db"
    dbCmd = """makeblastdb -in %s -input_type "fasta" -dbtype prot -out %s -hash_index""" % (
        fastaFile, dbFile)
    proc1 = Popen(args=[dbCmd], shell=True, executable="/bin/bash")
    proc1.communicate()
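    # blastCmd is only assembled for logging; the actual search runs through BA.main below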
    blastCmd = """%s -i %s -out %s -db %s -th %d -evalue 1e-5""" % (
        blastAll, fastaFile, blastFile, dbFile, n)
    try:
        BA.main(inFile=fastaFile, out=blastFile, db=dbFile, th=n)
    except IOError as e:
        printLog("Error in %s: %s\nExiting." % (blastCmd, e), handle)
        return ()
    cleanCmd = """rm %s*""" % dbFile
    proc3 = Popen(args=[cleanCmd], shell=True, executable="/bin/bash")
    proc3.communicate()
    myTimer(i0, "Completed runBlast")
Example #4
def runDescription(annotFile,
                   radical=None,
                   ID=None,
                   keyList=None,
                   handle=sys.stderr,
                   config=None):
    """Compute the result of Description.py on the trailFile hierarchy with basis the annotFile."""
    if not ID:
        ID = "UniqID"
    if keyList:
        keyString = ",".join(keyList)
    else:
        keyString = None  # keyString is used unconditionally in the D.Main calls below
    descFile = radical + ".desc"
    xmlFile = radical + ".xml_desc"
    configFile = radical + ".config"
    trailFile = radical + ".trail"
    edgeFile = radical + ".edges"
    compFile = radical + ".twin_comp"
    #cmd8 = """%s -i %s -o %s -X %s -a -D -c %s -k %s -H %s %s %s""" % (Description,ID,descFile,configFile,compFile,keyString,trailFile,edgeFile,annotFile)
    cmd8 = """%s -i %s -o %s -X %s -a -D -c %s -H %s %s %s""" % (
        Description, ID, descFile, configFile, compFile, trailFile, edgeFile,
        annotFile)
    printLog(
        "--------------------------------------------------\nConfiguring %s" %
        cmd8, handle)
    try:
        if config:
            D.Main(edgeFile,
                   annotFile,
                   nodeID=ID,
                   outFile=descFile,
                   X=configFile,
                   restrAnnot=True,
                   display=False,
                   comp=compFile,
                   hist=trailFile,
                   keyList=keyString,
                   log=handle)
        else:
            D.Main(edgeFile,
                   annotFile,
                   nodeID=ID,
                   outFile=descFile,
                   X=configFile,
                   restrAnnot=True,
                   display=True,
                   comp=compFile,
                   hist=trailFile,
                   keyList=keyString,
                   log=handle)
    except IOError as e:
        printLog("Error in %s: %s\nExiting." % (cmd8, e), handle)
        return ()
    #time.sleep(15)
    if config:
        xmlform.main(xmlFile=configFile)
    cmd8bis = """%s -i %s -o %s -O %s -x %s -a -H %s %s %s""" % (
        Description, ID, descFile, xmlFile, configFile, trailFile, edgeFile,
        annotFile)
    printLog(
        "--------------------------------------------------\nRunning %s" %
        cmd8bis, handle)
    try:
        D.Main(edgeFile,
               annotFile,
               nodeID=ID,
               outFile=descFile,
               Xout=xmlFile,
               x=configFile,
               restrAnnot=True,
               display=True,
               hist=trailFile,
               keyList=keyList,
               log=handle)
    except IOError as e:
        printLog("Error in %s: %s\nExiting." % (cmd8bis, e), handle)
        return ()
Example #5
import machine
import sonoff_server_controller
import server
import status_server_controller
import utils
import network

utils.printLog("NODEMCU", "sonoff")

statusPin = machine.Pin(13, machine.Pin.OUT)

def timeout1seconds():
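    # drive the status pin high while the station (Wi-Fi) interface is disconnected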
    isconnected = network.WLAN(network.STA_IF).isconnected()
    statusPin.value(1 if not isconnected else 0)

def timeout10minutes():
    utils.syncDatetime()

tim0 = machine.Timer(0)
tim0.init(period=1000, mode=machine.Timer.PERIODIC, callback=lambda t: timeout1seconds())
tim1 = machine.Timer(1)
tim1.init(period=600000, mode=machine.Timer.PERIODIC, callback=lambda t: timeout10minutes())
timeout10minutes()

sonoffServerController = sonoff_server_controller.getInstance()
controllers = [sonoffServerController]
statusController = status_server_controller.StatusServerController('Sonoff', controllers)
_server = server.Server(33455, controllers + [statusController])
_server.run()
Example #6
import config
import temperature_sensor
import display
import machine
import server
import status_server_controller
import utils
import utime

utils.printLog("NODEMCU", "thermometer boot up")

i2c = machine.I2C(scl=machine.Pin(config.D2),
                  sda=machine.Pin(config.D1),
                  freq=400000)

_temperature_sensor = temperature_sensor.TemperatureSensor(
    machine.Pin(config.D3))
_display = display.Display(i2c, _temperature_sensor)


def timeout1second(timer):
    _temperature_sensor.update()
    hour = utime.localtime(utime.time())[3]
    _display.setBacklight(7 <= hour <= 22)  # backlight only between 07:00 and 22:59
    _display.update()


def timeout1minute(timer):
    _temperature_sensor.upload()

Example #7
import config
import esp
import machine
import utils
import ubinascii

utils.printLog('ANIMATOR', 'fast set leds')
_config = utils.readJson('animator.data')
if _config and _config['powered_on'] and _config['use_color']:
    pin = machine.Pin(config.D4, machine.Pin.OUT)
    color_bytes = ubinascii.unhexlify(_config['color'])
    # swap the first two bytes: WS2812-style neopixels expect GRB rather than RGB order
    color = [color_bytes[1], color_bytes[0], color_bytes[2]]
    esp.neopixel_write(pin, bytearray(color * _config['leds']), 1)
utils.printLog('ANIMATOR', 'finish')
Example #8
if __name__ == '__main__':
    MethodString = """Clustering method (-m METHOD):
'fg': community_fastgreedy(self, weights=None)
\tCommunity structure based on the greedy optimization of modularity.
'im': community_infomap(self, edge_weights=None, vertex_weights=None, trials=10)
\tFinds the community structure of the network according to the Infomap method of Martin Rosvall and Carl T. Bergstrom.
'le': community_leading_eigenvector(clusters=None, weights=None, arpack_options=None)
\tNewman's leading eigenvector method for detecting community structure.
'lp': community_label_propagation(weights=None, initial=None, fixed=None)
\tFinds the community structure of the graph according to the label propagation method of Raghavan et al.
'ml': community_multilevel(self, weights=None, return_levels=False)
\tCommunity structure based on the multilevel algorithm of Blondel et al.
'om': community_optimal_modularity(self, *args, **kwds)
\tCalculates the optimal modularity score of the graph and the corresponding community structure.
'eb': community_edge_betweenness(self, clusters=None, directed=True, weights=None)
\tCommunity structure based on the betweenness of the edges in the network.
'sg': community_spinglass(weights=None, spins=25, parupdate=False, start_temp=1, stop_temp=0.01, cool_fact=0.99, update_rule='config', gamma=1, implementation='orig', lambda_=1)
\tFinds the community structure of the graph according to the spinglass community detection method of Reichardt & Bornholdt.
'wt': community_walktrap(self, weights=None, steps=4)
\tCommunity detection algorithm of Latapy & Pons, based on random walks.
"""
    prog = sys.argv[0].split("/")[-1]
    parser = processArgs()
    args = parser.parse_args()
    header = " ".join(sys.argv)
    printLog(header,args.l,mode="w")
    print(vars(args))
    Main(edgeFile=args.edgeFile,outFile=args.o,method=args.m,log=args.l)

Example #9
def Main(blastFile=None,
         genome2sequence=None,
         sep=None,
         thr=None,
         cov=None,
         in_network=None,
         fasta=None,
         aln=None,
         clust=None,
         annot=None,
         key=None,
         keyList=None,
         log=None,
         directory=None,
         config=None):
    """ Main program """
    ###
    try:
        startWD = os.path.abspath(os.path.dirname(blastFile))
    except (TypeError, AttributeError):  # blastFile is None: fall back to the current directory
        startWD = os.path.abspath(os.getcwd())
    os.chdir(startWD)
    if directory:
        rootDir = os.path.abspath(directory)
        if not os.path.exists(rootDir):
            os.makedirs(rootDir)
    else:
        rootDir = os.getcwd()
    if log != sys.stderr:
        log = os.path.join(rootDir, log)
    ### Argument processing =============================================================================================================
    if not blastFile or not genome2sequence:
        sys.exit("Required files %s and %s" % ("blastFile", "genome2sequence"))
    blastFile = os.path.abspath(blastFile)
    genome2sequence = os.path.abspath(genome2sequence)
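    # thr arrives as a comma-separated string of integer similarity thresholds (in percent),
    # cov as a single float coverage value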
    ThresholdList = list(map(int, thr.strip().split(",")))
    cover = float(cov)
    print("Starting directory: %s" % startWD)
    print("Root directory: %s" % rootDir)
    if fasta:
        if aln == "b":
            runBlast(fasta, blastFile)
        elif aln == "d":
            runDiamond(fasta, blastFile)
        else:
            sys.exit(
                "Wrong sequence comparison option -- use (b) for BLAST - (d) for DIAMOND"
            )
    UniqID = key
    ## Filename definitions =============================================================================================================
    if in_network:
        geneNetwork = os.path.abspath(in_network)
    else:
        geneNetwork = blastFile + ".cleanNetwork"
    if annot:
        annot = os.path.abspath(os.path.join(startWD, annot))
        if keyList:
            keyList = keyList.split(",")
        else:
            with open(annot, 'r') as ANNOT:
                keyList = ANNOT.readline().strip().split(sep)[1:]
    else:
        annot = None
        keyList = None
    ## Corps du programme ===========================================
    inext = time.clock()
    os.chdir(rootDir)
    ## A) from the blast output to the sequence families
    # a) filter self-hits and keep only best hit
    if not in_network:
        cmd1 = "%s -n 1 -i %s" % (
            cleanblast, blastFile
        )  # the output are three files named blastFile".cleanNetwork", blastFile".cleanNetwork.dico" and blastFile".cleanNetwork.genes"
        printLog(
            "--------------------------------------------------\nRunning %s" %
            cmd1, log)
        proc1 = Popen(args=[cmd1],
                      shell=True,
                      stdout=PIPE,
                      executable="/bin/bash")
        out = proc1.communicate()[0]
        printLog(out.decode('utf-8'), log)
    # b) perform complete analysis for each threshold
    for n in ThresholdList:
        STR = """--------------------------------------------------\nSimilarity threshold %d%%""" % n
        printLog(STR, log)
        completeAnalysis(geneNetwork,
                         genome2sequence,
                         n,
                         cover,
                         a=annot,
                         clustType=clust,
                         UniqID=key,
                         sep=sep,
                         keyList=keyList,
                         handle=log,
                         config=config)
        os.chdir(rootDir)
    ## Fin ======================================================
    prog = myModule()
    if prog == "__main__.py":
        prog = sys.argv[0].split("/")[-1]
    ## Sortie ======================================================
    return ()
Example #10
def keyPlayerResult(Y_pred, Y_label, y_pred, dataset, phase, text_file,
                    log_file, info_file):
    if phase == 'train':
        selectIndex = dataset.trainIdx
    else:
        selectIndex = dataset.testIdx
    NAME = []
    for i in range(len(selectIndex)):
        hit = [(selectIndex[i] in dataset.videoIndex[r])
               for r in range(len(dataset.videoIndex))]
        loc = hit.index(True)
        tacticName = dataset.tacticName[loc]
        string = tacticName + "-" + selectIndex[i].astype(str) + ":"
        NAME.append(string)
    #nv_reorder = [0,1,2,3,8,4,6,5,9,5,7]
    #[[0,1,2,3,5,7],[6,9],[4,8]]

    reorder = np.concatenate(dataset.C5k_CLASS).ravel().tolist()
    inv_reorder = [reorder.index(i) for i in range(len(dataset.tacticName))]
    PRED_NAME = []

    for i in range(len(selectIndex)):
        pred_loc = Y_pred[i].tolist().index(1)
        tacticName = dataset.tacticName[inv_reorder[pred_loc]]
        PRED_NAME.append(tacticName)

    rolePlayerMap = dataset.gtRoleOrder[selectIndex]
    Y_correct_map = Y_pred * Y_label
    Y_correct = 2 * np.sum(Y_correct_map, axis=-1,
                           keepdims=True) - 1  #correct=1, error=-1
    NUM_PLAYER = rolePlayerMap.shape[1]
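    # keyPlayers encodes (role index + 1) for players flagged by y_pred (assumed to be a 0/1
    # key-player mask), with the sign flipped whenever the tactic itself was misclassified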
    keyPlayers = y_pred * (rolePlayerMap + 1) * np.tile(
        Y_correct, [1, NUM_PLAYER])
    DAT = np.column_stack((PRED_NAME, keyPlayers.astype(int)))
    df = pd.DataFrame(DAT,
                      index=NAME,
                      columns=['Y_pred', 'p1', 'p2', 'p3', 'p4', 'p5'])
    if text_file is not None:
        #np.savetxt(text_file, DAT, delimiter=" ", fmt="%s")
        df.to_csv(text_file, na_rep='NaN')
    #np.savetxt(text_file,keyPlayers,fmt='%d', delimiter=' ')
    #with open(text_file,'w') as file:
    #file.write(np.array2string(keyPlayers))
    rolePlayerAccumMap = np.zeros((len(dataset.tacticName), dataset.numPlayer))
    for i in range(len(selectIndex)):
        hit = [(selectIndex[i] in dataset.videoIndex[r])
               for r in range(len(dataset.videoIndex))]
        loc = hit.index(True)
        for p in range(dataset.numPlayer):
            if keyPlayers[i][p] > 0:
                roleIndex = int(keyPlayers[i][p]) - 1
                rolePlayerAccumMap[
                    loc, roleIndex] = rolePlayerAccumMap[loc, roleIndex] + 1

    num_k = np.zeros(len(dataset.tacticName), dtype=np.int8)
    k = 0
    boundary = len(dataset.C5k_CLASS[0])
    for idx in range(len(dataset.tacticName)):
        if idx >= boundary:
            k = k + 1
            boundary = boundary + len(dataset.C5k_CLASS[k])
        num_k[idx] = dataset.k[k]

    reorderMapTemp = rolePlayerAccumMap[reorder]
    reorderMapSum = np.sum(reorderMapTemp, axis=1, keepdims=True)
    num_vid = reorderMapSum / np.expand_dims(num_k, axis=1)
    reorderMap = np.concatenate((reorderMapTemp, num_vid), axis=1)
    orderTactic = [dataset.tacticName[i] for i in reorder]
    orderTacticInfo = [
        orderTactic[t] + '(' + num_k[t].astype(str) + ')'
        for t in range(len(dataset.tacticName))
    ]
    role = ['r1', 'r2', 'r3', 'r4', 'r5', 'num_vid']
    df = pd.DataFrame(reorderMap, index=orderTacticInfo,
                      columns=role).astype(int)
    #print(df)
    utils.printLog(log_file, " ")
    utils.printLog(log_file, df.to_string())
    if info_file is not None:
        df.to_csv(info_file, na_rep='NaN')
Example #11
import animator_server_controller
import config
import machine
import server
import status_server_controller
import utils

utils.printLog("NODEMCU", "animator boot up")

animatorServerController = animator_server_controller.AnimatorServerController(
    machine.Pin(config.D4, machine.Pin.OUT))
statusController = status_server_controller.StatusServerController(
    'Animator', [animatorServerController])


def timeoutTick(timer):
    animatorServerController.tick()


def timeout10minutes(timer):
    utils.syncDatetime()


# sync the clock once at boot, before the 10-minute periodic timer takes over
timeout10minutes(None)

tim1 = machine.Timer(0)
tim1.init(period=1, mode=machine.Timer.PERIODIC, callback=timeoutTick)
tim3 = machine.Timer(2)
tim3.init(period=600000,
          mode=machine.Timer.PERIODIC,
          callback=timeout10minutes)
Example #12
import config
import machine
import utils
import vl53l0x

utils.printLog("NODEMCU", "radar boot up")

i2c = machine.I2C(scl=machine.Pin(config.D2), sda=machine.Pin(config.D1), freq=400000)

distanceSensor = vl53l0x.VL53L0X(i2c, 0x29)  # 0x29 is the sensor's default I2C address

def timeout100ms(timer):
    utils.printLog("RADAR", distanceSensor.read())

tim1 = machine.Timer(0)
tim1.init(period=100, mode=machine.Timer.PERIODIC, callback=timeout100ms)
Example #13
def timeout100ms(timer):
    utils.printLog("RADAR", distanceSensor.read())
Example #14
    def __init__(self, port):
        self.socket = socket.socket()
        self.port = int(port)
        self.socket.bind(('', self.port))
        self.socket.listen(5)
        printLog('Starting server at {}, Ctrl+C to stop'.format(self.port))
        self.channels = {}
        self.clients = {}
        self.SOCKET_LIST = [self.socket]
        try:
            while True:
                reading, writing, exceptioning = select.select(
                    self.SOCKET_LIST, [],
                    [])  # omit timeout to prevent high CPU utilization

                for sock in reading:
                    if sock == self.socket:
                        # accept connections from outside
                        # new socket is created for server to communicate with client
                        # this frees up server to listen for more connections
                        (new_socket, address) = self.socket.accept()
                        self.SOCKET_LIST.append(new_socket)
                        self.clients[new_socket.fileno()] = {
                            'name': '',
                            'channel': '',
                            'buffer': []
                        }
                    else:
                        try:
                            data = sock.recv(MESSAGE_LENGTH).decode()
                            if data:
                                msg_length = len(data)
                                output_str = None
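                                # accumulate partial reads in a per-client buffer and only emit
                                # a message once a full MESSAGE_LENGTH chunk has been received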
                                if self.clients[sock.fileno()].get(
                                        'buffer', []):
                                    cached_msg = self.clients[
                                        sock.fileno()].get('buffer', []).pop()
                                    cached_len = len(cached_msg)
                                    if cached_len + msg_length > MESSAGE_LENGTH:
                                        output_str = cached_msg + data[:MESSAGE_LENGTH - cached_len]
                                        self.clients[sock.fileno()].get('buffer', []).append(
                                            data[MESSAGE_LENGTH - cached_len:])
                                    elif cached_len + msg_length == MESSAGE_LENGTH:
                                        output_str = cached_msg + data
                                    else:
                                        self.clients[sock.fileno()].get(
                                            'buffer',
                                            []).append(cached_msg + data)
                                else:
                                    if msg_length < MESSAGE_LENGTH:
                                        self.clients[sock.fileno()].get(
                                            'buffer', []).append(data)
                                    else:
                                        output_str = data

                                if output_str:
                                    output_str = output_str.rstrip()
                                    if not self.clients[sock.fileno()].get(
                                            'name', ''):
                                        self.clients[
                                            sock.fileno()]['name'] = output_str
                                    else:
                                        name = self.clients.get(
                                            sock.fileno(), {}).get('name')
                                        msg_lst = output_str.split(' ')
                                        # print(msg_lst)
                                        if msg_lst[0].startswith('/'):
                                            if msg_lst[0].startswith('/join'):
                                                self.join_channel(
                                                    msg_lst, sock)
                                            elif msg_lst[0].startswith(
                                                    '/create'):
                                                self.create_channel(
                                                    msg_lst, sock)
                                            elif msg_lst[0].startswith(
                                                    '/list'):
                                                self.list_channel(sock)
                                            else:
                                                self.send(
                                                    SERVER_INVALID_CONTROL_MESSAGE
                                                    .format(output_str), sock)
                                        else:
                                            channel = self.clients.get(
                                                sock.fileno(),
                                                {}).get('channel')
                                            if channel:
                                                self.broadcast(
                                                    "[{}] {}".format(
                                                        name, output_str),
                                                    sock,
                                                    self.channels.get(
                                                        channel, []), channel)
                                            else:
                                                self.send(
                                                    SERVER_CLIENT_NOT_IN_CHANNEL,
                                                    sock)
                            else:
                                self.remove_socket(sock)
                        except Exception as e:
                            self.remove_socket(sock)
                            printError(e)
            self.socket.close()
        except KeyboardInterrupt:
            printLog("Caught interrupt, stopping server", True)
            # self.socket.shutdown(socket.SHUT_RDWR)
            self.socket.close()
            sys.exit()
Example #15
import config
import temperature_sensor
import machine
import pin_scheduler
import server
import status_server_controller
import utils

utils.printLog("NODEMCU", "water can boot up")

_temperature_sensor = temperature_sensor.TemperatureSensor()
pinScheduler = pin_scheduler.PinScheduler(
    machine.Pin(config.D5, machine.Pin.OUT), [((19, 0, 0), 36)])
statusController = status_server_controller.StatusServerController(
    'Pin Scheduler', [])
_server = server.Server(config.SERVER_PORT, [statusController])


def timeout1second(timer):
    _temperature_sensor.update()
    pinScheduler.update()


def timeout1minute(timer):
    if not pinScheduler.isTimeNearScheduler():
        _temperature_sensor.upload()


def timeout10minutes(timer):
    pass
Example #16
def Main(edgeFile=None,annotFile=None,sep=None,outFile=None,Xout=None,restrAnnot=None,nodeList=None,NodeType=None,nodeID=None,unilat=None,track=None,empty=None,\
         x=None,X=None,K=None,hist=None,display=None,trail=None,comp=None,keyList=None,log=None):
    """ Main program """
    ### Argument/options listing
    startWD = os.getcwd()
    if log != sys.stderr:
        try:
            log = os.path.join(startWD, log)
        except TypeError:
            log = sys.stderr
    ### Argument processing ========================================
    ## Filename definitions ======================================
    i0 = time.clock()
    inext = i0
    if not outFile:
        inRad = edgeFile.split(".")[0]
        outFile = inRad + ".desc"
    if empty:
        track = None
    ## File reading options ========================================
    # Step 1) Store the node type (top(1)/bottom(2) in the bipartite graph), adapted for the k-partite case
    if not os.stat(edgeFile).st_size:
        if myModule() == "__main__.py":
            sys.exit("Error: Empty file %s" % edgeFile)
        else:
            raise IOError("Empty file %s" % edgeFile)
    nodeType = ut.readNodeType(
        edgeFile, Type=NodeType
    )  # this step is not REALLY necessary, in the sense that only the values of the nodeType file are used here
    try:
        nodeTypes = list(set(nodeType.values(
        )))  # likely an overkill, but does not seem to be time-consuming
    except AttributeError:  # this is for the unipartite case (or is it?)
        nodeTypes = [1]
    inext = myTimer(inext, "Reading nodeType", handle=log)
    ## Step 2) Read XML configuration file or generate and exit. ++++++++++++++++++++++++++++++++++++++++++++++++++
    # a) set variables.
    if keyList:
        keyDict = ut.processOptions(keyList, nodeTypes)
    else:
        selectedKeys = list(ut.getHeader(annotFile).keys())
        selectedKeys.remove(nodeID)
        keyDict = dict()
        for n in nodeTypes:
            keyDict[n] = selectedKeys
    trailObjects = []
    compObject = None
    root = os.getcwd()
    if comp:
        compObject = ut.myMod(fileName=comp, attDict={0: "Module"})
    if hist:  # added option to generate complete trailHistory in the XML file : options.H is the main trailFile (from rootDir to cwDir)
        history = ut.trailTrack(hist)
        root = ut.trailHist(hist)['root']
        k = 1
        for trailName in history:
            trailKeyDict = dict([(i, "NodeType" + str(i)) for i in nodeTypes])
            Trail = ut.myTrail(fileName=trailName,
                               rank=k,
                               attDict=trailKeyDict)
            trailObjects.append(Trail)
            k += 1
    if x:  # this option gives the name of the config file, and proceeds with the description procedure
        configFile = x
        if X:  # options.X is the name of the configurationFile that will be generated (default = "config.xml")
            if x == X:
                configFile = ut.generateXML(nodeTypes,
                                            trailObjects=trailObjects,
                                            compObject=compObject,
                                            attDict=keyDict,
                                            outFile=X,
                                            display=display,
                                            root=root)
            else:
                sys.exit("Conflicting fields -x and -X. Check and run again.")
        if K:
            ret = xmlform.main(xmlFile=configFile)
            if ret == "Cancel":
                sys.exit(0)
        trailObjects, compObject, keyDict, selectedKeys, XML = ut.readConfigFile(
            configFile)
        ut.printDescription(trailObjects,
                            compObject,
                            keyDict,
                            selectedKeys,
                            handle=sys.stderr)
    else:  # this block will generate the config file and stop: we start with this part.
        if X:  # options.X is the name of the configurationFile that will be generated (default = "config.xml")
            outConf = X
        else:
            outConf = "config.xml"
        ## selectedKeys are obtained as header of the annotFile
        configFile = ut.generateXML(nodeTypes,
                                    trailObjects=trailObjects,
                                    compObject=compObject,
                                    attDict=keyDict,
                                    outFile=outConf,
                                    display=display,
                                    root=root)
        #configFile = generateXML(nodeTypes,trailObjects=trailObjects,compObject=compObject,attDict=keyDict,outFile=X,display=display)
        if myModule() == "__main__.py":
            printLog(
                "Configured file %s: please check options, and pass it with -x option"
                % outConf, log)
        return ()
    ## Step 3) Define nodeLists of currentID and UniqID. +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    if NodeType == '2':
        nodes = nodeType.keys()
    elif NodeType == '1':
        nodes = ut.readNodes(edgeFile, sep=sep)
        nodeType = ut.initDict(nodes, value=1)
    else:
        nodes = ut.readNodes(edgeFile, sep=sep)
    if nodeList:  # if we explicitly give a file with the currentID to restrict to.
        nodeFile = nodeList  # the restriction file is passed in through the nodeList parameter
        nodes = ut.file2set(
            nodeFile)  # nodes is actually a list (but without repetitions)!
        inext = myTimer(inext, "Reading nodeFile", handle=log)
    if unilat:
        nTypes = set(unilat.strip().split(","))
        # build a list rather than a generator so that len(nodes) below still works
        nodes = [node for node in nodes if nodeType[node] in nTypes]
    printLog("""Loaded %d nodes""" % len(nodes), log)
    # Selected UniqIDs: ========
    if trailObjects:
        trailObjects[-1].getDict(
        )  # here the dictionaries of the main trail file are loaded.
        current2UniqID = trailObjects[-1].dict_inv
        myEntries = ut.unList(map(lambda x: current2UniqID[x], nodes))
    else:
        myEntries = nodes
        current2UniqID = None
    printLog("""Found %d entries""" % len(myEntries), log)
    inext = myTimer(inext, "Reading allEntries", handle=log)
    # Annotation file processing: ==========
    if restrAnnot:
        annotationDict, fields = ut.restrictAnnot(annotFile,
                                                  mainKey=str(nodeID),
                                                  valueKeyList=selectedKeys)
    else:
        annotationDict, fields = ut.myLoadAnnotations(
            annotFile,
            mainKey=str(nodeID),
            valueKeyList=selectedKeys,
            counter=0)
    inext = myTimer(inext, "Reading annotations", handle=log)
    ## Step 4) Construct the actual description. +++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++
    OutFile = Xout
    ut.xmlDescription(annot=annotationDict,
                      nodeDict=current2UniqID,
                      entries=myEntries,
                      compObject=compObject,
                      trails=trailObjects,
                      nodeType=nodeType,
                      keyDict=keyDict,
                      xmlOutFile=OutFile,
                      outFile=outFile,
                      track=track,
                      X=XML,
                      handle=log)
    if Xout:
        printLog("XML output written to %s" % OutFile, log)
    else:
        printLog("Description written to %s" % outFile, log)
    ## Output and exit ======================================================
    prog = myModule()
    if prog == "__main__.py":
        prog = sys.argv[0].split("/")[-1]
    inext = myTimer(i0, "Total computing time for %s" % prog, handle=log)
    return
Example #17
#========= Main program
if __name__ == '__main__':
    prog = sys.argv[0].split("/")[-1]
    parser = processArgs()
    args = parser.parse_args()
    CMD = " ".join(sys.argv)
    printLog(CMD, args.l, mode="w")
    print(vars(args))
    if not args.G:
        Main(edgeFile=args.i,annotFile=args.a,sep=args.s,outFile=args.o,Xout=args.O,restrAnnot=args.A,nodeList=args.n,NodeType=args.N,\
             nodeID=args.I,unilat=args.u,track=args.T,empty=args.E,x=args.x,X=args.X,K=args.K,hist=args.H,display=args.D,trail=args.t,comp=args.c,keyList=args.k,log=args.l)
    else:
        ds_launcher.main(prog, args)
Example #18
def Main(edgeFile=None,outFile=None,sep=None,unilat=None,twin_supp=None,Twin_Supp=None,min_supp=None,min_size=None,nodeType=None,comp=None,debug=None,log=None):
    """ Main program """
    i0 = time.clock()
    inext = i0
    ## File reading options ========================================
    if not outFile:
        outFile = edgeFile+".twins"
    thr = min_supp
    try:
        k_part = int(nodeType)
    except (TypeError,ValueError):
        k_part = nodeType
    ## File reading ========================================
    if not os.stat(edgeFile).st_size:
        if myModule() == "__main__.py":
            sys.exit("Error: Empty file %s" % edgeFile)
        else:
            raise IOError("Empty file %s" % edgeFile)
    g = ut.myReadGraph(edgeFile)
    print(g.summary())
    id2name = {}
    name2id = {}
    for n in g.vs():
        name = n['name']
        ind = n.index
        id2name[ind] = name
        name2id[name] = ind
    inext = myTimer(i0,"Loading graph",handle=log)
    ## Program body ===========================================
    # Adjacency list computation ------------------------------
    getName = lambda x:id2name[x]
    nodes = None
    if unilat:
        typeSet = set(map(lambda x:int(x),unilat.strip().split(",")))
        typeDict = defaultdict(int)
        if k_part == 2 or not k_part:
            typeDict.update(ut.rawNodeType(edgeFile))
        elif k_part != 1:
            typeDict.update(ut.loadNodeType(k_part))
        nodes = (n.index for n in g.vs() if typeDict[n['name']] in typeSet)
    ADJ = ut.getAdjlist(g,nodes=nodes)
    inext = myTimer(inext,"Computation of adjacency list",handle=log)
    # Twin computation ----------------------------------------
    support,twins = ut.detectRepeated(ADJ,k_init=0,debug=debug)   # support: groupID -> common_list_of_neighbours; twins: node -> groupID_of_its_twin_class
    inext = myTimer(inext,"Computation of twins",handle=log)
    new_support = dict([(gid,tuple(map(getName,support[gid]))) for gid in support])
    new_twins = dict([(id2name[node],twins[node]) for node in twins])
    support = new_support
    twins = new_twins
    inext = myTimer(inext,"Renumbering of twins",handle=log)
    sniwt = ut.InvertMap(twins)   # groupID -> list_of_twin_nodes
    inext = myTimer(inext,"Computation of twin support",handle=log)
    # Computation of components (twins + support)
    if comp:
        with open(comp,'w') as h:
            for key,val in iter(twins.items()):
                outString = str(key)+sep+str(val)+"\n"
                h.write(outString)
            inext = myTimer(inext,"Writing twins file",handle=log)
            for val,nodes in iter(support.items()):
                for node in nodes:
                    outString = str(node)+sep+str(val)+"\n"
                    h.write(outString)
            inext = myTimer(inext,"Writing twins component file",handle=log)
    # Computation of twinSupport (twinID twinNb twinSupport)
    if twin_supp:
        with open(twin_supp,'w') as g:
            for i,nodeList in iter(sniwt.items()):
                supp = support[i]
                if len(supp) >= thr and len(nodeList) >= min_size:   # Threshold for trivial twins (new option 15/12/15)
                    vals = [str(i),str(len(nodeList)),str(len(supp))]
                    vals.extend(list(map(lambda x:str(x),supp)))
                    g.write("\t".join(vals)+"\n")
        inext = myTimer(inext,"Writing twins support file",handle=log)
    # Computation of TwinSupport (twinID twinNodes twinSupport)
    if Twin_Supp:
        with open(Twin_Supp,'w') as g:
            for i,nodeList in iter(sniwt.items()):
                supp = support[i]
                if len(supp) >= thr and len(nodeList) >= min_size:   # Threshold for trivial twins (new option 15/12/15)
                    myTwins = ",".join(map(lambda x:str(x),nodeList))
                    mySupport = ",".join(map(lambda x:str(x),supp))
                    vals = [str(i),myTwins,mySupport]   # twinID, twin nodes, twin support
                    g.write("\t".join(vals)+"\n")
        inext = myTimer(inext,"Writing Twins Support file",handle=log)
    ut.outputDict(twins,outFile,sep=sep)
    allTwins = len(sniwt.keys())
    t = len([i for (i,v) in iter(sniwt.items()) if len(v) == 1])
    try:
        tp = 100*float(t)/float(allTwins)
    except ZeroDivisionError:
        tp = 0
    nt = allTwins - t
    try:
        ntp = 100*float(nt)/float(allTwins)
    except ZeroDivisionError:
        ntp = 0
    printLog("""Found %s twins, %s trivial twins (%.2f%%) and %s non-trivial twins (%.2f%%)""" % (allTwins,t,tp,nt,ntp),log)
    ## Ending ======================================================
    prog = myModule()
    if prog == "__main__.py":
        prog = sys.argv[0].split("/")[-1]
    inext = myTimer(i0,"Total computing time for %s" % prog,handle=log)
    return
Example #19
import config
import machine
import pin_server_controller
import server
import status_server_controller
import temperature_sensor
import utils

utils.printLog("NODEMCU", "rstrip boot up")

_temperature_sensor = temperature_sensor.TemperatureSensor(
    machine.Pin(config.D6))


def timeout1minute(timer):
    _temperature_sensor.update()
    _temperature_sensor.upload()


def timeout10minutes(timer):
    utils.syncDatetime()


tim0 = machine.Timer(0)
tim0.init(period=60000, mode=machine.Timer.PERIODIC, callback=timeout1minute)
tim1 = machine.Timer(1)
tim1.init(period=600000,
          mode=machine.Timer.PERIODIC,
          callback=lambda t: timeout10minutes())
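# run both handlers once at boot so the first temperature update and clock sync
# do not have to wait a full timer period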
timeout1minute(None)
timeout10minutes(None)
Example #20
def completeAnalysis(geneNetwork,
                     genome2sequence,
                     n,
                     c,
                     a=None,
                     clustType=None,
                     UniqID=None,
                     sep=None,
                     keyList=None,
                     handle=sys.stderr,
                     config=None):
    """Perform complete bipartite and twin analysis at a given identity threshold n"""
    directory = "graphs" + str(n)
    try:
        os.mkdir(directory)
    except OSError:
        pass
    # Names and file definitions
    if clustType == 'cc':
        seqCompFile = "CC.nodes"  # compFile for sequences
        eFile = "CC.edges"
        iFile = "CC.info"
    elif clustType == 'families':
        seqCompFile = "family.nodes"  # compFile for sequences
        eFile = "family.edges"
        iFile = "family.info"
    else:
        sys.exit("Bad clustering type -- see -C option")
    edgeFile = "graph.edges"  # edgeFile
    trailFile = "graph.trail"  # trailFile
    geneNetworkDico = geneNetwork + ".dico"
    geneNetworkGenes = geneNetwork + ".genes"
    ## ==============================
    # c) assemble sequence families by computing the connected components
    cmd2 = """%s -i %s -d %s -n %s -m %s -p %d""" % (
        familydetector, geneNetwork, directory, geneNetworkGenes, clustType, n)
    printLog(
        "--------------------------------------------------\nRunning %s" %
        cmd2, handle)
    proc2 = Popen(args=[cmd2], shell=True, stdout=PIPE, executable="/bin/bash")
    out = proc2.communicate()[0]
    printLog(out.decode('utf-8'), handle)
    mySeqCompFile = os.path.join(directory, seqCompFile)
    myiFile = os.path.join(directory, iFile)
    myeFile = os.path.join(directory, eFile)
    # renumber back families through geneNetworkDico
    dic1 = ut.loadMapping(geneNetworkDico)
    dic2 = ut.node2communityFasta(mySeqCompFile, sep=sep)
    compDict = ut.composeDict(dic1, dic2)
    ut.outputDict(compDict, mySeqCompFile, sep=sep)
    cleanCmd = """rm %s %s""" % (myiFile, myeFile)
    procClean = Popen(args=[cleanCmd], shell=True, executable="/bin/bash")
    procClean.communicate()
    ## B) from the sequence families to the bipartite graph
    # a) Cluster sequence families and quotient the graph
    cmd3 = """%s -c %s -k %s -d %s %s %s %s""" % (
        FactorGraph, mySeqCompFile, UniqID, directory, genome2sequence,
        edgeFile, trailFile)
    printLog(
        "--------------------------------------------------\nRunning %s" %
        cmd3, handle)
    FG.Main(edgeFile=genome2sequence,
            outEdgeFile=edgeFile,
            outTrailFile=trailFile,
            direct=directory,
            community=mySeqCompFile,
            comm_id=UniqID,
            sep=sep,
            log=handle,
            header=cmd3)
    os.chdir(directory)
    printLog(
        "--------------------------------------------------\ncd %s" %
        directory, handle)
    ##
    rad = "graph0"
    # b) Remove the degree one nodes from the sequence side
    edges = rad + ".edges"
    cmd4 = """%s -d 1 -u 2 %s %s""" % (shaveGraph, edgeFile, edges)
    printLog(
        "--------------------------------------------------\nRunning %s" %
        cmd4, handle)
    SG.Main(edgeFile=edgeFile,
            outEdgeFile=edges,
            degree=1,
            nodeType=2,
            sep=sep,
            log=handle)
    # d) Compute twins and twin supports of the bipartite graph
    twins = rad + ".twins"
    twinComp = rad + ".twin_comp"
    cmd6 = """%s -o %s -u 2 -c %s %s """ % (DetectTwins, twins, twinComp,
                                            edges)
    printLog(
        "--------------------------------------------------\nRunning %s" %
        cmd6, handle)
    try:
        DT.Main(edgeFile=edges,
                outFile=twins,
                sep=sep,
                unilat='2',
                comp=twinComp,
                log=handle)
    except IOError as e:
        printLog("Error in %s: %s\nExiting." % (cmd6, e), handle)
        return ()
    ## C) from the bipartite graph to the twins and articulation points
    # a) twin quotienting
    twinDir = "TwinQuotient"
    try:
        os.mkdir(twinDir)
    except OSError:
        pass
    rad = "graph1"
    newEdges = rad + ".edges"
    newTrail = rad + ".trail"
    cmd7 = """%s -c %s -k %s -d %s -t %s %s %s %s""" % (
        FactorGraph, twins, UniqID, twinDir, trailFile, edges, newEdges,
        newTrail)
    printLog(
        "--------------------------------------------------\nRunning %s" %
        cmd7, handle)
    try:
        FG.Main(edgeFile=edges,
                outEdgeFile=newEdges,
                outTrailFile=newTrail,
                direct=twinDir,
                community=twins,
                comm_id=UniqID,
                in_trail=trailFile,
                sep=sep,
                log=handle,
                header=cmd7)
    except IOError as e:
        printLog("Error in %s: %s\nExiting." % (cmd7, e), handle)
        return ()
    os.chdir(twinDir)
    printLog(
        "--------------------------------------------------\ncd %s" % twinDir,
        handle)
    # b) Computing articulation points and biconnected components
    ART, BIC = getArticulationPoints(newEdges)
    artPoints = rad + ".art"
    aP = open(artPoints, "w")
    printLog(
        "--------------------------------------------------\nPrinting %d articulation points in %s"
        % (len(ART), artPoints), handle)
    for node in ART:
        outString = """%s\t%s\n""" % (node, ",".join(
            [str(ID) for ID in BIC[node]]))
        aP.write(outString)
    aP.close()
    bcNb = 0
    bicFile = rad + ".bic_comp"
    bC = open(bicFile, "w")
    for node in BIC:
        for ID in BIC[node]:
            bcNb = max(bcNb, ID)
            bC.write("""%s\t%d\n""" % (node, ID))
    bC.close()
    printLog(
        "--------------------------------------------------\nPrinting %d biconnected components in %s"
        % (bcNb + 1, bicFile), handle)
    ## D) annotations and twin component analysis
    if a:
        edges = rad + ".edges"
        twins = rad + ".twins"
        twinComp = rad + ".twin_comp"
        cmd9 = """%s -o %s -u 2 -c %s %s """ % (DetectTwins, twins, twinComp,
                                                edges)
        printLog(
            "--------------------------------------------------\nRunning %s" %
            cmd9, handle)
        try:
            DT.Main(edgeFile=edges,
                    outFile=twins,
                    sep=sep,
                    unilat='2',
                    comp=twinComp,
                    log=handle)
        except IOError as e:
            printLog("Error in %s: %s\nExiting." % (cmd9, e), handle)
            return ()
        runDescription(a,
                       radical=rad,
                       ID=UniqID,
                       keyList=keyList,
                       handle=handle,
                       config=config)