Example #1
    def __init__(self, file):
        self.data, self.title = helper.parse(file)

        if not self.data:
            raise Exception("Data cannot be empty")

        self.yPoints = [x.ele for x in self.data]
        self.xPoints = []
        last = 0
        for d in [x for x in self.data if x.dis is not None]:
            last += d.dis
            self.xPoints.append(last)

        self.xMarkedPoints = []
        self.yMarkedPoints = []

        self.xMin = min(self.xPoints)
        self.xMax = max(self.xPoints)
        self.yMin = min(self.yPoints)
        self.yMax = max(self.yPoints)
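
A note on the assumed input: the constructor above expects helper.parse(file) to return a (points, title) pair, where each point exposes an elevation ele and a distance increment dis (possibly None). A minimal stand-in for that shape, purely illustrative (the TrackPoint name and values are not from the project):

from collections import namedtuple

# Hypothetical stand-in for whatever helper.parse() returns in this project.
TrackPoint = namedtuple("TrackPoint", ["ele", "dis"])

def fake_parse(file):
    # Two points 1.5 km apart, elevations in metres; dis may also be None.
    points = [TrackPoint(ele=120.0, dis=0.0), TrackPoint(ele=135.5, dis=1.5)]
    return points, "Sample ride"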
Example #2
def parse_initial(message):
	split_msg = helper.parse(message)
	# TODO: More effective manner of reordering. This is unacceptable.
	# Only for presentation purposes 
	tmp = [split_msg[2], split_msg[3], split_msg[0], split_msg[1]]
	return tuple(tmp)
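
The TODO above asks for a cleaner reordering. One possible sketch, assuming helper.parse returns an indexable sequence with at least four fields (the 2, 3, 0, 1 order is taken straight from the snippet):

from operator import itemgetter

def parse_initial(message):
    split_msg = helper.parse(message)  # helper is the project's own module
    # Same pair swap as above, expressed as a single itemgetter call.
    return itemgetter(2, 3, 0, 1)(split_msg)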
Example #3
File: check.py Project: ymer/merge
    for wave_index, wave_map in enumerate(wave_maps):
        wave_map["i"] = wave_map.index

        df = pd.merge(merged_map, wave_map, on="SNP", how="left").dropna()

        df["previ"] = df.i.shift()
        df2 = df[~(df.previ + 1 == df.i)]

        if not pd.Index(df.i).is_monotonic:
            return False

    return True


s = h.parse()
s.wavepaths = h.replace(s)

wave_maps = h.read_wave_maps(s.wavepaths)
wave_fams = h.read_wave_fams(s.wavepaths)
merged_map = h.read_map(s.mergepath)
merged_fam = h.read_fam(s.mergepath)

include_inds = h.read_include_inds(s.indlist)
if include_inds:
    merged_fam = merged_fam[merged_fam["indID"].isin(include_inds)]
wave_inds, wave_snps = h.read_wave_dosages(s.wavepaths.filepaths)
merged_inds, merged_snps = h.read_dosage(s.mergepath)
merged_info = h.read_info(s.mergepath)
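
The core of check.py is the monotonicity test on the merged column i: after left-joining each wave map onto the merged map, the wave map's original row order must survive. Note that recent pandas versions drop Index.is_monotonic in favour of is_monotonic_increasing. A small self-contained illustration of the same test (the values are made up):

import pandas as pd

ordered = pd.Index([0, 1, 2, 5])     # gaps are fine, reordering is not
shuffled = pd.Index([0, 2, 1, 5])

print(ordered.is_monotonic_increasing)   # True
print(shuffled.is_monotonic_increasing)  # False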

Example #4
 def eval(self, vm, code):
     proc = self.compiler.compile(helper.parse(code), vm.env)
     return vm.run(proc)
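
eval here is plain compile-then-run: helper.parse(code) presumably yields an AST, the compiler lowers it against the VM's environment, and the VM executes the resulting procedure. A runnable toy sketch of that flow; every name below is a hypothetical stand-in, not the project's API:

class VM:
    def __init__(self):
        self.env = {}
    def run(self, proc):
        return proc(self.env)

class Compiler:
    def compile(self, ast, env):
        # "Compile" a parsed constant into a procedure the VM can run.
        return lambda _env: ast

class Interpreter:
    def __init__(self):
        self.compiler = Compiler()
    def eval(self, vm, code):
        # int() stands in for helper.parse; a real parser would build an AST.
        proc = self.compiler.compile(int(code), vm.env)
        return vm.run(proc)

print(Interpreter().eval(VM(), "42"))  # 42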
Example #5
                           maxlen=self.size)

    def output(self):
        for f in sorted(self.deque, key=lambda x: x.size, reverse=True):
            if self.human_read:
                f.size = sizeof_fmt(f.size)
            print("{0:<10} {1}".format(f.size, f.name))


class File(object):
    def __init__(self, name, size):
        self.name = name
        self.size = size


def sizeof_fmt(num, suffix='B'):
    for unit in ['', 'K', 'M', 'G', 'T', 'P', 'E', 'Z']:
        if abs(num) < 1024.0:
            return "%3.1f%s%s" % (num, unit, suffix)
        num /= 1024.0
    return "%.1f%s%s" % (num, 'Y', suffix)


if __name__ == "__main__":
    # In case of using it as a module these are the most likely commands
    from helper import parse
    args = parse()
    r = FileQueue(args.number, fullpath=args.fullpath, human_read=args.human)
    r.find(args.path)
    r.output()
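
sizeof_fmt is the usual repeated-division formatter for human-readable sizes. A quick check of its output, assuming the function as defined above (values computed by hand):

print(sizeof_fmt(0))           # 0.0B
print(sizeof_fmt(2048))        # 2.0KB
print(sizeof_fmt(123456789))   # 117.7MB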
Example #6
def parse_initial(message):
    split_msg = helper.parse(message)
    # TODO: More effective manner of reordering. This is unacceptable.
    # Only for presentation purposes
    tmp = [split_msg[2], split_msg[3], split_msg[0], split_msg[1]]
    return tuple(tmp)
Example #7
File: task1.py Project: shouc/cfg_to_cnf
            else:
                a[k] = "_"
                already_lambdad.append(key)
            return


def remove_epsilon(cfg):
    new_cfg = copy.deepcopy(cfg)
    already_lambdad = []
    while True:
        new_cfg, nullable_key = find_last_epsilon_keys(new_cfg)
        if not nullable_key:
            return new_cfg
        new_cfg = compensate_removal(new_cfg, nullable_key, already_lambdad)
        eddie_all(new_cfg)


# print(0)
# print(to_mentor(remove_epsilon(arr)))

# with open("a.cfg", "w") as fp:
#     fp.write(to_mentor(arr))
# with open("b.cfg", "w") as fp:
#     fp.write()
#
# import os
# os.system("./mentor a.cfg generate 100 > a.txt")
# os.system("./mentor b.cfg generate 100 > b.txt")
if __name__ == "__main__":
    spit(remove_epsilon(parse()))
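
For context, epsilon removal rewrites the grammar so that no nonterminal (apart from, possibly, the start symbol) derives the empty string, and compensates by adding variants of every rule that used the nullable symbol. A tiny before/after illustration, assuming a cfg represented as a dict of nonterminal -> list of productions, with "_" marking the empty production as in the snippet (the real format produced by parse() in this project is not shown here):

# Hypothetical representation, not the project's actual data structure.
before = {"S": ["AB", "b"], "A": ["a", "_"], "B": ["c"]}

# A is nullable, so every rule using A also gets an A-less variant.
after = {"S": ["AB", "B", "b"], "A": ["a"], "B": ["c"]}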
Example #8
 def eval(self, vm, code):
     proc = self.compiler.compile(helper.parse(code), vm.env)
     return vm.run(proc)
Example #9
def parse_first_challenge(message):
	split_msg = helper.parse(message)
	print(split_msg)
	return tuple(split_msg)
Example #10
    
    # Only process if input is as expected to contain location, year, url
    if len(inputs) == 3:
      location  = inputs[0]
      year      = inputs[1]
      url       = inputs[2]

      # Retrieve data from URL
      data = helper.retrieveSourceFromURL(url)

      # Remove all newline carriages
      data = data.split('\r\n')
      data = "".join(data)

      # Parse data into proper format of list of n-tuples
      data = helper.parse(data, HEADERS_AND_PATTERNS[this_type][PATTERN])

      # Generate CSV file for result
      helper.writeCSV(
        this_type, 
        location, 
        year, 
        HEADERS_AND_PATTERNS[this_type][HEADER], 
        data)
    
    # Notify of invalid input parameters
    else:
      print "INVALID INPUT"


Example #11
File: proxy.py Project: hmmohsin/DAS
def runProxy(ipAddr):
    conf = config.config(configFile)
    conf.load_config()

    lport = conf.get_config('lport')
    #ipAddr = conf.get_config('ipAddr')

    nnIPAddr = conf.get_config('nnIPAddr')
    nnPort = conf.get_config('nnPort')
    HDFSDataDir = conf.get_config('HDFSDataDir')

    metaDataHandle = metadata.MetaData(nnIPAddr, nnPort, ipAddr)
    metaDataHandle.loadMetaData(HDFSDataDir)

    print "Ready"
    servSock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    servSock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    servSock.setblocking(0)
    servSock.bind((ipAddr, lport))
    servSock.listen(10)
    connList = {}
    reqMsgList = {}

    epoll = select.epoll()
    epoll.register(servSock.fileno(), select.EPOLLIN)

    try:
        while True:
            events = epoll.poll(1)
            for sockDesc, event in events:
                if sockDesc == servSock.fileno():
                    print "Received new connection"
                    conn, addr = servSock.accept()
                    conn.setblocking(0)
                    epoll.register(conn.fileno(), select.EPOLLIN)
                    connList[conn.fileno()] = conn
                elif event & select.EPOLLHUP:
                    epoll.unregister(sockDesc)
                    connList[sockDesc].close()
                    del connList[sockDesc]
                elif event & select.EPOLLIN:
                    cSock = connList[sockDesc]
                    data = cSock.recv(1024)
                    if len(data) == 0:
                        print "Connection closed"
                        epoll.unregister(sockDesc)
                        connList[sockDesc].close()
                        del connList[sockDesc]
                        continue
                    if sockDesc in reqMsgList.keys():
                        reqMsgList[sockDesc] += data
                    else:
                        reqMsgList[sockDesc] = data

                    if len(reqMsgList[sockDesc]) == HDRLEN:
                        epoll.unregister(sockDesc)
                        msgHdr = reqMsgList[sockDesc]
                        msg = helper.parse(msgHdr)
                        if msg['reqType'] == REQUEST_GET:

                            del connList[sockDesc]
                            del reqMsgList[sockDesc]

                            blocksInfo = {}
                            filePath = helper.getObjPath(
                                msg['objectID'], HDFSDataDir)

                            print "Received new request for file %s" % filePath
                            if (metaDataHandle.fileExist(filePath)):

                                fileMeta = metaDataHandle.getFileMeta(filePath)
                                responseMsg = helper.createResponse(
                                    REQUEST_ACCEPT, msg['priority'],
                                    msg['objectID'], 0, fileMeta['numBytes'])
                                cSock.send(responseMsg)
                                newWorker = worker.worker(
                                    filePath, fileMeta, cSock)
                                newWorker.start()
                            else:
                                print "File %s not found" % filePath
                                responseMsg = helper.createResponse(
                                    REQUEST_NOTFOUND, msg['priority'],
                                    msg['objectID'], 0, 0)
                                cSock.send(responseMsg)
    except socket.error, msg:
        print "Some issue in running proxy. %s\n" % msg
Example #12
backend = os.environ.get('CK_IBM_BACKEND', 'ibmq_qasm_simulator')
timeout = int(os.environ.get('CK_IBM_TIMEOUT', 120))
shots = int(os.environ.get('CK_IBM_REPETITION', 10))
verbose = int(os.environ.get('CK_IBM_VERBOSE', 0)) != 0

api = IBMQuantumExperience(Qconfig.API_TOKEN, Qconfig.config, verify=True)

if verbose: print(api.backend_status(backend))

if verbose: print(api.get_my_credits())

# get the QASM code to manage via the CK tool

#api.run_experiment(qasm, backend, shots, name=None, timeout)
valid = helper.parse(qasm_example_abs_path)
if not valid:
    print("Qsam Error")
    exit(1)

qasm_file = open(qasm_example_abs_path, 'r')
quantum_program = qasm_file.read()
qasm_file.close()
q = [{'qasm': quantum_program}]
# q is the QASM job payload; use q1 instead if you call api.run_experiment(qasm, backend, shots, name=None, timeout=60)
q1 = quantum_program
max_credits = 3
status = api.run_job(q, backend, shots, max_credits)

lc = api.get_last_codes()
#if verbose: lc qasms
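
run_job only submits the circuits; results have to be fetched afterwards. A hedged sketch of polling with the legacy IBMQuantumExperience client, assuming its get_job method and a response dict carrying 'id' and 'status' (field names as commonly seen with that client, not taken from this snippet):

import time

job_id = status.get('id')          # status is the run_job() return value above
while True:
    job = api.get_job(job_id)      # verify this call against your client version
    if job.get('status') in ('COMPLETED', 'ERROR', 'CANCELLED'):
        break
    time.sleep(5)

if verbose:
    print(job)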