Example #1
def compose_async(song_key):
    model = get_model()

    while True:
        diversity = random.uniform(0.7, 1.0)
        sentence = '#' * MEMORY_LENGTH + 'X:'
        sentence = sentence[-MEMORY_LENGTH:]
        generated = 'X:'

        while True:
            x = np.zeros((1, MEMORY_LENGTH, len(model.chars)))
            for t, char in enumerate(sentence):
                x[0, t, model.char_indices[char]] = 1.

            preds = model.predict(x, verbose=0)[0]
            next_index = utils.sample(preds, diversity)
            next_char = model.indices_char[next_index]

            sentence = sentence[-MEMORY_LENGTH + 1:] + next_char
            generated += next_char

            if generated.endswith('$$$'):
                try:
                    song = Song.objects.get(key=song_key)
                    song.song = generated.rstrip('$')
                    song.save()
                    
                    writer.write(song_key)
                except WriterException:
                    break
                else:
                    return

            if len(generated) > MAX_SONG_LENGTH:
                break
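Note: utils.sample is not shown in the excerpt. A minimal sketch of the temperature-sampling helper the call implies (a common Keras-style implementation; the function name, signature, and the 1e-8 epsilon are assumptions):

import numpy as np

def sample(preds, diversity=1.0):
    # Rescale the softmax output by the diversity (temperature): higher
    # values flatten the distribution and yield more varied characters.
    preds = np.asarray(preds).astype('float64')
    preds = np.log(preds + 1e-8) / diversity
    exp_preds = np.exp(preds)
    preds = exp_preds / np.sum(exp_preds)
    # Draw one sample from the rescaled distribution and return its index.
    probas = np.random.multinomial(1, preds, 1)
    return np.argmax(probas)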
Example #2
def solve(problems):
    files = {'a': 'a_example.txt',
             'b': 'b_read_on.txt',
             'c': 'c_incunabula.txt',
             'd': 'd_tough_choices.txt',
             'e': 'e_so_many_books.txt',
             'f': 'f_libraries_of_the_world.txt'}

    pool = Pool()
    scores = []
    for f in problems:
        run_file = files[f]
        days_left, remaining_libs = reader.read('./inputs/' + run_file)
        outputs = []
        while days_left > 0 and len(remaining_libs) > 0:
            # Tuning:
            # For b, c, f: 50 is better than 0
            # For e: 0 is better than 50
            scores = pool.map(lambda x: x.get_score(days_left),
                              remaining_libs)
            next_lib = remaining_libs[np.argmax(scores)]
            _ = pool.map(lambda x: x.scan_copy(), next_lib.books.values())
            remaining_libs.remove(next_lib)
            next_lib.books = next_lib.avail_books(days_left)
            if not next_lib.books:
                continue
            _ = pool.map(lambda x: x.remove_dupes(next_lib.books.keys()),
                         remaining_libs)

            days_left = days_left - next_lib.signup
            outputs.append(next_lib)

        writer.write('./outputs/' + run_file, outputs)
        scores.append(scorer.score(run_file))

    return scores
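Note: the Pool import is not shown, and the standard multiprocessing.Pool cannot pickle the lambdas used above; a thread-backed pool is the likely intent. An import that would make those calls legal (an assumption, not part of the excerpt):

from multiprocessing.dummy import Pool  # threads, so lambdas need no pickling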
Example #3
def compose_async(song_key):
    model = get_model()

    while True:
        diversity = random.uniform(0.7, 1.0)
        sentence = '#' * MEMORY_LENGTH + 'X:'
        sentence = sentence[-MEMORY_LENGTH:]
        generated = 'X:'

        while True:
            x = np.zeros((1, MEMORY_LENGTH, len(model.chars)))
            for t, char in enumerate(sentence):
                x[0, t, model.char_indices[char]] = 1.

            preds = model.predict(x, verbose=0)[0]
            next_index = utils.sample(preds, diversity)
            next_char = model.indices_char[next_index]

            sentence = sentence[-MEMORY_LENGTH + 1:] + next_char
            generated += next_char

            if generated.endswith('$$$'):
                try:
                    song = Song.objects.get(key=song_key)
                    song.song = generated.rstrip('$')
                    song.save()

                    writer.write(song_key)
                except WriterException:
                    break
                else:
                    return

            if len(generated) > MAX_SONG_LENGTH:
                break
Example #4
def process(input_path, output_path, max_files, max_events, site_altitude,
            types, telescopes, site_location, choppoints, id_no,
            apply_quality_cuts):

    for typename in types:

        print("Processing", typename)

        # Get a list of the files for this source type
        files = glob.glob(input_path + typename + '/*.simtel.zst')
        files = files + glob.glob(input_path + typename + '/*.simtel.gz')

        if len(files) == 0:
            print("No ", typename, " files found")
            continue

        # Process the files
        telescope_events_data, array_events_data, runs_all, stereo, positions = process_type(
            files, max_files, max_events, site_altitude, telescopes,
            choppoints, apply_quality_cuts)

        # Skip writing if nothing was processed
        if telescope_events_data is None:
            print(typename,
                  "did not have any events output (maybe too low-energy?)")
            continue

        site_location.append(runs_all[0]['prod_site_alt'])

        writer.write(typename, output_path, site_location, array_events_data,
                     telescope_events_data, runs_all, positions, stereo, id_no)
Example #5
def display():
    global strip

    modifier = 0.01
    color_filter = 0

    while True:
        computed_strip = []
        if color_filter < 0 or color_filter > 1:
            modifier = -modifier

        for led in strip:
            if led[0] == colors.STATIC:
                computed_strip.append(led[1])
                continue

            if led[0] == colors.BLINK:
                if modifier > 0:
                    computed_strip.append(led[1])
                else:
                    computed_strip.append(colors.BLACK)
                continue

            computed_strip.append(fade_color(led[1], color_filter))

        color_filter += modifier
        writer.write([fade_color(x, FADE) for x in computed_strip])
        time.sleep(0.01)
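Note: fade_color is not defined in the excerpt. A minimal sketch, assuming colors are (r, g, b) tuples and the filter is a brightness factor:

def fade_color(color, factor):
    # Clamp the factor to [0, 1] and scale each channel linearly.
    factor = min(max(factor, 0.0), 1.0)
    return tuple(int(channel * factor) for channel in color)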
Example #6
def song(request, key):
    song = get_object_or_404(Song.objects.all(), key=key)
    if not os.path.isfile(song.mp3_file) and song.is_composed:
        writer.write(key)

    return render_to_response('composer/song.html', {
        'song': song,
    }, RequestContext(request))
Example #7
def song(request, key):
    song = get_object_or_404(Song.objects.all(), key=key)
    if not os.path.isfile(song.mp3_file) and song.is_composed:
        writer.write(key)

    return render_to_response('composer/song.html', {
        'song': song,
    }, RequestContext(request))
Example #8
def process_file(solve, input_file):
    world = parse(input_file=os.path.join('./input_files', input_file))

    solution = solve(world)

    output_file = os.path.basename(
        os.path.splitext(input_file)[0]) + '_solution.txt'
    output_file = os.path.join('./output_files', output_file)
    write(solution, output_file)
Example #9
def write(dirpath, crxpath, pemfile=None, pem=None):
    if pem is None:
        with open(pemfile) as fp:
            pem = fp.read()

    data = writer.zipdir(dirpath)
    der_key, signed = writer.sign(data, pem)

    # CRX output is binary data, so write in binary mode
    with open(crxpath, 'wb') as out:
        writer.write(out, data, der_key, signed)
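A possible call site for the function above (all paths are placeholders):

write('./myext', './myext.crx', pemfile='./key.pem')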
Example #10
    def _update_directory(self):

        text = QtWidgets.QFileDialog.getExistingDirectory(
            None, 'Select save folder', str(Path.home()),
            QtWidgets.QFileDialog.ShowDirsOnly)

        wt.clear(file_name='save_path.txt')
        wt.write(msg='{}'.format(text), file_name='save_path.txt')
        sd.path_refresh()
        self.download_info_dir.setText(sd.main_path)
Example #11
    def run(self):

        # Read UNV file
        base_name = os.path.basename(self.unv_file_name)
        logging.info('Reading ' + base_name)
        fem = reader.UNV(self.unv_file_name).read()

        # Write INP file
        base_name = os.path.basename(self.inp_file_name)
        logging.info('Writing ' + base_name)
        writer.write(fem, self.inp_file_name)
Example #12
def main(argv):

    path = 'C:/Users/sidsu/Downloads/FPL_AI/Data/'
    path = path + argv[1] + '/Individual_Player_Data/'
    players_data = pd.read_excel(
        'C:/Users/sidsu/Downloads/FPL_AI/Data/Players.xlsx', index_col=0)
    for file in os.listdir(path):
        name = file.split('.')[0]
        [_, players_data] = global_merger.check(name, argv[1], players_data)
    writer.write(players_data, 'C:/Users/sidsu/Downloads/FPL_AI/Data/',
                 'Players.xlsx')
Example #13
def codegen(path, swagger_doc, force=False, appname=None):
    if appname is None:
        appname = path.split('/')[-1].replace('-', '_')

    with codecs.open(swagger_doc, 'r', 'utf-8') as f:
        m = parse_yaml(f)
    if not m:
        print('swagger-doc could not be read.')
        exit(-1)

    writer.write(m, path, appname, force)
Example #14
def download_chooser(download_url: str, audio_only=True):
    callback = get_audio if audio_only else get_video
    check_if_dirs_exists(main_sp=sd.main_path,
                         video_p=sd.vid_path,
                         music_p=sd.music_path)
    if check_if_playlist(download_url):
        url_list = get_from_playlist(download_url)
        for url in url_list:
            callback(url)
    else:
        callback(download_url)
    wt.write('FINISHED!!!\n')
Example #15
def main():
	print('Brewing numbers... ')
	writer.write('GOODNESS OF FIT TEST FOR FIVE DISTRIBUTIONS\n')
	goodness_of_fit_lcg.main()  # question 1
	writer.append('\n')
	goodness_of_fit_rand.main()  # question 2
	writer.append('\n')
	goodness_of_fit_boxmuller.main()  # question 3
	writer.append('\n')
	goodness_of_fit_cauchy.main()  # question 4
	writer.append('\n')
	goodness_of_fit_gamma.main()  # question 5
	print('Done. Results written to result/result.txt')
Example #16
def write():
    question = "Do you want to add an entry? [y/n]"
    entries = list()

    while True:
        choice = io_main.query_yes_no(question)

        if choice == "y":
            entry = input()
            entries.append(list(entry))
        else:
            return

    hwriter.write('list.csv', entries)
Example #17
def main():
	r = zeros((1000000, 3))  # zeros() needs an integer shape, not the float 1e6
	for i in range(r.shape[0]):
		for j in range(r.shape[1]):
			r[i, j] = random.exponential()
	c1 = r[:, 0] + 2*r[:, 1] + 3*r[:, 2]
	# ques a
	id_a = c1 > 15
	# ques b
	id_b = c1 < 1
	out_a = r[id_a, 0] + 2*r[id_a, 1] + 3*r[id_a, 2]
	out_b = r[id_b, 0] + 2*r[id_b, 1] + 3*r[id_b, 2]
	writer.write('')
	writer.append('a: '+str(mean(out_a)))
	writer.append('b: '+str(mean(out_b)))
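Note: the element-by-element fill in this example can be collapsed into a single vectorized draw; an equivalent sketch, assuming random is numpy.random as the per-element draws imply:

r = random.exponential(size=(1000000, 3))

Example #18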
def get_audio(url):
    ydl_opts = {
        'format': 'bestaudio',
        'outtmpl': f'{sd.music_path}/%(title)s.mp3',
        'noplaylist': True,
        'progress_hooks': [my_hook],
    }

    with youtube_dl.YoutubeDL(ydl_opts) as ydl:
        vid = ydl.extract_info(url, download=False)
        print(vid['title'])
        wt.write('-----\nDownloading %s\n' % vid['title'])
        ydl.download([url])
        print(vid['title'])
        wt.write('Finished downloading %s\n-----\n' % vid['title'])
Example #19
def main():
    options, args = _parse_args()

    filename = args[0]

    imported = parser.parse(filename, options.read_format)

    if options.outfile is not None:
        outfile = open(options.outfile, "w")
    else:
        outfile = sys.stdout

    writer.write(imported, outfile, options.write_format)

    if options.outfile is not None:
        outfile.close()
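The open/close bookkeeping around the writer call can also be expressed with contextlib so the file is closed even on errors; a sketch, not the original code:

import contextlib
import sys

with contextlib.ExitStack() as stack:
    if options.outfile is not None:
        outfile = stack.enter_context(open(options.outfile, "w"))
    else:
        outfile = sys.stdout
    writer.write(imported, outfile, options.write_format)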
Example #20
def process_file(solve, input_file):
    print("processing %s" % (input_file, ))
    output_file = get_output_file(input_file)

    world = parse(input_file=os.path.join('./input_files', input_file))
    analyze_world(world)
    t0 = time.time()
    solution = solve(world)
    t1 = time.time()
    print("solution took %.1f sec" % (t1 - t0, ))
    score = calculate_score(world, solution)
    t2 = time.time()
    print("calculate score took %.1f sec" % (t2 - t1, ))
    print("SCORE: %d" % score)
    write(solution, output_file)
    return score
Example #21
def get_tokens(data_type):
    folder = "./{0}".format(data_type)
    file_names = [f for f in listdir(folder) if ".txt" in f]
    final_list = []

    for file_name in file_names:
        # close each file promptly instead of leaking the handle
        with open("{0}/{1}".format(folder, file_name), "r") as file:
            text = file.read()
        lines = text.split("\n")

        for line in lines:
            tokens = nltk.word_tokenize(line.strip())

            for token in tokens:
                token = token.lower()

                if "//" in token:
                    continue

                if re.search("^[+\-\/.].*$", token):
                    token = token[1:]

                if token in stopword_list or not re.search(
                        '[a-zA-Z]', token) or re.search("[0-9]$", token):
                    continue

                final_list.append(token)

    final_list = sorted(final_list)
    ret_list = final_list
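    # ret_list deliberately keeps duplicates; only the file written below is deduplicated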

    data = ""
    final_list = sorted(set(final_list))

    for token in final_list:
        data += "{0}\n".format(token)

    writer.write("Tokens_{0}".format(data_type), data)
    return ret_list
Example #22
def codegen(path, swagger_doc, force=False, appname=None, specification=True, ui=True):
    if appname is None:
        appname = path.split('/')[-1].replace('-', '_')

    with codecs.open(swagger_doc, 'r', 'utf-8') as f:
        m = parse_yaml(f)
    if not m:
        print('swagger-doc could not be read.')
        exit(-1)

    if specification:
        import yaml
        import model
        res = model.Resource('/_swagger', m)
        method = model.Method('get', res)
        with codecs.open(swagger_doc, 'r', 'utf-8') as f:
            method.response_example = yaml.safe_load(f)
        res.methods = {'GET': method}
        m.add_resource(res)

    writer.write(model=m, base_path=path, appname=appname,
                 overwrite=force, ui=ui)
Example #23
def scrape_movies(movies, actor_name):
    # We need to rate limit ourselves doing this
    # This also gives us time to parse through the html in a different thread?
    # Get the list of movie urls
    url_list = [(glob_url + m['URL'], m['Name']) for m in movies]
    final_list = []

    # Create a pool of workers and assign them each a url.
    print("Starting worker pool (10 workers)")
    with pool_context(processes=10) as pool:
        final_list = pool.map(partial(manip_movie, actor=actor_name), url_list)
    #print(final_list)
    print("Done working")

    # gonna try flattening that list, see what happens.
    final_list = filter(
        None, [i for sublist in filter(None, final_list) for i in sublist])
    #print(final_list)
    #for elm in final_list:
    #print(elm)

    #raise SyntaxError("DONE")
    out.write(final_list, actor_name)
    """
Example #24
def ExportCMB(export_op):
    """Entry function, called by export operator"""
    params = export_op.parameters()
    logger = export_op.log()

    # Get the simulation attribute resource
    sim_atts = smtk.attribute.Resource.CastTo(params.find("attributes").value())
    if sim_atts is None:
        msg = "ERROR - No simulation attributes"
        print(msg)
        raise RuntimeError(msg)

    # Get output filepath
    output_file_item = params.findFile("output-file")
    output_file = output_file_item.value(0)

    # Create output folder if needed
    output_dir = os.path.dirname(output_file)
    if not os.path.exists(output_dir):
        os.makedirs(output_dir)

    from writer import ats_writer

    imp.reload(ats_writer)

    writer = ats_writer.ATSWriter(sim_atts)
    completed = writer.write(output_file)
    print("Writer completion status: %s" % completed)

    # In some runtime environments, stdout is null
    if sys.stdout is not None:
        sys.stdout.flush()

    # print('ats.py number of warnings:', len(writer.warning_messages))
    # for msg in writer.warning_messages:
    #     logger.addWarning(msg)

    return completed
Example #25
def main():
	print("Starting...")
	writer.write("")
	montecarlo.main()
	walker.main()
	print("Done.\nResults saved in result/result.txt")
Example #26
import writer

writer.write('html')
writer.write('txt')
Example #27
from reader import read
from writer import write

if __name__ == '__main__':
    write()
    read()
Example #28
        # cv2.imshow('Dilated', dilated)
        # cv2.imshow('Edges', edges)
        cv2.imshow('New', new)

        # box = cv2.boxPoints(rects)
        if (len(rects) != 0):
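            # collect the four corner points of the first detected rectangle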
            for p in rects[0]:
                pt = (p[0], p[1])
                print(pt)
                cv2.circle(orig, pt, 5, (200, 0, 0), 2)
                transform_coordinates.append(list(pt))
            # sorted(transform_coordinates, key=lambda x: x[0])
            # transform_coordinates.sort(key = lambda transform_coordinates: transform_coordinates[0])

            transform_coordinates = sort_points(transform_coordinates)
            print(transform_coordinates)
            # del transform_coordinates[3]
            cv2.imshow("plank", orig)
            break

        # time.sleep(10)
        k = cv2.waitKey(30) & 0xff
        if (k == ord('q')):
            break

    # trans_M = get_TransformMatrix(transform_coordinates)
    writer.write(cap, transform_coordinates)

# cap.release()
cv2.destroyAllWindows()
Example #29
    def __map_callback(self, msg):
        self.get_analyzer('UlMacLatencyAnalyzer').enable_mapping()

        # For each incoming PDCP packet, map it to an rlc packet and then get the waiting/processing delay accordingly.
        # print "called"
        if msg.type_id == "LTE_PDCP_UL_Cipher_Data_PDU":
            self.cnt1+=1
            # self.log_info(str(msg.timestamp))
            before_decode_time = time.time()
            if not self.__init_time:
                self.__init_time = time.time()
                self.get_analyzer('UlMacLatencyAnalyzer').set_init_time(time.time())
                self.get_analyzer('UlMacLatencyAnalyzer').set_sample_rate(self.sample_rate)
            if (not self.sample_on_t and ((self.cnt1+self.shift_l) % (1/self.pdcp_sample_rate) == 0)) or \
                (self.sample_on_t and self.__init_time and (( \
                before_decode_time - self.__init_time)  % 1 < self.pdcp_sample_rate_t)):
                self.log_cnt1 += 1

                log_item = msg.data.decode()
                self.log_info(str(log_item['timestamp']))
                self.__decode_delay += time.time() - before_decode_time
                before_ana_time = time.time()
                if 'Subpackets' in log_item:
                    subPkt = log_item['Subpackets'][0]
                    self.bytes1 += subPkt['Subpacket Size']
                    listPDU = subPkt['PDCPUL CIPH DATA']
                    for pduItem in listPDU:
                        if pduItem['Cfg Idx'] == 3:
                        # print pduItem
                        # sn = int(pduItem['SN'])
                            sys_fn = int(pduItem['Sys FN'])
                            sub_fn = int(pduItem['Sub FN'])
                            pdu_size = int(pduItem['PDU Size'])
                            self.pdcp_buffer.append([log_item['timestamp'], sys_fn*10+sub_fn, pdu_size, pdu_size])
                            # print 'New PDCP: ', log_item['timestamp'], sys_fn*10+sub_fn, pdu_size, pdu_size
                    self._ana_delay1 += time.time() - before_ana_time
                    self._ana_delay += time.time() - before_ana_time

        elif msg.type_id == "LTE_RLC_UL_AM_All_PDU":
            self.cnt2 += 1
            before_decode_time = time.time()
            if (not self.sample_on_t and (self.cnt2 % (1 / self.rlc_sample_rate) == 0)) or \
                    (self.sample_on_t and self.__init_time and (( \
                    before_decode_time - self.__init_time)  % 1 < self.rlc_sample_rate_t)):
                self.log_cnt2 += 1
                log_item = msg.data.decode()
                self.__decode_delay += time.time() - before_decode_time
                before_ana_time = time.time()
                # print log_item_dict
                before_ana_time221 = time.time()
                if 'Subpackets' in log_item:
                    subPkt = log_item['Subpackets'][0]
                    self.bytes2 += subPkt['Subpacket Size']
                    listPDU = subPkt['RLCUL PDUs']
                    self._ana_delay211 += time.time() - before_ana_time221
                    for pduItem in listPDU:
                        before_ana_time211 = time.time()
                        if pduItem['PDU TYPE'] == 'RLCUL DATA' and pduItem['rb_cfg_idx'] == 3:
                            sn = int(pduItem['SN'])
                            sys_fn = int(pduItem['sys_fn'])
                            sub_fn = int(pduItem['sub_fn'])
                            hdr_len = int(pduItem['logged_bytes'])  # rlc_pdu_size = pdcp_pdu_size + rlc_hdr_len
                            sdu_size = int(pduItem['pdu_bytes']) - hdr_len
                            li_flag = len(pduItem['RLC DATA LI']) if 'RLC DATA LI' in pduItem else 0
                            fi = pduItem['FI'] # FI: 01 stands for begining of segments, \
                                               # 10 stands for end of segments, 11 stands for middle segments
                            # TODO: check if all rlc packets seq # is ordered
                            # print log_item['timestamp'], sn, sys_fn*10 + sub_fn, sdu_size, fi, li_flag
                            if len(self.rlc_buffer) > 0 and sn - self.rlc_buffer[-1][0] > 1:
                                pass
                                # print "Debug info: ", self.rlc_buffer[-1][-1], sn
                            if fi == '01' or fi == '00':
                                self.rlc_buffer = [log_item['timestamp'], (sn, sys_fn*10 + sub_fn, sdu_size, li_flag)]
                            elif fi == '10' or fi == '11':
                                if self.rlc_buffer:
                                    self.rlc_buffer.append((sn, sys_fn*10 + sub_fn, sdu_size))
                                elif fi == '10': # A rlc segment starts while former one didn't end
                                    self.log_debug("Packet loss. Buffer=" + str(self.rlc_buffer))
                            else:
                                self.log_error("Unknown FI field in RLC_UL_AM_ALL_PDU.")
                            self._ana_delay211 += time.time() - before_ana_time211
                            before_ana_time212 = time.time()
                            if fi == '00' or fi == '10':
                                # print 'PDCP:', self.pdcp_buffer
                                # print 'RLC:', self.rlc_buffer
                                while self.pdcp_buffer and self.rlc_buffer and self.__sn_is_before(self.pdcp_buffer[0][0],
                                        self.pdcp_buffer[0][1], self.rlc_buffer[0],
                                        self.rlc_buffer[1][1]):
                                    # self.log_info("Warning: discarded PDCP packet. " + str(self.pdcp_buffer[0]))
                                    del self.pdcp_buffer[0]
                                while len(self.rlc_buffer) > 1 and self.pdcp_buffer:
                                    # print 'This round PDCP:', self.pdcp_buffer
                                    # print 'This round RLC: ', self.rlc_buffer
                                    if not self.pdcp_buffer:
                                        break
                                    if self.__sn_is_before(self.rlc_buffer[0], self.rlc_buffer[1][1], \
                                                           self.pdcp_buffer[0][0], self.pdcp_buffer[0][1], ):
                                        del self.rlc_buffer[1]
                                    else:
                                        # TODO: check if there are matched RLC packets
                                        # print rlc_sdu, pdcp_pdu
                                        rlc_sdu_size = self.rlc_buffer[1][2]
                                        if rlc_sdu_size > self.pdcp_buffer[0][3]:
                                            while self.pdcp_buffer and rlc_sdu_size > self.pdcp_buffer[0][3]:
                                                # matched
                                                # print 'PDCP: ', self.pdcp_buffer[0], '\nRLC: ', self.rlc_buffer[1]
                                                self.mapped_pdcp_rlc.append((self.rlc_buffer[0], \
                                                        self.pdcp_buffer[0][1], self.pdcp_buffer[0][2], \
                                                        self.__get_time_inter(self.rlc_buffer[1][1], \
                                                        self.pdcp_buffer[0][1])))
                                                rlc_sdu_size -= self.pdcp_buffer[0][3]
                                                del self.pdcp_buffer[0]
                                            if self.pdcp_buffer:
                                                if rlc_sdu_size == self.pdcp_buffer[0][3]:
                                                    # matched
                                                    # print 'PDCP: ', self.pdcp_buffer[0], '\nRLC: ', self.rlc_buffer[1]
                                                    self.mapped_pdcp_rlc.append((self.rlc_buffer[0], \
                                                        self.pdcp_buffer[0][1], self.pdcp_buffer[0][2], \
                                                        self.__get_time_inter(self.rlc_buffer[1][1], \
                                                        self.pdcp_buffer[0][1])))
                                                    del self.pdcp_buffer[0]
                                                    del self.rlc_buffer[1]
                                                else:
                                                    self.pdcp_buffer[0][3] -= rlc_sdu_size
                                                    del self.rlc_buffer[1]
                                        elif rlc_sdu_size == self.pdcp_buffer[0][2]:
                                            # matched
                                            self.mapped_pdcp_rlc.append((self.rlc_buffer[0], \
                                                    self.pdcp_buffer[0][1], self.pdcp_buffer[0][2], \
                                                    self.__get_time_inter(self.rlc_buffer[1][1], \
                                                    self.pdcp_buffer[0][1])))
                                            # print 'PDCP: ', self.pdcp_buffer[0], '\nRLC: ', self.rlc_buffer[1]
                                            del self.pdcp_buffer[0]
                                            del self.rlc_buffer[1]
                                        else:
                                            self.pdcp_buffer[0][3] -= rlc_sdu_size
                                            del self.rlc_buffer[1]
                                if len(self.rlc_buffer) == 1:
                                    self.rlc_buffer = []
                            self._ana_delay212 += time.time() - before_ana_time212

                    self._ana_delay21 += time.time() - before_ana_time
                    before_ana_time22 = time.time()
                    if self.mapped_pdcp_rlc:
                        # print 'PDCP and RLC: ', self.mapped_pdcp_rlc
                        # print 'MAC: ', self.get_analyzer('UlMacLatencyAnalyzer').lat_stat

                        before_ana_time221 = time.time()
                        mac_pkts = self.get_analyzer('UlMacLatencyAnalyzer').lat_stat

                        # self.log_debug("len(mac_pkts): "+str(len(mac_pkts)))

                        self._ana_delay221 += time.time() - before_ana_time221


                        # discard the pdcp packet if it arrives before current mac packet
                        while self.mapped_pdcp_rlc and mac_pkts:
                            before_ana_time222 = time.time()
                            while self.mapped_pdcp_rlc and mac_pkts \
                                    and self.__sn_is_before(self.mapped_pdcp_rlc[0][0], self.mapped_pdcp_rlc[0][1] \
                                            , mac_pkts[0][0], mac_pkts[0][1]):
                                # self.log_info("Warning: discarded PDCP packet. " + str(self.mapped_pdcp_rlc[0]))
                                del self.mapped_pdcp_rlc[0]
                            self._ana_delay222 += time.time() - before_ana_time222
                            before_ana_time223 = time.time()
                            while self.mapped_pdcp_rlc and mac_pkts \
                                    and self.__sn_is_before(mac_pkts[0][0], mac_pkts[0][1], \
                                            self.mapped_pdcp_rlc[0][0], self.mapped_pdcp_rlc[0][1]):
                                # self.log_info("Warning: discarded MAC packet. " + str(mac_pkts[0]))
                                del mac_pkts[0]
                            self._ana_delay223 += time.time() - before_ana_time223

                            if self.mapped_pdcp_rlc and mac_pkts:
                                before_ana_time224 = time.time()
                                pkt_size = self.mapped_pdcp_rlc[0][2]
                                trans_delay = self.mapped_pdcp_rlc[0][3]
                                wait_delay = mac_pkts[0][3]
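                                # split the MAC delay: the first 4 units
                                # (presumably subframes) count as processing,
                                # any remainder as waiting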
                                if wait_delay > 4:
                                    wait_delay -= 4
                                    proc_delay = 4
                                else:
                                    proc_delay = wait_delay
                                    wait_delay = 0
                                self._ana_delay2241 += time.time() - before_ana_time224
                                before_ana_time2242 = time.time()
                                # kpi = {}
                                # kpi['pkt_size'] = str(pkt_size)
                                # kpi['wait_delay'] = str(wait_delay)
                                # kpi['proc_delay'] = str(proc_delay)
                                # kpi['trans_delay'] = str(trans_delay)
                                # self.broadcast_info('UL_LAT_BREAKDOWN', kpi)
                                self._ana_delay2242 += time.time() - before_ana_time2242
                                before_ana_time2243 = time.time()
                                # self.log_debug('UL_LAT_BREAKDOWN: ' + str(kpi))
                                self._ul_pkt_num += 1
                                # self.log_info(str(self._ul_pkt_num))
                                self._ana_delay2243 += time.time() - before_ana_time2243
                                before_ana_time2244 = time.time()
                                print "Mapped: ", self.mapped_pdcp_rlc[0][0], pkt_size, wait_delay, proc_delay, trans_delay
                                del self.mapped_pdcp_rlc[0]
                                del mac_pkts[0]
                                self._ana_delay2244 += time.time() - before_ana_time2244
                                self._ana_delay224 += time.time() - before_ana_time224
                    self._ana_delay22 += time.time() - before_ana_time22
                    self._ana_delay2 += time.time() - before_ana_time
                    self._ana_delay += time.time() - before_ana_time


        elif msg.type_id == "LTE_PHY_PUCCH_Tx_Report":
            self.cnt3 += 1
            before_decode_time = time.time()
            if (not self.sample_on_t and (self.cnt3 % (1 / self.phy_sample_rate) == 0)) or \
                    (self.sample_on_t and self.__init_time and (( \
                    before_decode_time - self.__init_time)  % 1 < self.phy_sample_rate_t)):
                self.log_cnt3 += 1
                log_item = msg.data.decode()
                self.__decode_delay += time.time() - before_decode_time
                before_ana_time = time.time()

                if 'Records' in log_item:
                    records = log_item['Records']
                    timestamp = str(log_item['timestamp'])

                    for record in records:
                        # pucch_tx_power = record['PUCCH Tx Power (dBm)']
                        # bcast_dict = {}
                        # bcast_dict['tx power'] = pucch_tx_power
                        # bcast_dict['timestamp'] = timestamp
                        # self.broadcast_info("PUCCH_TX_POWER", bcast_dict)
                        # self.log_info("PUCCH_TX_POWER: " + str(bcast_dict))
                        uciformat = record['Format']
                        if uciformat == 'Format 1':
                            self.init_flag = True
                            self.rb_slot1 = record['Start RB Slot 0']
                            self.rb_slot2 = record['Start RB Slot 1']
                            self.sr_sfn = record['Current SFN SF'] % 10  # subframenumber
                            self.sr_buffer.append([timestamp, record['Current SFN SF']])
                        elif uciformat == 'Format 1B' or uciformat == 'Format 1A':
                            # TODO: reset init_flag for new logs
                            if self.init_flag:
                                if int(record['Start RB Slot 1']) == self.rb_slot2 and int(record['Start RB Slot 0']) == self.rb_slot1 \
                                        and record['Current SFN SF'] % 10 == self.sr_sfn:
                                    self.sr_buffer.append([timestamp, record['Current SFN SF']])
                        elif uciformat == "Format 3":
                            # TODO: Deal with SR event in format 3
                            pass
                        if len(self.sr_buffer) > 40:
                            del self.sr_buffer[0]
                    self._ana_delay3 += time.time() - before_ana_time
                    self._ana_delay += time.time() - before_ana_time


        # get bsr and get mac harq retx delay
        elif msg.type_id == "LTE_MAC_UL_Transport_Block":
            self.cnt4 += 1
            before_decode_time = time.time()
            if (not self.sample_on_t and (self.cnt4 % (1 / self.mac_tx_sample_rate) == 0)) or \
                    (self.sample_on_t and self.__init_time and (( \
                    before_decode_time - self.__init_time)  % 1 < self.mac_tx_sample_rate_t)):
                self.log_cnt4 += 1
                log_item = msg.data.decode()
                self.__decode_delay += time.time() - before_decode_time
                before_ana_time = time.time()
                ts = str(log_item['timestamp'])

                # self.log_info(str(log_item))
                if 'Subpackets' in log_item:
                    self.bytes4 += log_item['Subpackets'][0]['SubPacket Size']
                    for pkt in log_item['Subpackets'][0]['Samples']:
                        grant = pkt['Grant (bytes)']
                        harq_id = pkt['HARQ ID']
                        pkt_size = grant - pkt['Padding (bytes)']
                        fn = int(pkt['SFN'])
                        sfn = int(pkt['Sub-FN'])
                        # self.log_info(str(pkt))
                        cell_id = 0 #int(pkt['Cell Id'])
                        self.bsr_buffer.append((ts, fn, sfn))
                        if self.mac_buffer[cell_id*8+harq_id-1] != []:
                            pkt_alias = self.mac_buffer[cell_id*8+harq_id-1]
                            self.trans_delay.append((pkt_alias[1], pkt_alias[2], pkt_alias[3], self.__get_time_inter(pkt_alias[2]*10 + pkt_alias[3], fn *10 + sfn)))
                        self.mac_buffer[cell_id*8+harq_id-1] = (pkt_size,ts,fn,sfn)
                        if len(self.trans_delay) > 40:
                            del self.trans_delay[0]

                self._ana_delay4 += time.time() - before_ana_time
                self._ana_delay += time.time() - before_ana_time


            if self._debug:
                self.log_info('decode ' + str(self.__decode_delay))
                self.log_info('ana ' + str(self._ana_delay))
                self.log_info('ana1 ' + str(self._ana_delay1))
                self.log_info('ana2 ' + str(self._ana_delay2))
                self.log_info('ana21 ' + str(self._ana_delay21))
                self.log_info('ana211 ' + str(self._ana_delay211))
                self.log_info('ana212 ' + str(self._ana_delay212))
                self.log_info('ana22 ' + str(self._ana_delay22))
                self.log_info('ana221 ' + str(self._ana_delay221))
                self.log_info('ana222 ' + str(self._ana_delay222))
                self.log_info('ana223 ' + str(self._ana_delay223))
                self.log_info('ana224 ' + str(self._ana_delay224))
                # self.log_info('ana2241 ' + str(self._ana_delay2241))
                # self.log_info('ana2242 ' + str(self._ana_delay2242))
                # self.log_info('ana2243 ' + str(self._ana_delay2243))
                # self.log_info('ana2244 ' + str(self._ana_delay2244))
                self.log_info('ana3 ' + str(self._ana_delay3))
                self.log_info('ana4 ' + str(self._ana_delay4))
                self.log_info('cnt1 ' + str(self.cnt1))
                self.log_info('cnt2 ' + str(self.cnt2))
                self.log_info('cnt3 ' + str(self.cnt3))
                self.log_info('cnt4 ' + str(self.cnt4))
                self.log_info('log_cnt1 ' + str(self.log_cnt1))
                self.log_info('log_cnt2 ' + str(self.log_cnt2))
                self.log_info('log_cnt3 ' + str(self.log_cnt3))
                self.log_info('log_cnt4 ' + str(self.log_cnt4))
                self.log_info('bytes1 ' + str(self.bytes1))
                self.log_info('bytes2 ' + str(self.bytes2))
                self.log_info('bytes4 ' + str(self.bytes4))
                from writer import write
                write(",".join(["UL_LAT_BD", str(self.__decode_delay),str(self._ana_delay), str(self._ana_delay1),str(self._ana_delay2), str(self._ana_delay21), str(self._ana_delay211), str(self._ana_delay212), str(self._ana_delay22), str(self._ana_delay221), str(self._ana_delay222), str(self._ana_delay223), str(self._ana_delay224), str(self._ana_delay3), str(self._ana_delay4), str(self.cnt1), str(self.cnt2), str(self.cnt3), str(self.cnt4),str(self.log_cnt1), str(self.log_cnt2), str(self.log_cnt3), str(self.log_cnt4), str(self.bytes1),  str(self.bytes2),  str(self.bytes4)]) +"/n")
Example #30
    def check_save_file(self):
        if not wt.check_if_file_has_data('./save_path.txt'):
            path = str(Path.home() / 'Documents')
            wt.write(msg=path, file_name='./save_path.txt')
Example #31
    try:
        read_ok = reader.read(args[0])
    finally:
        if xmlparser.cache is not None:
            xmlparser.cache.close()
            xmlparser.cache = None

    if not read_ok:
        sys.exit(1)

    if options.dump:
        import mk_dump
        mk_dump.dumpMakefile()
    else:
        if not writer.write():
            sys.exit(1)

    if config.track_deps:
        import dependencies
        dependencies.save(config.deps_file)


if __name__ == '__main__':
    if sys.version_info[0:3] < (2, 3, 0):
        sys.stderr.write('error: Bakefile requires at least Python 2.3.0\n')
        sys.exit(1)

    do_profiling = 0  # set to 1 if debugging bottlenecks

    try:
Example #32
    try:
        read_ok = reader.read(args[0])
    finally:
        if xmlparser.cache is not None:
            xmlparser.cache.close()
            xmlparser.cache = None

    if not read_ok:
        sys.exit(1)

    if options.dump:
        import mk_dump
        mk_dump.dumpMakefile()
    else:
        if not writer.write():
            sys.exit(1)

    if config.track_deps:
        import dependencies
        dependencies.save(config.deps_file)

if __name__ == '__main__':
    if sys.version_info[0:3] < (2, 3, 0):
        sys.stderr.write('error: Bakefile requires at least Python 2.3.0\n')
        sys.exit(1)

    do_profiling = 0  # set to 1 if debugging bottlenecks

    try:
        if do_profiling:
Example #33
def split(text, error=False, log_only=False, print_only=False):
    """Log to both stdout and a log file, using the writer and the logger module."""
    if not log_only:
        writer.write(text)
    if not print_only:
        lg.log(text, error=error)
Example #34
def parse_data(data):
    globals_ = data.globals  # trailing underscore avoids shadowing the built-in globals()
    name = data.name
    version = data.version
    models = []
    functions = data.functions
    for i in data.models:
        insert_model(models, i)

    return {'models': models, 'globals': globals_, 'name': name,
            'version': version, 'functions': functions}


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description='Run The Codegen To Automatically Generate Some Codez')
    parser.add_argument('-d', '--data', dest='data_path', default='./data.yaml', help='The path to data.yaml')
    parser.add_argument('-o', '--output', dest='out_path', default='./output', help='The output of the codegen.')
    parser.add_argument('-t', '--template', dest='template_path', default='./templates', help='The location of the templates')

    args = parser.parse_args()

    data = data.load(args.data_path)

    objects = parse_data(data)
    output = args.out_path
    templates = args.template_path

    write(templates, output, objects)
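    # Example invocation of the generator above (script name assumed):
    #   python codegen.py -d ./data.yaml -o ./output -t ./templates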

Example #35
# File: main.py
# Description: Random number reader and writer
# Author: Raymond Laughrey
# Email: [email protected]
# Date of Creation: Sat Nov 23 16:02:15 PDT 2019

import writer
import reader
import os

writer.write()
reader.read()
os.remove('randomnumbers.txt')  # Comment out this line to keep the file.
Example #36
    def save(self, password=None, prettify=True):
        error = write(self.__name, self.__data, password, prettify)
        if error:
            raise Exception(error)
Example #37
def save():
    # Convert the list of users to string (Serialize).
    data = json.dumps(__user_list)
    # Write the data to the file.
    writer.write(DATA_FILE, data)
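Note: the inverse operation is not shown. A minimal sketch of the matching load step, assuming a reader counterpart to writer.write and the JSON produced above:

import json

def load():
    # Deserialize the user list written by save(); reader.read is assumed.
    global __user_list
    __user_list = json.loads(reader.read(DATA_FILE))

Example #38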
def my_hook(d):
    if d['status'] == 'finished':
        wt.write('Done downloading, now converting ...\n')
Example #39
    def __map_callback(self, msg):

        #order packets by sequence number
        #allowdup: if duplicated sequence number is allowed
        def __order_by_SN(log_items, allowdup=True):
            if allowdup:
                return sorted(log_items, key=lambda x: x["SN"])
            else:
                log_items = sorted(log_items, key=lambda x: x["SN"])
                if (len(log_items) < 4):
                    return log_items
                #removing repeated items
                #following code equivalent to:
                #log_items[:] = [log_items[i] for i in range(0, len(log_items)-1) if log_items[i]["SN"] != log_items[i+1]["SN"]]
                del_element = []
                for i in range(len(log_items) - 1):
                    if (log_items[i]["SN"] == log_items[i + 1]["SN"]):
                        del_element.append(i)
                log_items = [
                    i for j, i in enumerate(log_items) if j not in del_element
                ]
                return log_items

        #check if sequence number of a list is incremental
        def check_SN_is_incremental(l):
            if l == []:
                return True

            #in case of double indexing
            if len(l) >= 3 and [a["SN"] for a in l[1:]] == list(
                    range(l[1]["SN"], l[-1]["SN"] + 1)):
                return True

            #checking the sequence is a list increases only by 1
            if [a["SN"] for a in l] == list(range(l[0]["SN"],
                                                  l[-1]["SN"] + 1)):
                return True
            else:
                return False

        cfg_idx = ""
        if msg.type_id == "LTE_PDCP_DL_Cipher_Data_PDU":
            message = msg.data.decode()
            write(str(message), action="direct")
            write("something /n", action="direct")
            if (message == None):
                return
            log_item = message["Subpackets"][0]
            for pduitem in log_item["PDCPDL CIPH DATA"]:
                cfg_idx = pduitem["Cfg Idx"]
                ret = self.config_to_pdcp.setdefault(cfg_idx, [])
                #initialize corresponding index without harming existing index
                self.updated_SN.setdefault(cfg_idx, False)
                pdcp = dict()
                pdcp["timestamp"] = message["timestamp"]
                pdcp["PDU Size"] = pduitem[
                    "PDU Size"]  # - pduitem["Logged Bytes"]
                pdcp["sys/sub_fn"] = (pduitem["Sys FN"], pduitem["Sub FN"])
                pdcp["SN"] = pduitem["SN"]
                #list of rlc packets matched by this pdcp packet
                pdcp["rlc"] = list()
                #a counter to account for decomposed rlc packet
                pdcp["decomposed"] = 0
                #print("pdcp size", pdcp["PDU Size"])
                self.config_to_pdcp[cfg_idx].append(pdcp)
                #if the SN is not incremental, that means we have a handover/gap in the records
                if not check_SN_is_incremental(
                        __order_by_SN(self.config_to_pdcp[cfg_idx])):
                    #print(__order_by_SN(self.config_to_pdcp[cfg_idx]))
                    #print("Triggered")
                    self.updated_SN[cfg_idx] = True
                self.discarded_packets_stat["pdcp"] += 1

        if msg.type_id == "LTE_RLC_DL_AM_All_PDU":
            #to jump the header records if needed
            if (self.header != 0):
                self.header -= 1
                return
            message = msg.data.decode()
            if message is None:
                return
            log_item = message["Subpackets"][0]

            #sometimes, the order of RLC is not guaranteed
            #removing duplicated sequence numbers
            size = 0
            for pduitem in __order_by_SN(log_item["RLCDL PDUs"], False):
                if (pduitem["Status"] != "PDU DATA"):
                    #opt out "PCU CTRL"
                    continue

                cfg_idx = pduitem["rb_cfg_idx"]

                #initialization of dictionary to prevent access value error
                self.last_packet_processed.setdefault(cfg_idx, -1)
                self.not_ordered_packet.setdefault(cfg_idx, [])
                self.config_to_rlc.setdefault(cfg_idx, [])
                self.updated_SN.setdefault(cfg_idx, False)
                self.last_round_complete.setdefault(cfg_idx, True)

                #there might be one missing packet that leads to an accumulation of errors
                if (len(self.not_ordered_packet[cfg_idx]) > 80):
                    self.last_ordered_packet[cfg_idx] = max(
                        self.not_ordered_packet[cfg_idx][39]["SN"],
                        self.last_ordered_packet[cfg_idx])
                    self.config_to_rlc[cfg_idx] += self.not_ordered_packet[
                        cfg_idx][:40]
                    self.not_ordered_packet[cfg_idx] = self.not_ordered_packet[
                        cfg_idx][40:]
                    if (self.config_to_rlc[cfg_idx][-1]["FI"][-1] == "0"):
                        self.last_round_complete[cfg_idx] = True
                    else:
                        self.last_round_complete[cfg_idx] = False

                if (self.updated_SN[cfg_idx]):
                    #the last system time of pdcp, use it to track rlc packets
                    std_sys = self.config_to_pdcp[cfg_idx][-1]["sys/sub_fn"][0]
                    #the last pdcp's SN, use it to track other pdcp packets
                    std_sn = self.config_to_pdcp[cfg_idx][-1]["SN"]

                    #pdcp/rlc before handover
                    rlc_pre = []
                    for i in range(
                            len(self.config_to_pdcp[cfg_idx]) - 1, -1, -1):
                        #comparing by back-counting elements
                        if self.config_to_pdcp[cfg_idx][i]["SN"] != std_sn - (
                                len(self.config_to_pdcp[cfg_idx]) - i - 1):
                            self.config_to_pdcp[cfg_idx].remove(
                                self.config_to_pdcp[cfg_idx][i])

                    #use interval of pdcp to get a range of correct rlc time
                    mi = min([
                        i["sys/sub_fn"][0]
                        for i in self.config_to_pdcp[cfg_idx]
                    ])
                    ma = max([
                        i["sys/sub_fn"][0]
                        for i in self.config_to_pdcp[cfg_idx]
                    ])

                    #a modifiable metric to rule out rlc
                    def metric(i):
                        return i["sys/sub_fn"][0] < mi or i["sys/sub_fn"][
                            0] > ma

                    for i in self.config_to_rlc[cfg_idx][:]:
                        if (metric(i)):
                            self.config_to_rlc[cfg_idx].remove(i)
                    for i in self.not_ordered_packet[cfg_idx][:]:
                        if (metric(i)):
                            rlc_pre.append(i)
                            self.not_ordered_packet[cfg_idx].remove(i)
                    #self.config_to_rlc[cfg_idx] += rlc_pre
                    self.config_to_rlc[cfg_idx] = __order_by_SN(
                        self.config_to_rlc[cfg_idx])
                    #determine last ordered packet
                    #if there is no correct rlc packets, then simply reset it
                    if self.config_to_rlc[cfg_idx]:
                        self.last_ordered_packet[cfg_idx] = self.config_to_rlc[
                            cfg_idx][-1]["SN"]
                    elif self.not_ordered_packet[cfg_idx]:
                        self.last_ordered_packet[
                            cfg_idx] = self.not_ordered_packet[cfg_idx][0][
                                "SN"] - 1
                    else:
                        self.last_ordered_packet.pop(cfg_idx, None)
                    self.updated_SN[cfg_idx] = False

                d = dict()
                d["timestamp"] = message["timestamp"]
                d["PDU Size"] = pduitem["pdu_bytes"] - pduitem["logged_bytes"]
                d["block_size"] = int(pduitem["pdu_bytes"])
                d["sys/sub_fn"] = (pduitem["sys_fn"], pduitem["sub_fn"])
                d["SN"] = pduitem["SN"]
                d["FI"] = pduitem["FI"]
                size += d["PDU Size"]

                #TODO: delete me, the following code only works for VR_example since it has a jump in time
                '''
                if(d["SN"] == 497):
                    d["PDU Size"] += 125 
                '''

                #to spot incomplete packets (e.g. previous 01 and current 00)
                #then throw the previous one away
                self.last_ordered_packet.setdefault(cfg_idx, d["SN"] - 1)

                #print("last ordered:", self.last_ordered_packet[cfg_idx])
                #print("current:", d["SN"])

                #if SN larger, that means some packets have not arrived
                if (d["SN"] > self.last_ordered_packet[cfg_idx] + 1):
                    self.not_ordered_packet[cfg_idx].append(d)
                elif (d["SN"] < self.last_ordered_packet[cfg_idx] + 1):
                    #if SN is 0 and the last one is not 0 (just in case we have two SN=0 packets)
                    if (d["SN"] == 0
                            and self.last_ordered_packet[cfg_idx] != 0):
                        self.not_ordered_packet[cfg_idx].append(d)
                        self.last_ordered_packet[cfg_idx] = 0
                    elif (self.not_ordered_packet[cfg_idx]
                          and self.not_ordered_packet[cfg_idx][0]["SN"] == 0):
                        self.config_to_rlc[cfg_idx] += self.not_ordered_packet[
                            cfg_idx]
                        self.not_ordered_packet[cfg_idx] = [d]
                        #self.not_ordered_packet[cfg_idx].append(d)
                    else:
                        #resend this packet with a complete version
                        if(d["SN"] == self.last_ordered_packet[cfg_idx]\
                           and self.config_to_rlc[cfg_idx] \
                           and self.config_to_rlc[cfg_idx][-1]["FI"][-1] == '1'\
                           and d["FI"][-1] == "0"):
                            del self.config_to_rlc[cfg_idx][-1]
                            self.config_to_rlc[cfg_idx].append(d)
                            #Not Common for out of window packets
                        continue
                else:
                    assert (d["SN"] == self.last_ordered_packet[cfg_idx] + 1)
                    if (self.last_round_complete[cfg_idx]):
                        self.config_to_rlc[cfg_idx].append(d)
                        self.last_ordered_packet[cfg_idx] = self.config_to_rlc[
                            cfg_idx][-1]["SN"]
                    else:
                        self.not_ordered_packet[cfg_idx].append(d)

                #print(self.not_ordered_packet[cfg_idx])
                #print(self.config_to_rlc[cfg_idx])
                #print([i["SN"] for i in self.not_ordered_packet[cfg_idx]])

                if (pduitem["FI"][-1] == "0"):
                    self.last_round_complete[cfg_idx] = True
                else:
                    self.last_round_complete[cfg_idx] = False
                    continue

                if (check_SN_is_incremental(
                    [{
                        "SN": self.last_ordered_packet[cfg_idx]
                    }] + __order_by_SN(self.not_ordered_packet[cfg_idx]))):
                    self.config_to_rlc[cfg_idx] += self.not_ordered_packet[
                        cfg_idx]
                    self.last_ordered_packet[cfg_idx] = self.config_to_rlc[
                        cfg_idx][-1]["SN"]
                    self.not_ordered_packet[cfg_idx] = []
                    self.config_to_rlc[cfg_idx] = __order_by_SN(
                        self.config_to_rlc[cfg_idx])
                else:
                    self.last_round_complete[cfg_idx] = False
                    continue

        if (msg.type_id == "LTE_PHY_PDSCH_Stat_Indication"):
            log_item = msg.data.decode()
            if log_item is None:
                return
            timestamp = log_item['timestamp']
            for item in log_item["Records"]:
                cell_id_str = item['Serving Cell Index']
                if cell_id_str not in self.cell_id:
                    self.cell_id[cell_id_str] = self.idx
                    cell_idx = self.idx
                    self.idx += 1
                else:
                    cell_idx = self.cell_id[cell_id_str]
                sn = int(item['Frame Num'])
                sfn = int(item['Subframe Num'])
                sn_sfn = sn * 10 + sfn
                for blocks in item['Transport Blocks']:
                    harq_id = int(blocks['HARQ ID'])
                    tb_idx = int(blocks['TB Index'])
                    is_retx = True if blocks['Did Recombining'][
                        -2:] == "es" else False
                    crc_check = True if blocks['CRC Result'][
                        -2:] == "ss" else False
                    tb_size = int(blocks['TB Size'])
                    rv_value = int(blocks['RV'])
                    # one slot per (HARQ process, serving cell, transport block);
                    # 8 HARQ processes per cell, and the TB stride of 24
                    # presumably allows up to 3 serving cells
                    harq_key = harq_id + cell_idx * 8 + tb_idx * 24
                    retx = False
                    # note: the following code is adapted from the MAC analyzer
                    if not crc_check:  # add a retx instance, or extend an existing one
                        # layout: [timestamp, cell_idx, harq_id, tb_idx, tb_size,
                        #          mac_retx_flag, retx_count, rlc_retx_flag, first sn_sfn]
                        cur_fail = [
                            timestamp, cell_idx, harq_id, tb_idx, tb_size,
                            False, 0, False, sn_sfn
                        ]
                        if self.failed_harq[harq_key] != 0:
                            if rv_value > 0:
                                self.failed_harq[harq_key][6] += 1
                            else:
                                self.failed_harq[harq_key][-2] = True
                                delay = sn_sfn - self.failed_harq[harq_key][-1]
                                d = {}
                                d['block_size'] = self.failed_harq[harq_key][4]
                                d['timestamp'] = timestamp
                                d["sys/sub_fn"] = (sn, sfn)
                                d['delay'] = delay
                                # RLC retx
                                self.latency_blocks.append(d)
                                retx = True
                        elif rv_value == 0:
                            self.failed_harq[harq_key] = cur_fail
                    else:
                        if self.failed_harq[harq_key] != 0:
                            if rv_value > 0 or is_retx:
                                self.failed_harq[harq_key][6] += 1
                                self.failed_harq[harq_key][-4] = True
                                delay = sn_sfn - self.failed_harq[harq_key][-1]
                                d = {}
                                d['block_size'] = self.failed_harq[harq_key][4]
                                d['timestamp'] = timestamp
                                d["sys/sub_fn"] = (sn, sfn)
                                d['delay'] = delay
                                retx = True
                                # MAC retx
                                self.latency_blocks.append(d)
                            else:
                                self.failed_harq[harq_key][-2] = True
                                delay = sn_sfn - self.failed_harq[harq_key][-1]
                                d = {}
                                d['block_size'] = self.failed_harq[harq_key][4]
                                d['timestamp'] = timestamp
                                d["sys/sub_fn"] = (sn, sfn)
                                d['delay'] = delay
                                retx = True
                                # RLC retx
                                self.latency_blocks.append(d)
                            self.failed_harq[harq_key] = 0
                    # no retransmission happened, so the delay is 0
                    if not retx:
                        d = {}
                        d['block_size'] = tb_size
                        d['timestamp'] = timestamp
                        d["sys/sub_fn"] = (sn, sfn)
                        d['delay'] = 0
                        self.latency_blocks.append(d)

        if not self.last_round_complete.setdefault(cfg_idx, True):
            return

        # map each PDCP PDU onto the RLC PDUs that carried it
        self.config_to_rlc.setdefault(cfg_idx, [])
        self.config_to_pdcp.setdefault(cfg_idx, [])

        while self.config_to_rlc[cfg_idx] and self.config_to_pdcp[cfg_idx]:
            # drop out-of-phase packets from either queue before matching
            threshold = 0.4
            while self.config_to_rlc[cfg_idx] and self.config_to_pdcp[
                    cfg_idx] and self.__sn_is_before(
                        self.config_to_rlc[cfg_idx][0]["timestamp"],
                        self.config_to_rlc[cfg_idx][0]["sys/sub_fn"],
                        self.config_to_pdcp[cfg_idx][0]["timestamp"],
                        self.config_to_pdcp[cfg_idx][0]["sys/sub_fn"],
                        threshold=threshold,
                        diff=True):
                #print("deleted", self.config_to_rlc[cfg_idx][0]["SN"], self.config_to_rlc[cfg_idx][0]["PDU Size"] )
                del self.config_to_rlc[cfg_idx][0]

            while self.config_to_rlc[cfg_idx] and self.config_to_pdcp[
                    cfg_idx] and self.__sn_is_before(
                        self.config_to_pdcp[cfg_idx][0]["timestamp"],
                        self.config_to_pdcp[cfg_idx][0]["sys/sub_fn"],
                        self.config_to_rlc[cfg_idx][0]["timestamp"],
                        self.config_to_rlc[cfg_idx][0]["sys/sub_fn"],
                        threshold=threshold,
                        diff=True):
                #raw_input("Warning, deleted pdcp")
                self.discarded_packets_stat["rlc"] += 1
                del self.config_to_pdcp[cfg_idx][0]

            while self.config_to_rlc[cfg_idx] and self.config_to_pdcp[cfg_idx]:
                rlc = self.config_to_rlc[cfg_idx]
                pdcp = self.config_to_pdcp[cfg_idx]
                #print("{}:{}:{}".format("pdcp", pdcp[0]["PDU Size"], str(pdcp[0]["SN"])))
                #print("{}:{}:{}".format("rlc", rlc[0]["PDU Size"],str(rlc[0]["SN"])))
                #note: the commented-out print below is useful when debugging size mismatches
                #print(pdcp[0]["PDU Size"] - rlc[0]["PDU Size"])

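                # illustrative example (not from any trace): a 1500-byte PDCP PDU
                # carried by RLC PDUs of 900 and 600 bytes takes the "PDCP is
                # decomposed" branch once (900 < 1500), leaving 600 bytes, and
                # then the exact-match branch for the 600-byte RLC PDU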
                if (rlc[0]["PDU Size"] == pdcp[0]["PDU Size"]):
                    #perfectly matched
                    pdcp[0]["rlc"].append(rlc[0])
                    self.unmapped_pdcp_rlc.append((rlc[0], pdcp[0]))
                    #print("Perfectly Matched")
                    #debug_print(self.unmapped_pdcp_rlc[-1])
                    del self.config_to_rlc[cfg_idx][0]
                    del self.config_to_pdcp[cfg_idx][0]

                elif (rlc[0]["PDU Size"] < pdcp[0]["PDU Size"]):
                    #pdcp is decomposed
                    pdcp[0]["rlc"].append(rlc[0])
                    self.config_to_pdcp[cfg_idx][0]["PDU Size"] -= rlc[0][
                        "PDU Size"]
                    del self.config_to_rlc[cfg_idx][0]
                else:
                    #rlc is decomposed
                    self.config_to_rlc[cfg_idx][0]["PDU Size"] -= pdcp[0][
                        "PDU Size"]
                    pdcp[0]["decomposed"] += 1
                    pdcp[0]["rlc"].append(rlc[0])
                    self.unmapped_pdcp_rlc.append((rlc[0], pdcp[0]))
                    del self.config_to_pdcp[cfg_idx][0]

        # map matched PDCP-RLC tuples onto MAC latency blocks
        while self.unmapped_pdcp_rlc and self.latency_blocks:
            while self.unmapped_pdcp_rlc and self.latency_blocks and self.__sn_is_before(
                    self.unmapped_pdcp_rlc[0][1]["rlc"][0]["timestamp"],
                    self.unmapped_pdcp_rlc[0][1]["rlc"][0]["sys/sub_fn"],
                    self.latency_blocks[0]["timestamp"],
                    self.latency_blocks[0]["sys/sub_fn"]):
                self.discarded_packets_stat["mapped_pdcp_rlc"] += 1
                #print("unmapped")
                self.mapped_all.append((self.unmapped_pdcp_rlc[0][1],
                                        self.unmapped_pdcp_rlc[0][0], {
                                            "delay": 0
                                        }))
                del self.unmapped_pdcp_rlc[0]
            while self.unmapped_pdcp_rlc and self.latency_blocks and self.__sn_is_before(
                    self.latency_blocks[0]["timestamp"],
                    self.latency_blocks[0]["sys/sub_fn"],
                    self.unmapped_pdcp_rlc[0][1]["rlc"][0]["timestamp"],
                    self.unmapped_pdcp_rlc[0][1]["rlc"][0]["sys/sub_fn"]):
                #print("deleted mac")
                #print(self.latency_blocks[0])
                self.discarded_packets_stat["latency_block"] += 1
                del self.latency_blocks[0]
            if self.latency_blocks and self.unmapped_pdcp_rlc:
                self.mapped_all.append(
                    (self.unmapped_pdcp_rlc[0][1],
                     self.unmapped_pdcp_rlc[0][0], self.latency_blocks[0]))
                #print(self.mapped_all[-1])
                if self.unmapped_pdcp_rlc[0][1]["decomposed"] > 0:
                    # this latency block is shared with the next PDCP PDU, so keep it
                    self.unmapped_pdcp_rlc[0][1]["decomposed"] -= 1
                    #print("d_mapped")
                else:
                    #print("mapped")
                    del self.latency_blocks[0]
                del self.unmapped_pdcp_rlc[0]

        # print("unmapped pdcp rlc tuple : " + str(len(self.unmapped_pdcp_rlc)))
        # print("pdcp rlc mac tuple:   " + str(len(self.unmapped_pdcp_rlc)))
        # print("latency block number: " + str(len(self.latency_blocks)))
        # print("ultimate mapping:     " + str(len(self.mapped_all)))

        # upload everything
        while self.mapped_all:
            pdcp = self.mapped_all[0][0]
            rlc = self.mapped_all[0][1]
            mac = self.mapped_all[0][2]

            # helper: interval between two (system frame, subframe) stamps
            def get_time_inter(a_fn, b_fn):
                a_sys_fn, a_sub_fn = a_fn
                b_sys_fn, b_sub_fn = b_fn
                a = a_sys_fn * 10 + a_sub_fn
                b = b_sys_fn * 10 + b_sub_fn
                inter = abs(a - b)
                # the counter wraps every 10240 subframes (1024 frames x 10)
                if inter > 10000:
                    inter = abs(inter - 10240)
                return inter
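            # e.g. get_time_inter((1023, 9), (0, 0)) == 1 after the wraparound
            # correction, rather than the raw difference of 10239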

            def get_packet_size(pdcp):
                # reconstruct the original PDCP PDU size: the sizes of all but
                # the last RLC PDU, plus the remaining (possibly shrunken)
                # PDU Size left on the PDCP record
                if len(pdcp["rlc"]) == 1:
                    return pdcp["PDU Size"]
                total = 0
                for i in pdcp["rlc"][:-1]:
                    total += i["PDU Size"]
                return total + pdcp["PDU Size"]

            kpi = dict()
            kpi["name"] = "DL_LAT_BD"
            kpi["ts"] = pdcp["timestamp"]
            kpi["pkt_size"] = get_packet_size(pdcp)
            kpi["mac_retx_time"] = mac["delay"]

            if (pdcp["rlc"] == []
                ):  #only one rlc, no multiple rlc transmission time
                kpi["rlc_wait_time"] = 0
            else:
                kpi["rlc_wait_time"] = get_time_inter(
                    rlc["sys/sub_fn"], pdcp["rlc"][0]["sys/sub_fn"]
                )  #last pdu receipt time - first pdu receipt time
            kpi["pdcp_reordering_time"] = get_time_inter(
                rlc["sys/sub_fn"],
                pdcp["sys/sub_fn"])  #pdcp layer time - last pdu receipt time
            #print(kpi)
            #if(kpi["pdcp_reordering_time"] > 3000 or kpi["rlc_wait_time"] > 3000):
            #debug_print(pdcp)
            #debug_print(kpi)
            #raw_input("what")
            #self.kpi.append(kpi)

            #self.broadcast_info('DL_LAT_BREAKDOWN', kpi)
            #self.log_debug('DL_LAT_BREAKDOWN: ' + str(kpi))
            #self.store_kpi("KPI_Integrity_DL_LATENCY_BREAKDOWN", kpi, str(pdcp["timestamp"]))
            #self.upload_kpi("KPI.Integrity.UL_LAT_BREAKDOWN", kpi)
            write(kpi)
            del self.mapped_all[0]
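A note on the matching above: __sn_is_before is referenced throughout but not included in this excerpt. A minimal sketch of what such a comparison could look like, assuming the same 10240-subframe wraparound used by get_time_inter; the name sn_is_before, the half-cycle rule, and the timestamp handling are all assumptions, not the analyzer's actual implementation:

def sn_is_before(ts_a, fn_a, ts_b, fn_b, threshold=0.4, diff=False):
    # compare two (system frame, subframe) stamps modulo the 10240 wraparound
    a = fn_a[0] * 10 + fn_a[1]
    b = fn_b[0] * 10 + fn_b[1]
    gap = (b - a) % 10240
    before = 0 < gap < 5120  # a precedes b if it is within half a cycle behind
    if diff:
        # additionally require the coarse timestamps to differ by more than
        # the threshold (seconds), so in-phase packets are not discarded
        return before and abs((ts_b - ts_a).total_seconds()) > threshold
    return before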
Example #40
0
        # cv2.imshow('Dilated', dilated)
        # cv2.imshow('Edges', edges)
        cv2.imshow('New', new)

        # box = cv2.boxPoints(rects)
        if len(rects) != 0:
            for p in rects[0]:
                pt = (p[0], p[1])
                print(pt)
                cv2.circle(orig, pt, 5, (200, 0, 0), 2)
                transform_coordinates.append(list(pt))

            transform_coordinates = sort_points(transform_coordinates)
            print(transform_coordinates)
            # del transform_coordinates[3]
            cv2.imshow("plank", orig)
            break

        # time.sleep(10)
        k = cv2.waitKey(30) & 0xff
        if k == ord('q'):
            break

    trans_M = get_TransformMatrix(transform_coordinates)
    writer.write(cap, trans_M)

cap.release()
cv2.destroyAllWindows()
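The helpers sort_points and get_TransformMatrix are called above but not part of this excerpt. A minimal sketch of plausible implementations, assuming the conventional top-left/top-right/bottom-right/bottom-left corner ordering and OpenCV's getPerspectiveTransform; the 640x480 output size is an arbitrary choice:

import cv2
import numpy as np

def sort_points(pts):
    # order four corners as top-left, top-right, bottom-right, bottom-left
    pts = np.array(pts, dtype="float32")
    s = pts.sum(axis=1)             # x + y: min at top-left, max at bottom-right
    d = np.diff(pts, axis=1)[:, 0]  # y - x: min at top-right, max at bottom-left
    return np.array([pts[np.argmin(s)], pts[np.argmin(d)],
                     pts[np.argmax(s)], pts[np.argmax(d)]], dtype="float32")

def get_TransformMatrix(corners, size=(640, 480)):
    # map the ordered corners onto an axis-aligned rectangle of the given size
    w, h = size
    dst = np.array([[0, 0], [w - 1, 0], [w - 1, h - 1], [0, h - 1]],
                   dtype="float32")
    return cv2.getPerspectiveTransform(np.array(corners, dtype="float32"), dst)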