def write_to_current(header, value):
    row_dl = logger.readCSV(POOL_CLIPS_DATA_CSV_PATH)
    cur_row_num = utils.get_cur_row_num(row_dl)
    row_dl[cur_row_num][header] = value

    logger.logList(row_dl, POOL_CLIPS_DATA_CSV_PATH, False, HEADER_LIST,
                   'overwrite')
def move_current_to_row_num(row_num):
    row_dl = get_csv_row_dl()
    cur_row_num = utils.get_cur_row_num(row_dl)
    row_dl[cur_row_num]['current'] = ''
    row_dl[row_num]['current'] = '1'
    logger.logList(row_dl, POOL_CLIPS_DATA_CSV_PATH, False, HEADER_LIST,
                   'overwrite')
def init_current_if_needed():
    row_dl = get_csv_row_dl()
    for row_d in row_dl:
        if row_d['current'] == '1':
            return
    row_dl[0]['current'] = '1'
    logger.logList(row_dl, POOL_CLIPS_DATA_CSV_PATH, False, HEADER_LIST,
                   'overwrite')
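All three helpers above share the same read-modify-write cycle: read POOL_CLIPS_DATA_CSV_PATH into a list of row dicts, edit rows in place, then overwrite the file. The logger module itself is not shown in these examples, so as a rough, assumed stand-in for logger.readCSV and logger.logList(..., 'overwrite'), the cycle could be sketched with the standard csv module:

import csv

def read_csv_row_dl(csv_path):
    # Read the CSV into a list of dicts, one per row
    # (assumed stand-in for logger.readCSV).
    with open(csv_path, newline='') as f:
        return list(csv.DictReader(f))

def overwrite_csv_row_dl(row_dl, csv_path, header_list):
    # Rewrite the whole CSV from the (possibly modified) list of dicts
    # (assumed stand-in for logger.logList(..., 'overwrite')).
    with open(csv_path, 'w', newline='') as f:
        writer = csv.DictWriter(f, fieldnames=header_list)
        writer.writeheader()
        writer.writerows(row_dl)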
Example #4
def rename_clips_for_order(non_eval_clips_row_dl):
    for row_num, row_d in enumerate(non_eval_clips_row_dl):
        proper_clip_path = NON_EVAL_CLIPS_DIR_PATH + '/non_eval_' + str(
            row_num) + '.mp4'
        if row_d['clip_path'] != proper_clip_path:
            os.rename(row_d['clip_path'], proper_clip_path)
            row_d['clip_path'] = proper_clip_path
    logger.logList(non_eval_clips_row_dl, NON_EVAL_CLIPS_DATA_CSV_PATH, False,
                   NON_EVAL_CLIPS_DATA_CSV_HEADER_LIST, 'overwrite')
def build_unknown_chars_csv(input_txt_file_path, font_path):
    input_lines_t = tools.read_text_file(input_txt_file_path)

    unknown_char_dl = build_unknown_char_dl(input_lines_t, font_path)

    header_order_list = [
        'correct_char', 'unknown_char_unicode', '#_occurrences', 'example'
    ]
    logger.logList(unknown_char_dl, UNKNOWN_CHAR_CSV_FILENAME,
                   WANT_UNKOWN_CHAR_CSV_BACKUP, header_order_list, 'overwrite')
def prune_by_row_dl(prune_row_dl):
    row_dl = get_csv_row_dl()

    prune_id_l = []
    for prune_row_d in prune_row_dl:
        prune_id_l.append(prune_row_d['postId'])

    for row_d in row_dl:
        if row_d['postId'] in prune_id_l:
            row_d['status'] = 'pruned'
    logger.logList(row_dl, POOL_CLIPS_DATA_CSV_PATH, False, HEADER_LIST,
                   'overwrite')
Example #7
def load_unknown_chars_csv(unknown_chars_csv_path, corrected_chars_csv_path):
    unknown_chars_dl = logger.readCSV(unknown_chars_csv_path)
    
    # make sure unknown_chars csv has had all correct_chars added
    if not all_correct_chars_entered(unknown_chars_dl):
        raise TypeError('ERROR: You must enter all values for "correct_char" in unknown_chars')

    # read original corrected chars csv if it exists
    if os.path.isfile(corrected_chars_csv_path):
        og_corrected_chars_dl = logger.readCSV(corrected_chars_csv_path)
    else:
        og_corrected_chars_dl = []

    new_corrected_chars_dl = find_new_corrected_chars(og_corrected_chars_dl, unknown_chars_dl)

    header_order_list = ['correct_char', 'unknown_char_unicode', 'example']

    if new_corrected_chars_dl:
        logger.logList(new_corrected_chars_dl, corrected_chars_csv_path, WANT_BACKUP, header_order_list, 'append')
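find_new_corrected_chars is not included in this example; from how its result is appended to the corrected chars CSV, it presumably returns only the corrections that are not already logged. A minimal sketch under that assumption (keying on 'unknown_char_unicode' is a guess):

def find_new_corrected_chars(og_corrected_chars_dl, unknown_chars_dl):
    # Keep only corrections whose unknown_char_unicode is not already logged.
    known_unicodes = {d['unknown_char_unicode'] for d in og_corrected_chars_dl}
    new_dl = []
    for d in unknown_chars_dl:
        if d['unknown_char_unicode'] not in known_unicodes:
            new_dl.append({
                'correct_char': d['correct_char'],
                'unknown_char_unicode': d['unknown_char_unicode'],
                'example': d['example']
            })
    return new_dl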
def move_current(move_amount):
    row_dl = logger.readCSV(POOL_CLIPS_DATA_CSV_PATH)

    # get row num of original current clip and set current 'current' = ''
    og_current_row_num = utils.get_cur_row_num(row_dl)
    row_dl[og_current_row_num]['current'] = ''

    #print(og_current_row_num)
    new_cur_row_num = og_current_row_num + move_amount
    if new_cur_row_num not in range(len(row_dl)):
        if move_amount > 0:
            new_cur_row_num = 0
        else:
            new_cur_row_num = len(row_dl) - 1

    row_dl[new_cur_row_num]['current'] = '1'

    logger.logList(row_dl, POOL_CLIPS_DATA_CSV_PATH, False, HEADER_LIST,
                   'overwrite')
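The wrap-around rule above (stepping past either end of the list moves 'current' to the opposite end) can be isolated as a small pure function; the name below is illustrative only:

def wrap_row_num(cur_row_num, move_amount, row_count):
    # Step by move_amount; if the result leaves [0, row_count), wrap to the
    # first row when moving forward and to the last row when moving backward.
    new_row_num = cur_row_num + move_amount
    if not 0 <= new_row_num < row_count:
        new_row_num = 0 if move_amount > 0 else row_count - 1
    return new_row_num

# e.g. wrap_row_num(4, 1, 5) -> 0 and wrap_row_num(0, -1, 5) -> 4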
Example #9
        log_dl.append(log_dict)
        pos += 1
    return log_dl


CSV_FILENAME = 'output.csv'
INPUT_FILE_NAME = 'input.txt'
NEW_BOX_STR = 'NEWBOX'
#need to split up by newbox
#need to get name of box
#need to make csv

input_tup = read_text_file(INPUT_FILE_NAME)
print(input_tup)

input_str = format_data(input_tup)
print(input_str)

print('')

serial_num_ll = build_sn_ll(input_str)
print(serial_num_ll)
print('')

log_dict_list = make_log_dict_list(serial_num_ll)
print(log_dict_list)
print('')

logger.logList(log_dict_list, CSV_FILENAME)

print('done')
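read_text_file, format_data, build_sn_ll, and make_log_dict_list are not included in this example. Going by the comments above, build_sn_ll presumably splits the formatted input on NEW_BOX_STR into one list of lines per box; a minimal sketch under that assumption:

def build_sn_ll(input_str, new_box_str='NEWBOX'):
    # Collect everything between two NEWBOX markers as one box's lines
    # (how box names vs. serial numbers are laid out is an assumption).
    sn_ll = []
    current = None
    for line in input_str.splitlines():
        line = line.strip()
        if not line:
            continue
        if line == new_box_str:
            current = []
            sn_ll.append(current)
        elif current is not None:
            current.append(line)
    return sn_ll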
Example #10
def export_to_csv(export_filename, output_filename):
    row_dict_list = export_format.build_row_dict_list(export_filename)
    logger.logList(row_dict_list, output_filename, True)
Example #11
    def _log_non_eval_clips():
        def __make_og_non_eval_post_id_clip_path_dl():
            new_row_dl = []
            pool_row_dl = logger.readCSV(CURRENT_DATA_DIR_PATH +
                                         '/pool_clips_data.csv')

            for pool_row_d in pool_row_dl:
                if pool_row_d['status'] == '':
                    new_row_dl.append({
                        'postId': pool_row_d['postId'],
                        'clip_path': pool_row_d['clip_path']
                    })
            return new_row_dl

        def __get_post_id_l(non_eval_clips_row_dl):
            post_id_l = []
            for row_d in non_eval_clips_row_dl:
                post_id_l.append(row_d['postId'])
            return post_id_l

        file_system_utils.make_dir_if_not_exist(NON_EVAL_CLIPS_DIR_PATH)
        try:
            non_eval_clips_row_dl = logger.readCSV(
                NON_EVAL_CLIPS_DATA_CSV_PATH)
        except FileNotFoundError:
            non_eval_clips_row_dl = []

        # make row_dl of postIds and original clip paths
        og_non_eval_post_id_clip_path_dl = __make_og_non_eval_post_id_clip_path_dl()

        # build final_non_eval_post_id_clip_path_dl - each entry holds a postId and the new
        # clip path that clip is about to be saved to; postIds that are already logged are skipped
        final_non_eval_post_id_clip_path_dl = []
        existing_post_id_l = __get_post_id_l(non_eval_clips_row_dl)

        clips_added = 0
        for d in og_non_eval_post_id_clip_path_dl:
            if d['postId'] not in existing_post_id_l:
                new_save_name = 'non_eval_' + str(
                    len(non_eval_clips_row_dl) + clips_added) + '.mp4'
                final_non_eval_post_id_clip_path_dl.append({
                    'postId': d['postId'],
                    'clip_path': NON_EVAL_CLIPS_DIR_PATH + '/' + new_save_name
                })
                clips_added += 1

        # copy all non-evaluated clips to their new home in non_eval_clips
        # could just rename, but this is nicer for testing
        og_pos = 0
        for d in final_non_eval_post_id_clip_path_dl:
            while (d['postId'] !=
                   og_non_eval_post_id_clip_path_dl[og_pos]['postId']):
                og_pos += 1
            og_clip_path = og_non_eval_post_id_clip_path_dl[og_pos]['clip_path']
            file_system_utils.copy_files_to_dest([og_clip_path],
                                                 NON_EVAL_CLIPS_DIR_PATH)
            just_copied_clip_path = (NON_EVAL_CLIPS_DIR_PATH + '/' +
                                     ntpath.basename(og_clip_path))
            os.rename(just_copied_clip_path, d['clip_path'])

        # add info from final_non_eval_post_id_clip_path_dl to non_eval_clips_row_dl
        for row_d in final_non_eval_post_id_clip_path_dl:
            non_eval_clips_row_dl.append(row_d)

        logger.logList(non_eval_clips_row_dl, NON_EVAL_CLIPS_DATA_CSV_PATH,
                       False, NON_EVAL_CLIPS_DATA_CSV_HEADER_LIST, 'overwrite')
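The heart of _log_non_eval_clips is choosing the next sequential non_eval_N.mp4 name for each postId that has not been logged yet; that piece could be sketched on its own as below (the function name and dir_path parameter are illustrative, not part of the original code):

def plan_new_non_eval_paths(og_dl, existing_post_id_l, already_logged_count, dir_path):
    # For each clip whose postId is not logged yet, assign the next
    # sequential non_eval_N.mp4 path, continuing from the rows already logged.
    planned = []
    for d in og_dl:
        if d['postId'] not in existing_post_id_l:
            n = already_logged_count + len(planned)
            planned.append({
                'postId': d['postId'],
                'clip_path': dir_path + '/non_eval_' + str(n) + '.mp4'
            })
    return planned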
Example #12
    def get_confirmed_code_dl__and_is_complete(store_name, value, quantity):
        def get_datetime_from_dt_csv_str(datetime_csv_str):
            ss = str_utils.multi_dim_split(['-', ' ', ':', "'"],
                                           datetime_csv_str)
            return datetime(int(ss[0]), int(ss[1]), int(ss[2]), int(ss[3]),
                            int(ss[4]), int(ss[5]))

        def add_to_code_d_if_exists_in_row_d(code_d, row_d, key_):
            if key_ in row_d.keys():
                code_d[key_] = row_d[key_]
            return code_d

        def build_code_d(row_d):
            code_d = {}

            header = 'main_code'
            if header in row_d.keys():
                code_d[header] = row_d[header][:-1]

            code_d = add_to_code_d_if_exists_in_row_d(code_d, row_d, 'pin')
            code_d = add_to_code_d_if_exists_in_row_d(
                code_d, row_d, 'biz_id')  # eventually remove
            return code_d

        confirmed_code_dl = []

        unused_code_csv_path = get__store_unused_codes_csv_path(store_name)

        # return empty if code csv does not exist
        if not fsu.is_file(unused_code_csv_path):
            return confirmed_code_dl

        row_dl = logger.readCSV(unused_code_csv_path)
        store = STORE_D[store_name]  # will eventually be replaced with Store(store_name)
        header_l = store.csv_header_l  # will eventually get this from config

        row_num = 0
        while (len(confirmed_code_dl) < quantity and row_num < len(row_dl)):
            row_d = row_dl[row_num]
            if float(row_d['adv_value']) == float(value):

                code_d = build_code_d(row_d)

                last_confirm_datetime = get_datetime_from_dt_csv_str(
                    row_d['last_confirmed'])
                datetime_since_last_confirm = datetime.now() - last_confirm_datetime
                sec_since_last_confirm = datetime_since_last_confirm.total_seconds()

                # if it has been too long since last check, re-check code
                if sec_since_last_confirm > MAX_CONFIRMED_CODE_AGE_DAYS * 86400:  # 86400 sec per day

                    # real_value = store.get_code_value(code_d)  # put back
                    real_value = 50  # remove, just for testing
                    print('using ', real_value,
                          ' as test #, should check code for real, PUT BACK')

                    # if, after checking, the real value is less than the advertised value,
                    # remove the code from unused_codes and put it in failed_codes
                    if real_value < float(row_d['adv_value']):
                        logger.removeRowByHeaderVal('og_code_str',
                                                    row_d['og_code_str'],
                                                    unused_code_csv_path,
                                                    errorIfHeaderNotExist=True)

                        failed_codes_csv_path = get__store_failed_codes_csv_path(
                            store_name)
                        # append only the failed row to failed_codes
                        logger.logList([row_d],
                                       failed_codes_csv_path,
                                       wantBackup=True,
                                       headerList=header_l,
                                       overwriteAction='append')
                        break

                # if code not old, or if you just checked and confirmed the code
                confirmed_code_dl.append(code_d)
            row_num += 1

        return confirmed_code_dl, len(confirmed_code_dl) == quantity
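The staleness check in the loop above compares seconds since last_confirmed against a maximum age in days. Assuming the timestamp is written in a 'YYYY-MM-DD HH:MM:SS' form (the exact CSV format is not shown here), the check could be sketched as:

from datetime import datetime

def code_needs_recheck(last_confirmed_str, max_age_days, fmt='%Y-%m-%d %H:%M:%S'):
    # Re-check a code once more than max_age_days have passed since it was
    # last confirmed; 86400 is the number of seconds in a day.
    last_confirmed = datetime.strptime(last_confirmed_str.strip("'"), fmt)
    age_sec = (datetime.now() - last_confirmed).total_seconds()
    return age_sec > max_age_days * 86400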
def write_to_row_num(row_num, header, value):
    row_dl = get_csv_row_dl()
    row_dl[row_num][header] = value

    logger.logList(row_dl, POOL_CLIPS_DATA_CSV_PATH, False, HEADER_LIST,
                   'overwrite')