def new_log(position, log):
    """Append `log` to the ordered log store and register it under `position`.

    Writes three parallel records keyed by the same file number (the log text,
    a timestamp, a coordinate) and then appends a '<file>,<line>,0' pointer to
    the `notes` file inside data/<position>/.

    NOTE(review): assumes file names in data/ordered/logs/ are '<int>.<ext>'
    (e.g. '3.txt'); a non-numeric prefix would make int() raise — confirm.
    """
    # cd into the ordered log storage
    FM.change_directory('data/ordered/logs/')
    # Find the highest file number currently in use (part before the '.')
    files = FM.list_files()
    file_chsn = 0
    for i in range(0, len(files)):
        for n in range(0, len(files[i])):
            if files[i][n] == '.':
                file_num = int(files[i][:n])
                if file_num > file_chsn:
                    file_chsn = file_num
    # Each log file holds at most 10 entries; roll over to a new file when full
    if FM.file_length(str(file_chsn)) == 10:
        file_chsn += 1
    FM.append_substring(str(file_chsn), log)
    # Record the timestamp in the sibling dates/ folder under the same number
    FM.change_directory('../dates/')
    FM.append_substring(str(file_chsn), get_time())
    FM.change_directory('../coord/')
    # WILL BE EDITED IN FUTURE (hard-coded coordinate placeholder)
    FM.append_substring(str(file_chsn), '40.5698452,-105.10956120000003')
    # Zero-based line index of the entry just written
    index = FM.file_length(str(file_chsn)) - 1
    # cd from data/ordered/coord/ to data/<position>/ and register the pointer
    FM.change_directory('../../' + position)
    FM.append_substring('notes', str(file_chsn) + ',' + str(index) + ',0')
    # go back to surface folder (data/<position> is folder_layers+1 deep)
    FM.change_directory('../' * (LP.folder_layers(position) + 1))
def log_position(position, index):
    """Return the notes entry at `index` for `position`, minus its bump count.

    Each notes line has the form '<file>,<line>,<bumps>'; the trailing
    ',<bumps>' field is stripped and the rest is returned as a list of
    strings, e.g. '3,7,2' -> ['3', '7'].  Returns [] when the line has no
    comma.

    Bug fixes vs. original: the parameter `position` was overwritten with []
    before being used, the search loop iterated over the (empty) result list
    instead of the notes line and so never ran, and the cd-back depth was
    `folder_layers + 2` while the identical 'data/<position>' cd in bump_log
    climbs back with `folder_layers + 1`.
    """
    FM.change_directory('data/' + position)
    log_data = FM.get_substring('notes', index)
    # Drop everything from the LAST comma onward (the bump counter)
    last_comma = log_data.rfind(',')
    if last_comma != -1:
        parsed = log_data[:last_comma].split(',')
    else:
        parsed = []
    # go back to surface folder (same depth convention as bump_log/new_log)
    FM.change_directory('../' * (LP.folder_layers(position) + 1))
    return parsed
def bump_log(position, index):
    """Increment the bump counter on line `index` of this position's notes.

    A notes line looks like '<file>,<line>,<bumps>'; only the third field
    (everything after the second comma) is rewritten, incremented by one.
    """
    FM.change_directory('data/' + position)
    entry = FM.get_substring('notes', index)
    # Split on the first two commas only: the third piece is the bump count.
    pieces = entry.split(',', 2)
    # Rewrite only when all three fields exist — mirrors the original's
    # silent no-op on lines with fewer than two commas.
    if len(pieces) == 3:
        prefix = pieces[0] + ',' + pieces[1] + ','
        FM.edit_substring('notes', index, prefix + str(int(pieces[2]) + 1))
    # climb back out to the surface folder
    FM.change_directory('../' * (LP.folder_layers(position) + 1))
def gather_logs(file_paths, log_paths):
    """Load the log text for every path reference and pair it with its metadata.

    Args:
        file_paths: list of log-file numbers (one entry per file to open).
        log_paths:  parallel list; log_paths[i] is a list of
                    [line_in_file, notes_index, bump_count] triples for file i.

    Returns:
        A flat list of [log_text, notes_index, bump_count] entries.

    Fix vs. original: removed a leftover debug print of each log line.
    """
    # cd to logs folder
    FM.change_directory('data/ordered/logs/')
    logs = []
    for file_num, refs in zip(file_paths, log_paths):
        gathered_data = FM.get_all(str(file_num))
        for line_idx, notes_idx, bump in refs:
            # For each log: the string, its index in the notes file, bump count
            logs.append([gathered_data[line_idx], notes_idx, bump])
    FM.change_directory('../../../')
    return logs
def gather_paths(position):
    """Collect, per log file, the entries referenced by this position's notes.

    Reads data/<position>/notes (lines of '<file>,<line>,<bumps>'), tags each
    entry with its own row index, then groups the references by log-file
    number found in data/ordered/logs/.

    Returns:
        (file_paths, log_paths) — file_paths is the list of log-file numbers
        (ints) actually referenced; log_paths[i] holds, for file_paths[i], a
        list of [line_in_file, notes_row_index, bump_count] triples.
    """
    # cd to position and gather log paths and strengths
    FM.change_directory('data/' + position)
    paths = FM.get_all('notes')
    # separate paths information by comma and put into list;
    # insert the notes row index at slot 2 -> [file, line, row_idx, bumps]
    for i in range(0, len(paths)):
        paths[i] = paths[i].split(',')
        paths[i].insert(2, i)
        for n in range(0, len(paths[i])):
            paths[i][n] = int(paths[i][n])
    # find number of layers position is from data folder
    layers = LP.folder_layers(position)
    # cd back to logs folder (could be any of the ordered data folders)
    FM.change_directory('../' * layers + 'ordered/logs/')
    # get all file names in logs folder
    file_names = FM.list_files()
    # find number in each file name (take out .txt and convert to int)
    for i in range(0, len(file_names)):
        for n in range(0, len(file_names[i])):
            if file_names[i][n] == '.':
                file_names[i] = int(file_names[i][:n])
                break
    # file_paths is for knowing the filename for the first
    file_paths = []
    log_paths = []
    # store each necessary file name in file_paths
    # (only files that at least one notes entry points to)
    for i in range(0, len(file_names)):
        for n in range(0, len(paths)):
            if paths[n][0] == file_names[i]:
                file_paths.append(file_names[i])
                break
    # setup log_paths for each file in file_paths
    for i in range(0, len(file_paths)):
        log_paths.append([])
    # organize log and strength with their respective file
    for i in range(0, len(file_paths)):
        for n in range(0, len(paths)):
            if paths[n][0] == file_paths[i]:
                log_paths[i].append([paths[n][1], paths[n][2], paths[n][3]])
    # cd back to bumplog folder
    FM.change_directory('../../../')
    return file_paths, log_paths
def main():
    """Rebuild config/toml/blog.toml from the markdown posts in content/blog/.

    Scans every *.md post (except index.md) for required '+++' TOML front
    matter, derives a display date and read time, sorts posts by date, and
    writes one [[extra.blog]] table per post.

    Fixes vs. original: the missing-metadata error path concatenated the
    whole post dict into the message (`posts[i] + '.md'` -> TypeError); an
    unused `separator` local; `del meta[k]` while iterating a stale
    `range(0, len(meta))` could index past the shrunken list; quoting
    crashed on an empty metadata value.
    """
    # Get to Blog folder
    FM.change_directory('../../content/blog/')
    # Find each post (the section index page is not a post)
    files = [f for f in FM.list_files() if f != 'index.md']
    posts = [{'url': f[:-3]} for f in files if f[-3:] == '.md']

    # ----------- Search each file for unordered metadata ----------- #
    find_meta = ['title', 'author_id', 'date', 'description']
    for post in posts:
        lines = FM.get_all(post['url'] + '.md')
        missing, body = _read_frontmatter(lines, find_meta, post)
        # ERROR: could not find all required metadata in frontmatter
        if missing:
            print('\nERROR:')
            print('Could not find ' + ', '.join(missing), end='')
            print(' in file ' + post['url'] + '.md')
            raise SystemExit
        # Convert date to string in new key
        post['datestr'] = date_to_string(post['date'])
        # Get read time from markdown content (everything after front matter)
        post['readtime'] = str(get_readtime(body))
    # --------------------------------------------------------------- #

    # Arrange blog posts by date
    posts = arrange_by_date(posts)
    # Go back to config folder, then go into toml folder
    FM.change_directory('../../config/toml/')
    # Blog data file name
    file_name = 'blog.toml'
    # Make sure each required item has quotations surrounding them
    rqd_quotes = ['title', 'author_id', 'url', 'description', 'readtime',
                  'datestr']
    for post in posts:
        for key in rqd_quotes:
            post[key] = _ensure_quoted(post[key])
    # TOML table name for each blog post
    table_name = '[[extra.blog]]'
    # Lines to be written to new TOML file
    toml_lines = []
    for post in posts:
        toml_lines.append(table_name)
        for key, value in post.items():
            toml_lines.append('\t' + str(key) + ' = ' + str(value))
        toml_lines.append('')
    # Remove any previous blog.toml before writing the fresh one
    if file_name in FM.list_files():
        FM.remove_file(file_name)
    for line in toml_lines:
        FM.append_substring(file_name, line)


def _read_frontmatter(lines, keys, post):
    """Parse 'key = value' pairs from the '+++' front matter into `post`.

    Returns (missing_keys, body_lines): the required keys that were not
    found, and the lines after the closing '+++' delimiter (the whole input
    if no closing delimiter exists).
    """
    missing = list(keys)
    inside = False
    for j in range(0, len(lines)):
        ln = lines[j].strip()
        if ln == '+++':
            if inside:
                # closing delimiter: front matter is over
                return missing, lines[j + 1:]
            inside = True
            continue
        if not inside:
            continue
        # see if the current line starts with a still-missing key
        for k in range(0, len(missing)):
            key = missing[k]
            if ln[:len(key)] == key:
                # find the key/value separator after the key name
                eq = ln.find('=', len(key))
                if eq != -1:
                    post[key] = ln[eq + 1:].strip()
                    del missing[k]
                break
    return missing, lines


def _ensure_quoted(value):
    """Wrap `value` in double quotes (escaping embedded ") unless it already
    carries matching single or double quotes at both ends."""
    if value and value[0] in ('\'', '\"') and value[0] == value[-1]:
        return value
    return '\"' + value.replace('\"', '\\"') + '\"'
## Get files inside toml folder w/ respect to load order, ## then compile into the main directory's config.toml file import file_manager as FM from datetime import datetime # ----------------------------- # load_order = ['top', 'people', 'blog', 'companies', 'articles', 'books'] # ----------------------------- # FM.change_directory('toml') config_lines = [] for i in range(0, len(load_order)): file_lines = FM.get_all(load_order[i] + '.toml') for n in range(0, len(file_lines)): config_lines.append(file_lines[n]) config_lines.append('') FM.change_directory('../../') if FM.file_exists('config.toml'): file_lines = FM.get_all('config.toml') if file_lines != config_lines: FM.change_directory('config') FM.create_directory('backup') FM.change_directory('backup') current_time = datetime.now().strftime('%Y-%m-%d %H:%M:%S')