def download_parts():
    """Download every configured part's PDF files to the local parts directory.

    Returns 0 on success, 1 when the Drive library layout cannot be verified,
    and None when the options file is missing or lists no parts (preserving
    the original early-exit behavior).
    """
    service = util.build_service()
    options = util.parse_options("folder_creator_options.json")
    if options is None:
        return
    # Ensure that there are parts to download
    if not options["download-parts"]:
        print('ERROR: No parts specified.')
        return
    # Get the current parts id
    print("Verifying DigitalLibrary format...")
    library_id = util.get_digital_library(service).get("library_id")
    if library_id is None:
        return 1
    # Guard: get_separated_folders may return None on a malformed library;
    # the original indexed it directly and crashed with a TypeError.
    sep_ids = util.get_separated_folders(service, library_id)
    if sep_ids is None:
        return 1
    curr_parts_id = sep_ids["sec_curr"]
    if curr_parts_id is None:
        return 1
    for part in options["download-parts"]:
        check_stop_script()
        print(f'Downloading files for part "{part}"')
        pdf_tools.download_part_files(
            service, curr_parts_id, part,
            os.path.join(options["folder-dir"], "parts"),
            options["verbose"])
    print("Finished downloading parts")
    return 0
def verify_redvest(service, redvest_options):
    """Locate the Red Vest parent folder and create the new class folder in it.

    Returns (new_folder_id, new_resources_id) on success, or (None, None) when
    the parent folder is missing or the target folder already exists.
    """
    # Ensure redvest folder exists
    check_stop_script()
    parent_name = redvest_options['parent-name']
    res = util.get_folder_ids(service, name=parent_name)
    if res is None:
        print(
            f'ERROR: "{parent_name}" folder not found in Red Vest directory, please check Google Drive'
        )
        return None, None
    redvest_id = res[0]
    folder_name = redvest_options["folder-name"]
    # See if target folder already exists (a trashed copy also counts)
    if util.get_folder_ids(service, name=folder_name, parent=redvest_id) is not None:
        print(f'ERROR: Folder \"{folder_name}\" already exists!')
        print(
            "(You may need to remove the folder from the Trash in the Librarianz Drive)"
        )
        return None, None
    # Create new class folder
    new_folder_id = util.make_folder(service, folder_name, redvest_id)
    # Create resources folder
    new_resources_id = util.make_folder(service, "Resources", new_folder_id)
    return new_folder_id, new_resources_id
def generate_toc_data(toc_maps, options, styles):
    """Build the 2-D, column-major table data for the table of contents.

    Each toc map contributes "key: title" entries; a blank row separates
    groups. Entries whose title appears in options["dollie-songs"] use
    styles[1], everything else styles[0].
    """
    # Make a list of all the chartz, in lettered -> numbered -> teazers order
    raw_list = []  # (label, style index) pairs; style index 1 marks dollie songs
    for toc_map in toc_maps:  # renamed from `map` (shadowed the builtin)
        if not toc_map:
            continue
        for key, val in toc_map.items():
            title, _, _ = util.parse_file(os.path.basename(val))
            # NOTE: the membership test is case-sensitive (the original comment
            # claiming "case insensitive" was wrong).
            raw_list.append((f'{key}: {title}', int(title in options["dollie-songs"])))
        raw_list.append(("", False))
    if raw_list:
        del raw_list[-1]  # drop the trailing group separator
    # Refactor the data into table format
    check_stop_script()
    data = []
    num_cols = options["toc"]["num-cols"]
    num_rows = -(len(raw_list) // -num_cols)  # ceiling division
    for i in range(num_rows):
        row = []
        for j in range(num_cols):
            # Column-major: consecutive entries fill a column top to bottom
            index = (num_rows * j) + i
            if index >= len(raw_list):
                text, style = "", styles[0]  # pad the final, partial column
            else:
                text, style = raw_list[index][0], styles[raw_list[index][1]]
            row.append(Paragraph(text=text, style=style))
        data.append(row)
    return data
def download_part_files(service, curr_parts_id, part, dir, verbose=False):
    """Download all PDF shortcuts for one part into dir/<part>.

    The target directory is emptied first so files removed from Drive do not
    linger locally. (`dir` shadows the builtin but is kept for interface
    compatibility with existing callers.)
    """
    # Retrieve files
    folders = util.get_drive_files(service, curr_parts_id, files_only=False, name=part)
    if not folders or len(folders) != 1:
        thread_print(f'ERROR: Unable to find folder "{part}"')
        return
    files = util.get_drive_files(service, folders[0].get("id"), file_types=[".pdf"], is_shortcut=True)
    if not files:  # empty list and None both mean "nothing to download"
        thread_print(f'WARNING: Could not find any part files for "{part}"')
        return
    # Validate target directory
    check_stop_script()
    path = os.path.join(dir, part)
    validate_dir(path, verbose)
    # Delete all existing files in that directory
    for stale in glob.glob(os.path.join(path, '*')):
        os.remove(stale)
    # Download files; each entry is a shortcut, so resolve its targetId
    for file in files:
        check_stop_script()
        util.download_file(service, file["shortcutDetails"]["targetId"], path,
                           file.get("name"), verbose)
    # Print success
    thread_print(f'Successfully finished downloading part files for "{part}"')
def redvest_creator():
    """Create a new Red Vest class folder populated with the configured chartz.

    Returns 0 on success, 1 on any verification/parse failure.
    """
    # Build service
    service = util.build_service()
    # Read folder locations
    redvest_options = util.parse_options("redvest_options.json")
    if redvest_options is None:
        return 1
    # Verify all needed folders exist and retrieve their ids
    print("Verifying DigitalLibrary format...")
    lib_ids = util.get_digital_library(service)
    current_chartz_id = lib_ids.get("current_id")
    future_chartz_id = lib_ids.get("future_id")
    if current_chartz_id is None or future_chartz_id is None:
        return 1
    # Verify (and collect) all chart ids
    print("Validating Chartz...")
    chart_ids = [
        verify_chart_name(service, chart, [current_chartz_id, future_chartz_id])
        for chart in redvest_options["chartz"]
    ]
    if None in chart_ids:
        print('Try double-check your spelling, chart names are case-sensitive')
        print('ERROR: Redvest folder will not be created')
        return 1
    print("Verifying Redvest folder...")
    new_folder_id, new_resources_id = verify_redvest(service, redvest_options)
    if new_folder_id is None or new_resources_id is None:
        return 1
    # Only make individual section folders if field set to true
    alias_map = None
    section_ids = None
    check_stop_script()
    if redvest_options["individual-sections"]:
        # Read parts. Check the parse result BEFORE subscripting it: the
        # original indexed ['parts'] first and crashed with a TypeError
        # whenever parts.json failed to parse.
        parts_options = util.parse_options("parts.json")
        if parts_options is None:
            return 1
        parts_dict = parts_options['parts']
        # Make individual section folders
        print("Making individual section folders...")
        section_ids = make_section_folders(service, new_folder_id,
                                           parts_dict.keys())
        # Invert parts_dict to create map of alias's -> folder
        alias_map = util.make_alias_map(parts_dict)
    # Write each chart to the new redvest folder
    for index, chart in enumerate(redvest_options["chartz"]):
        write_song(service, chart, chart_ids[index], new_folder_id,
                   new_resources_id, section_ids, alias_map)
    print(
        f'Successfully created new folder "{redvest_options["folder-name"]}"!')
    return 0
def add_live_part_shortcuts(service, live_id, age, new_folders, exclude):
    """Create a shortcut in the live library for each part folder of `age`,
    skipping (with a warning) any part listed in `exclude`.
    """
    # Iterate name -> folder id pairs directly instead of re-indexing the dict
    for part, folder_id in new_folders['sec'][age].items():
        check_stop_script()
        if part in exclude:
            print(
                f'WARNING: Part "{part}" will be excluded from the Live Digital Library'
            )
            continue
        util.make_shortcut(service, part, folder_id, live_id)
def clear_broken_shortcuts():
    """Delete every shortcut in the separated parts/audio/sibelius folders
    whose target file is trashed or can no longer be fetched.

    Returns 0 on success, 1 when the Drive library layout cannot be verified.
    """
    # Build drive
    service = util.build_service()
    check_stop_script()
    # Verify all needed folders exist and retrieve their ids
    print("Verifying DigitalLibrary format...")
    lib_ids = util.get_digital_library(service)
    library_id = lib_ids.get("library_id")
    if library_id is None:
        return 1
    sep_ids = util.get_separated_folders(service, library_id)
    if sep_ids is None:
        return 1
    # Map folder name -> id for every age group, parts and audio alike
    sep_parts = {
        age: {
            folder["name"]: folder["id"]
            for folder in util.get_drive_files(
                service, sep_ids[f"sec_{age}"], files_only=False)
        }
        for age in ["curr", "old", "future"]
    }
    sep_audio = {
        age: {
            folder["name"]: folder["id"]
            for folder in util.get_drive_files(
                service, sep_ids[f"aud_{age}"], files_only=False)
        }
        for age in ["curr", "old", "future"]
    }
    print("Examining shortcuts...")
    all_folders = []
    for age in ["curr", "old", "future"]:
        all_folders.extend(sep_parts[age].values())
        all_folders.extend(sep_audio[age].values())
        all_folders.append(sep_ids[f"sib_{age}"])
    for folder_id in all_folders:
        for file in util.get_drive_files(service, folder_id, is_shortcut=True):
            target_id = file.get("shortcutDetails").get("targetId")
            # A shortcut is broken when its target is trashed or the lookup
            # fails (e.g. the target was hard-deleted). Narrowed from the
            # original bare `except:`, which also swallowed KeyboardInterrupt.
            broken = False
            try:
                target = service.files().get(fileId=target_id,
                                             fields="trashed").execute()
                broken = target["trashed"] is True
            except Exception:
                broken = True
            if broken:
                print(
                    f'File pointed to by {file.get("name")} is a broken shortcut.'
                )
                service.files().delete(fileId=file.get("id")).execute()
    return 0
def enumerate_pages(files, options, style=0, start=None, page_map=None, write_pages=False, no_filler_before=None, verbose=False):
    """Read each PDF in `files`, assign page numbers, and collect the pages.

    style 0 numbers files 1, 2, ...; any other style letters them 'A', 'B', ...
    Pages of a multi-page file get sub-numbers like '3.2'. When `page_map` is
    given it is filled with counter -> file. When `no_filler_before` is given,
    filler pages are interlaced everywhere except directly before those files.
    Pages are only appended when `write_pages` is True; the counter and
    `page_map` advance regardless.
    """
    pages = []
    if start is None:
        start = 1 if style == 0 else 'A'
    counter = start
    # Read filler pages if filler needs to be interlaced
    filler = None
    if no_filler_before:
        filler = add_filler(options)
    valid_filler_indices = []  # positions in `pages` where filler may go
    for file in files:
        check_stop_script()
        try:
            # Save page num assignment to map
            if page_map is not None:
                page_map[counter] = file
            if filler and file not in no_filler_before:
                valid_filler_indices.append(len(pages))
            # Read input file. NOTE(review): the handle is left open on
            # purpose -- PyPDF2 pages resolve their stream lazily when the
            # output document is written later; closing here would break that.
            reader = PdfFileReader(open(file, 'rb'))
            num_pages = reader.getNumPages()
            page_num = f'{counter}'
            # Add all the pages to the list
            for i in range(num_pages):
                input_page: PageObject = reader.getPage(i)
                # Verify that it has the proper dimensions
                mediabox = input_page.mediaBox
                if not validate_mediabox(mediabox, options):
                    thread_print(f'WARNING: Page {i + 1} in "{file}" has incorrect dimensions\nExpected {options["page-size"]["width"]} x {options["page-size"]["height"]}, received {float(mediabox.getWidth()) / inch} x {float(mediabox.getHeight()) / inch}.')
                    continue
                # Only collect pages when asked to; counter still advances
                if not write_pages:
                    continue
                if num_pages > 1:
                    page_num = f'{counter}.{i + 1}'
                pages.append(add_page_num(input_page, page_num, options) if options["enumerate-pages"] else input_page)
        except OSError:
            thread_print(f'Error when parsing "{file}"')
        # Increment Counter (numeric or alphabetic depending on style)
        counter = (counter + 1) if style == 0 else (chr(ord(counter) + 1))
    # Interlace filler (if applicable)
    if filler:
        interlace_filler(pages, filler, valid_filler_indices)
    return pages
def get_all_chart_folders(service, curr_id, old_id, future_id):
    """Return {age: {chart folder name: folder id}} for the three age groups
    ('curr', 'old', 'future')."""
    age_parents = ((curr_id, 'curr'), (old_id, 'old'), (future_id, 'future'))
    output = {}
    for parent, age in age_parents:
        check_stop_script()
        folders = util.get_drive_files(service, parent, files_only=False,
                                       folders_only=True)
        output[age] = {entry.get('name'): entry.get('id') for entry in folders}
    return output
def reset_permissions():
    """Make all chart folders private, then reset per-entry permissions for
    every chart's files, parts folder, and audio folder via batched requests.

    Returns 0 on success, 1 when the Drive library layout cannot be verified.
    """
    # Build drive
    service = util.build_service()
    check_stop_script()
    # Verify all needed folders exist and retrieve their ids
    print("Verifying DigitalLibrary format...")
    lib_ids = util.get_digital_library(service)
    library_id = lib_ids.get("library_id")
    if library_id is None:
        return 1
    sep_ids = util.get_separated_folders(service, library_id)
    if sep_ids is None:
        return 1
    print("Reading chart permissions...")
    all_chart_permissions = get_all_chart_permissions(
        service, lib_ids.get("current_id"), lib_ids.get("past_id"),
        lib_ids.get("future_id"))
    all_chart_batch = service.new_batch_http_request(callback=batch_callback)
    # (removed leftover debug print of the permission count)
    fixed = fix_permissions(service, all_chart_batch, all_chart_permissions,
                            allPrivate=True)
    if fixed:
        print("Making all chart folders private...")
        all_chart_batch.execute()
    for chartName in all_chart_permissions:
        entry_batch = service.new_batch_http_request(callback=batch_callback)
        chartId = all_chart_permissions[chartName][0]
        # Edit this line if you need to reset the permissions for a single chart
        # if chartName != 'Example Chart': continue
        print(f'Resetting permissions for "{chartName}"')
        entry_permissions = get_all_entry_permissions(service, chartId)
        parts_id, audio_id = util.get_parts_and_audio_folders(
            service, chartName, chartId)
        entry_permissions.update(get_all_entry_permissions(service, parts_id))
        entry_permissions.update(get_all_entry_permissions(service, audio_id))
        fixed = fix_permissions(service, entry_batch, entry_permissions)
        if fixed:
            print(f'Executing permissions updates')
            sleep(2)  # Avoid too many api calls
            entry_batch.execute()
    print("Successfully reset permissions")
    return 0
def validate_folder_files():
    """Validate the part files for every configured part, optionally producing
    a sample table of contents per part.

    Returns 0 on success, 1 when the options file cannot be parsed.
    """
    options = parse_options("folder_creator_options.json")
    if options is None:
        return 1
    for part in options["folder-parts"]:
        check_stop_script()
        # Validate part files; skip parts whose validation produced nothing
        title_map = pdf_tools.validate_part(part, options)
        if not title_map:
            continue
        # Generate sample table of contents
        if options["toc"]["generate-on-validation"]:
            validate_toc(part, title_map, options)
    print("Finished validating folders and parts")
    return 0
def add_individual_parts(service, parts_id, audio_id, section_ids, alias_map):
    """Create per-section shortcuts for each file in a chart's parts and audio
    folders, resolving the owning section via `alias_map`.
    """
    # Get and process parts; 'sec' = section parts, 'aud' = audio
    for folder_id, abbr in ((parts_id, 'sec'), (audio_id, 'aud')):
        # Renamed from `id` (shadowed the builtin)
        if folder_id is None:
            continue
        for item in util.get_drive_files(service, folder_id):
            check_stop_script()
            _, part, _ = util.parse_file(item.get('name'), alias_map)
            # Part mapping doesn't exist
            if part == util.NO_PART or not part:
                print(
                    f'WARNING: Cannot add "{item.get("name")}" - part not found'
                )
                continue
            # Make shortcut
            util.make_shortcut(service, item.get('name'), item.get('id'),
                               section_ids[abbr][part])
def generate_toc(toc_maps, options, file, verbose=False):
    """Render the table-of-contents PDF for a folder to `file`.

    Page geometry, margins, and the title/footer text all come from
    options["page-size"] and options["toc"]. The table data is produced by
    generate_toc_data and styled with zero/negative padding so columns sit
    flush.
    """
    # Get page num information
    check_stop_script()
    width, height = (inch * options["page-size"]["width"],
                     inch * options["page-size"]["height"])
    # Ensure the output directory exists before ReportLab writes to it
    path = os.path.dirname(file)
    validate_dir(path, verbose)
    check_stop_script()
    # Generate toc doc and data
    toc = SimpleDocTemplate(file,
                            pagesize=(width, height),
                            leftMargin=inch * .5,
                            rightMargin=inch * .5,
                            topMargin=.75 * inch,
                            bottomMargin=inch*.25)
    check_stop_script()
    styles = generate_toc_styles(options)
    data = generate_toc_data(toc_maps, options, styles)
    check_stop_script()
    # Construct table; negative side padding pulls columns tight together
    table = Table(data)
    table.setStyle(TableStyle([
        ('RIGHTPADDING', (0, 0), (-1, -1), -3),
        ('LEFTPADDING', (0, 0), (-1, -1), -3),
        ('BOTTOMPADDING', (0, 0), (-1, -1), 0),
        ('TOPPADDING', (0, 0), (-1, -1), 0)
    ]))

    # Add title and footer drawn directly on the canvas of the first page.
    # NOTE(review): the parameter shadows the reportlab `canvas` module; the
    # annotation still resolves to the module because it is evaluated at
    # definition time.
    def onFirstPage(canvas: canvas.Canvas, doc):
        canvas.saveState()
        font, size = (options["toc"]["title"]["font"],
                      options["toc"]["title"]["size"])
        canvas.setFont(font, size)
        # Vertical offsets scale with the font size (.02 inch per point)
        canvas.drawCentredString(width / 2, height - (.02 * inch * size),
                                 options["toc"]["title"]["label"])
        font, size = (options["toc"]["footer"]["font"],
                      options["toc"]["footer"]["size"])
        canvas.setFont(font, size)
        canvas.drawCentredString(width / 2, (.02 * inch * size),
                                 options["toc"]["footer"]["label"])
        canvas.restoreState()

    # write the document to disk
    check_stop_script()
    toc.build([table], onFirstPage=onFirstPage)
    if verbose:
        thread_print(f'Successfully created table of contents file at "{file}"')
def create_part_folder(part, options):
    """Assemble one part's complete folder PDF (TOC + numbered pages) and
    write it into the Output directory. Returns 0 on success, 1 when the
    part's files fail validation."""
    # Temp location for the generated table of contents
    toc_path = os.path.join(options["folder-dir"], "tmp", f"toc-{part}.pdf")
    # Validate part files
    thread_print(f'Writing {part} folder...')
    title_map = pdf_tools.validate_part(part, options)
    if not title_map:
        return 1
    check_stop_script()
    # Collect the enumerated pages into the writer
    writer = PdfFileWriter()
    toc_maps = [{}, {}, {}]
    generated = pdf_tools.generate_parts_pages(title_map, toc_maps, options,
                                               part, write_pages=True,
                                               verbose=options['verbose'])
    for page in generated:
        writer.addPage(page)
    # Prepend the table of contents as page 0
    if options['verbose']:
        thread_print(f'Generating {part} Table of Contents')
    pdf_tools.generate_toc(toc_maps, options, toc_path)
    writer.insertPage(pdf_tools.to_pages(toc_path)[0], 0)
    # Write the assembled document to disk
    check_stop_script()
    file_path = os.path.join(options["folder-dir"], "Output")
    pdf_tools.validate_dir(file_path, options["verbose"])
    out_file = os.path.join(file_path,
                            f'{options["folder-name"]} - {part}.pdf')
    with open(out_file, 'wb') as f:
        writer.write(f)
    thread_print(f'Successfully wrote {part} folder to "{file_path}"')
    # Cleanup temp files
    if os.path.isfile(toc_path):
        os.remove(toc_path)
    return 0
def get_lsjumb_digital_chartz_id(service, library_id):
    """Walk DigitalLibrary -> "[LIVE] DigitalLibrary" -> "LSJUMB Digital
    Chartz" and return the final folder id, or None when either hop is
    missing or ambiguous."""
    check_stop_script()
    # Each hop must resolve to exactly one folder under the previous one
    hops = (
        ("[LIVE] DigitalLibrary", "DigitalLibrary"),
        ("LSJUMB Digital Chartz", "[LIVE] DigitalLibrary"),
    )
    parent_id = library_id
    for target, container in hops:
        found = util.get_folder_ids(service, name=target, parent=parent_id)
        if not found or len(found) != 1:
            print(
                f'ERROR: Unable to find "{target}" directory in "{container}"'
            )
            return None
        parent_id = found[0]
    return parent_id
def get_separated_folders(service, library_id):
    """Resolve the three "Separated ..." folders plus their Current/Old/Future
    subfolders inside the Digital Library.

    Returns a dict keyed by abbreviation ('sib'/'sec'/'aud') and
    '<abbr>_<age>' ('curr'/'old'/'future'), or None when any expected folder
    is missing or duplicated."""
    check_stop_script()
    top_level = (("Separated Sibelius Files", "sib"),
                 ("Separated Section Parts", "sec"),
                 ("Separated Part Audio", "aud"))
    ages = (("Current Chartz", "curr"),
            ("Old Chartz", "old"),
            ("Future Chartz", "future"))
    separated_ids = {}
    for folder_name, abbr in top_level:
        # Each separated folder must exist exactly once in the Digital Library
        ids = get_folder_ids(service, name=folder_name, parent=library_id)
        if not has_unique_folder(ids, folder_name, "Digital Library"):
            return None
        separated_ids[abbr] = ids[0]
        # Each must contain one Current, Old, and Future Chartz subfolder
        for age_name, age in ages:
            age_ids = get_folder_ids(service, name=age_name, parent=ids[0])
            if not has_unique_folder(age_ids, age_name, folder_name):
                return None
            separated_ids[f'{abbr}_{age}'] = age_ids[0]
    return separated_ids
def write_song(service, chart, chart_id, new_folder_id, new_resources_id,
               section_ids=None, alias_map=None):
    """Populate the new redvest folder with one chart: a shortcut to its parts
    plus its audio/video resources (per-section shortcuts when section_ids and
    alias_map are provided)."""
    check_stop_script()
    print(f"Writing chart \"{chart}\"...")
    # Shortcut into the parts folder
    check_stop_script()
    add_parts_shortcut(service, chart, chart_id, new_folder_id, section_ids,
                       alias_map)
    # Audio/video content goes into the 'Resources' folder
    check_stop_script()
    add_resources(service, chart, chart_id, new_resources_id)
def upload_files():
    """Upload local resource files into the Drive digital library.

    Mode (from upload_options.json): 0 = update existing files only,
    1 = add new files only, 2 = update and add.
    Returns 0 on success, 1 when any options/folder verification fails.
    """
    # Build service
    service = util.build_service()
    # Read options. Check the parse result BEFORE subscripting it: the
    # original indexed ['parts'] first and crashed with a TypeError whenever
    # parts.json failed to parse.
    parts_options = util.parse_options("parts.json")
    if parts_options is None:
        return 1
    alias_map = util.make_alias_map(parts_options['parts'])
    options = util.parse_options("upload_options.json")
    if options is None:
        return 1
    check_stop_script()
    # Get list of files that need to be uploaded
    files = util.get_dir_files(options["resources-directory"],
                               options["supported-file-types"])
    check_stop_script()
    # Verify all needed folders exist and retrieve their ids
    print("Verifying DigitalLibrary format...")
    lib_ids = util.get_digital_library(service)
    library_id = lib_ids.get("library_id")
    if library_id is None:
        return 1
    check_stop_script()
    separated_ids = util.get_separated_folders(service, library_id)
    if separated_ids is None:
        return 1
    check_stop_script()
    # Cache will store folder + parts folder ids and a list of files
    cache = {}
    # 0 = update only, 1 = new files only, 2 = update and add new files
    mode = options["mode"]
    # chart_info["to"] destination index -> lib_ids key (anything else is
    # treated as the archive, matching the original conditional chain)
    dest_keys = {0: "current_id", 1: "past_id", 2: "future_id"}
    # Create new folders (if in proper mode)
    if mode != 0:
        for chart_info in options["new-chartz"]:
            check_stop_script()
            new_chart_dest_key = dest_keys.get(chart_info["to"], "archive_id")
            chart_id, parts_id, audio_id = lib_management.create_chart_structure(
                service, lib_ids.get(new_chart_dest_key), chart_info["name"])
            if chart_id:
                cache[chart_info["name"]] = {
                    "chart_id": chart_id,
                    "parts_id": parts_id,
                    "audio_id": audio_id,
                    "loc": chart_info["to"],
                    "files": []
                }
    # Operate on files
    for file in files:
        # Check to see if we should exit
        check_stop_script()
        # Populate cache
        lib_management.populate_cache(service, lib_ids.get("current_id"),
                                      lib_ids.get("past_id"),
                                      lib_ids.get("future_id"),
                                      lib_ids.get("archive_id"),
                                      util.parse_file(file, alias_map)[0],
                                      cache, options)
        updated = None
        added = None
        # Update file
        if mode != 1:
            updated = lib_management.update_file(service, file, alias_map,
                                                 cache, options)
        # Add file
        if mode != 0 and not updated:
            added = lib_management.add_file(service, file, separated_ids,
                                            alias_map, cache, options)
        # print output (== True on purpose: None means "not attempted")
        if updated == True:
            print(f'Successfully updated "{file}"')
        elif added == True:
            print(f'Successfully added "{file}"')
        elif updated == False:
            print(f'ERROR: Unable to update "{file}"')
    print("Finished uploading files")
    return 0
def get_digital_library(service):
    """Verify that the DigitalLibrary folder and subfolders are in the correct
    locations.

    Returns a dict with library/current/past/future/archive folder ids, or {}
    as soon as any expected folder is missing or duplicated
    (has_unique_folder prints the error).
    """
    check_stop_script()
    # DigitalLibrary folder
    library_res = get_folder_ids(service, name="DigitalLibrary", parent="root")
    if not has_unique_folder(library_res, "DigitalLibrary", "My Drive"):
        return {}
    library_id = library_res[0]
    # Digitized Chart Data folder
    full_dig_res = get_folder_ids(service,
                                  name="LSJUMB Full Digitized Chart Data",
                                  parent=library_id)
    if not has_unique_folder(full_dig_res, "LSJUMB Full Digitized Chart Data",
                             "DigitalLibrary"):
        return {}
    full_dig_id = full_dig_res[0]
    # The four age-group folders all sit directly under the digitized data
    # folder; resolve each with the same lookup + uniqueness check (collapses
    # four copy-pasted stanzas from the original).
    ids = {"library_id": library_id}
    age_folders = (("Current Chartz", "current_id"),
                   ("Old Chartz", "past_id"),
                   ("Future Chartz", "future_id"),
                   ("Archived Chartz", "archive_id"))
    for folder_name, key in age_folders:
        res = get_folder_ids(service, name=folder_name, parent=full_dig_id)
        if not has_unique_folder(res, folder_name,
                                 "LSJUMB Full Digitized Chart Data"):
            return {}
        ids[key] = res[0]
    return ids
def build_service():
    """Construct an authenticated Google Drive v3 service client."""
    check_stop_script()
    credentials = fetch_credentials()
    return build('drive', 'v3', credentials=credentials)
def write_shortcuts(service, chartname, id, age, new_folders, alias_map):
    """Create shortcuts for one chart's Sibelius file, part PDFs, and audio
    files in the corresponding separated folders for `age`.

    `id` is the chart's folder id (name kept for interface compatibility even
    though it shadows the builtin).
    """
    # Find the sibelius file
    check_stop_script()
    res = util.get_drive_files(service, id, ['.sib'])
    if not res:  # None and empty list both mean "no file found"
        print(f'WARNING: Could not find Sibelius file for chart "{chartname}"')
    else:
        if len(res) > 1:
            print(
                f'WARNING: Multiple Sibelius files found for chart "{chartname}". The first one seen will be used in the shortcut.'
            )
        util.make_shortcut(service, res[0].get('name'), res[0].get('id'),
                           new_folders[f'sep_sib_{age}'])
    # Find the chart's Parts and Audio folder
    check_stop_script()
    parts_id, audio_id = util.get_parts_and_audio_folders(
        service, chartname, id)
    if not parts_id:
        return
    # Create a shortcut for each part in the Parts folder
    res = util.get_drive_files(service, parts_id, ['.pdf'])
    if not res:
        print(f'ERROR: No parts files found for chart "{chartname}"')
        return
    for partfile in res:
        # Ensure part exists in the system
        check_stop_script()
        partfile_name = partfile.get('name')
        _, part, _ = util.parse_file(partfile_name, alias_map)
        if part is None or part == util.NO_PART:
            print(
                f'WARNING: Part file "{partfile_name}" has no matching part folder'
            )
            continue
        # Make the shortcut
        util.make_shortcut(service, partfile_name, partfile.get('id'),
                           new_folders['sec'][age][part])
    # Create a shortcut for each part in the Audio folder
    res = util.get_drive_files(service, audio_id)
    if not res:
        print(f'WARNING: No audio files found for chart "{chartname}"')
        return
    for audiofile in res:
        # Ensure audio part exists in the system
        check_stop_script()
        audiofile_name = audiofile.get('name')
        _, part, _ = util.parse_file(audiofile_name, alias_map)
        if part is None or part == util.NO_PART:
            print(
                f'WARNING: Audio file "{audiofile_name}" has no matching part folder'
            )
            continue
        # Make the shortcut
        util.make_shortcut(service, audiofile_name, audiofile.get('id'),
                           new_folders['aud'][age][part])
def verify_chart_name(service, chart, parent_ids):
    """Return the chart's id when it exists under one of parent_ids, else
    whatever get_chart_id reports (None for an unknown chart)."""
    check_stop_script()
    return util.get_chart_id(service, chart, parent_ids).get("chart_id")