def run():
    """Run the self-update sequence: pull latest code, then apply migrations.

    When the DO_NOT_UPDATE dev flag is set, the update is skipped entirely
    and requirements.txt is refreshed from the current environment instead.

    Returns:
        bool: True when updating is skipped; otherwise the result of
        Update.run_migrations().
    """
    if Dev.get("DO_NOT_UPDATE"):
        # Dev shortcut: snapshot installed packages instead of updating
        Run.prg("pip", "freeze > requirements.txt")
        return True
    Update.pull_updates_from_git()
    return Update.run_migrations()
def get_root_id():
    """Return the Drive ID of the cloud root folder, prompting if unknown.

    Reads the ID from Settings (using the DEV key when the ALT_LOCATION dev
    flag is set).  If no ID is stored yet, asks the user for the
    "Cloud Root Filepath", resolves it via Drive, caches it in Settings,
    and returns it.

    Raises:
        Exception: if the user-supplied path cannot be resolved to an ID.
    """
    # Check to see which root id we are getting
    key = KEY_CLOUD_ROOT_ID
    title = "Cloud ID Missing"
    if Dev.get("ALT_LOCATION"):
        # Dev installs point at an alternate (development) cloud root
        key = KEY_CLOUD_ROOT_ID_DEV
        title = "Cloud DEVELOPMENT ID Missing"
    # Get the ID from Settings
    result = Settings.get_key(key)
    # If the ID doesn't exist in settings, let's ask for it!
    if not result:
        dialog = Dialog(
            title = title,
            body = [
                f'The Cloud root ID is currently missing.. So lets find it!',
                f'\n',
                f'\n',
                f'Ask your administrator for the "Cloud Root Filepath" and',
                f'enter that path below.',
                f'\n',
                f'\n',
            ]
        )
        ans = dialog.get_result("Filepath")
        # Resolve the user-supplied path to a Drive ID, searching from "root"
        result = Drive.get_id(search=ans, root="root")
        if not result:
            raise Exception("There was a problem with getting the Cloud Root ID..")
        # Cache the freshly-resolved ID so we only ask once
        Settings.set_key(key, result)
    return result
def create_banner(self):
    """Return the program's startup banner as one newline-joined string.

    The banner shows the current version centered between two rule lines;
    dev installs get an extra dev banner line prepended.
    """
    lines = [
        f'##################################################',
        f'LOFSongManager V{Settings.get_version()}'.center(50),
        f'##################################################',
    ]
    if Dev.isDev():
        # Developers see an extra banner line on top
        lines.insert(0, self.create_dev_banner())
    return "\n".join(lines)
def is_dirty(self):
    """Return True when the working song file differs from its 'original'
    copy, i.e. there are locally saved changes not yet uploaded to the cloud.

    Returns False (with a warning) when either file is missing.
    """
    if Dev.get("ALL_IS_DIRTY"):
        # Dev flag: force every project to look dirty
        return True
    song = self.get_song_file()
    if not song.exists():
        Log("Studio One project file doesn't exist!", "warning")
        return False
    pristine = self.get_song_file(version="original")
    if not pristine.exists():
        Log("Studio One 'original' project file doesn't exist!", "warning")
        return False
    # shallow=False forces a byte-by-byte comparison, not just stat() data
    return not filecmp.cmp(song, pristine, shallow=False)
def extract(filepath, destination):
    """Extract the tar.gz archive at `filepath` into `destination`.

    Honors the NO_EXTRACT dev flag (logs but does nothing).  On failure,
    logs the error, waits for the user, and exits the program.

    Args:
        filepath: path to the .tar.gz archive (str or Path).
        destination: directory to extract into (str or Path).
    """
    filepath = Path(filepath)
    destination = Path(destination)
    Log(f'Extracting "{filepath.stem}"..')
    if not Dev.get("NO_EXTRACT"):
        try:
            # Quote both paths so filenames containing spaces survive the
            # shell (matches the quoting already used by `compress`).
            rc = os.system(
                f'tar -xzf "{filepath.absolute()}" -C "{destination.absolute()}"'
            )
            # os.system() never raises on a failed command; a nonzero exit
            # status is the only failure signal, so promote it to an error.
            if rc != 0:
                raise Exception(f"tar exited with status {rc}")
        except Exception as e:
            Log(f'Failed to extract "{filepath.stem}"')
            Log(f'\n\n{e}\n\n', None)
            Log.press_enter()
            exit()
    Log(f'Finished extracting "{filepath.stem}!"')
def compress(folderpath, destination):
    """Create a tar.gz archive of `folderpath` at `destination`.

    Runs tar from the folder's parent so the archive contains only the
    folder name, not its absolute path.  Honors the NO_COMPRESS dev flag.
    On failure, logs the error, waits for the user, and exits the program.

    Args:
        folderpath: directory to compress (str or Path).
        destination: output .tar.gz path (str or Path).
    """
    folderpath = Path(folderpath)
    destination = Path(destination)
    cwd = Path.cwd()
    Log(f'Compressing "{folderpath.stem}"..')
    if not Dev.get("NO_COMPRESS"):
        try:
            rc = os.system(
                f'cd "{folderpath.parent}" && tar -czf "{destination.absolute()}" "{folderpath.name}" && cd "{cwd.absolute()}"'
            )
            # os.system() never raises on a failed command; without this
            # check a failed tar would still report "Finished compressing".
            if rc != 0:
                raise Exception(f"tar exited with status {rc}")
        except Exception as e:
            Log(f'Failed to compress "{folderpath.stem}"')
            Log(f'\n\n{e}\n\n', None)
            Log.press_enter()
            exit()
    Log(f'Finished compressing "{folderpath.stem}!"')
def download(ID, save_path):
    """Download Drive file `ID` to `save_path`, retrying up to 5 times.

    Honors the NO_DOWNLOAD dev flag (returns True without downloading).
    The file is buffered fully in memory before being written to disk.

    Returns:
        bool: True on success (or when skipped), False after 5 failures.
    """
    if Dev.get("NO_DOWNLOAD"):
        return True
    # Hoisted out of the retry loop so the failure log below can always
    # use save_path.name even if the very first attempt blows up early.
    save_path = Path(save_path)
    # Guard: `downloader` is inspected after the loop; an early failure
    # inside the try could otherwise leave it unbound -> NameError.
    downloader = None
    # We will attempt to download 5x before we give up
    i = 0
    while i < 5:
        try:
            request = Drive.service.files().get_media(fileId=ID)
            fh = io.BytesIO()
            downloader = MediaIoBaseDownload(fh, request, chunksize=51200*1024)
            print(f'----> "{save_path.name}" Downloaded 0%', end="\r", flush=True)
            done = False
            while done is False:
                status, done = downloader.next_chunk()
                if status:
                    print(f'----> "{save_path.name}" Downloaded {int(status.progress() * 100)}%', end="\r", flush=True)
            # If we made it this far then we should be all set
            i = 99
        except Exception:
            # Narrowed from a bare `except:` so Ctrl-C still aborts
            i += 1
            Log(f'Download attempt #{i}..',"notice")
    # If we got a return from the Drive AND we completed the while loop
    if downloader and i==99:
        print(f' "{save_path.name}" Downloaded successfully!')
        fh.seek(0)
        with open(save_path.absolute(), "wb") as f:
            shutil.copyfileobj(fh, f, length=1024*1024)
        return True
    Log(f'Failed to download "{save_path.name}"!',"warning")
    return False
def open_studio_one(self): Log("OPENING STUDIO ONE","notice") # First create a temp version of the project File.recursive_overwrite( self.get_song_file(), self.get_song_file(version="temp") ) Dialog( title = "Wait for Studio One to close!", body = "DO NOT CLOSE THIS WINDOW!!" ) if Dev.get("NO_OPEN_STUDIO_ONE"): # Do not open studio one return True # Build the dummy files self.set_dummy_files() # Open Studio One ans = Run.prg( alias = "open", command = f'{ self.get_song_file(version="temp") }', wait = True ) # Remove dummy files self.remove_dummy_files() if ans != 0: return False # Copy over any saved data to the original song file File.recursive_overwrite( self.get_song_file(version="temp"), self.get_song_file() ) File.delete( self.get_song_file(version="temp") ) return True
def __init__(self, text, endpoint="prod", quiet=False):
    """Send the notification `text`.

    Dev installs are always routed to the "dev" endpoint regardless of
    the `endpoint` argument.
    """
    if Dev.isDev():
        endpoint = "dev"
    self._send_notification(text, endpoint, quiet)
def upload_project(self):
    """Compress this project and upload it to the cloud, syncing hashes.

    Refuses to upload when the cloud copy is newer.  On success the local
    and remote hashes are updated, a Slack notice is sent, the project is
    removed from the dirty list, and the working song file becomes the
    new "original".

    Returns:
        bool: True on success, False on any failure.
    """
    # Make sure our project is the most up-to-date
    if not self.is_up_to_date():
        Log( "There are updates for this project on the cloud!.. can not upload!", "warning")
        Log.press_enter()
        return False
    # Make sure we set the category if it's never been uploaded before
    if not self.is_remote():
        self.change_category(back=False)
    if not Dev.get("NO_OPEN_STUDIO_ONE"):
        # Optionally let the user prune unused audio inside Studio One first
        if self.dialog_remove_unused_audio_files():
            if not self.open_studio_one():
                return False
    # Create folder for mixdowns on cloud if it doesn't exist
    if not Drive.get_id(self.entry.name):
        Drive.mkdir(self.entry.name)
    # We will need to set the local hash on our own in case of failure
    # to upload
    if not self.compress_project(set_hash=False):
        return False
    # Upload compressed project
    Log("Uploading.. please be patient", "notice")
    if not Dev.get("NO_LOF_UPLOAD"):
        result = Drive.upload(filepath=self.get_cache_file(), mimeType=Drive.mimeType['zip'])
        if not result:
            Log("Drive upload could not complete..", "warning")
            Log.press_enter()
            return False
    # NOTE(review): if the NO_LOF_UPLOAD dev flag is set, `result` is never
    # bound and the assignment below raises NameError — confirm intended.
    # Update local hash
    Hash.set_project_hash(self, Hash.create_hash_from_project(self))
    # Update remote hash if file was successfully uploaded
    self.entry.data["id"] = result
    self.entry.data["hash"] = Hash.get_project_hash(self)
    self.entry.update()
    Slack(
        f'{Slack.get_nice_username()} uploaded a new version of {Slack.make_nice_project_name(self.entry.name)}'
    )
    # Remove name from dirty list
    self.remove_dirty()
    # Since we successfully uploaded to the drive, we can now get
    # rid of the *original song file
    File.recursive_overwrite(self.get_song_file(), self.get_song_file(version="original"))
    Log("Compression and upload complete!", "notice")
    return True
def clear_temp():
    """Empty the temp directory unless the NO_CLEAR_TEMP dev flag is set."""
    if Dev.get("NO_CLEAR_TEMP"):
        return
    Folder.clear("temp")
    Log("Temp directory cleared!")
def install_pip_packages():
    """Install requirements; dev installs then re-freeze requirements.txt."""
    Run.prg("pip", "install -r requirements.txt")
    if not Dev.isDev():
        return
    # Keep requirements.txt in sync with the dev environment
    Run.prg("pip", "freeze > requirements.txt")
def upload(filepath, mimeType, parent=None):
    """Upload `filepath` to Drive under `parent`, retrying up to 5 times.

    If a file with the same name already exists under `parent` it is
    updated in place; otherwise a new file is created.  Honors the
    NO_UPLOAD dev flag (returns True without uploading).

    Returns:
        The uploaded file's Drive ID on success, True when skipped,
        or False on failure.
    """
    # If no parent is specified, use the project root_id
    if not parent:
        parent = Drive.root_id
    if Dev.get("NO_UPLOAD"):
        return True
    filepath = Path(filepath)
    file = None
    # We will attempt to upload 5x before we give up
    i = 0
    while i < 5:
        try:
            # Check to see if the file is already uploaded
            results = Drive.ls(parent)
            for r in results:
                if r["name"] == filepath.name:
                    # Same name exists: update it in place
                    file = Drive.service.files().update(
                        fileId=r["id"],
                        body={
                            "name": r["name"],
                        },
                        media_body=MediaFileUpload(
                            filepath.absolute(),
                            chunksize=51200*1024,
                            mimetype=mimeType,
                            resumable=True
                        )
                    )
                    break
            # If file is not already uploaded
            if not file:
                file = Drive.service.files().create(
                    body={
                        "name": filepath.name,
                        "parents": [ parent ],
                    },
                    media_body=MediaFileUpload(
                        filepath.absolute(),
                        chunksize=51200*1024,
                        mimetype=mimeType,
                        resumable=True
                    )
                )
            # Push the chunks, reporting progress as we go
            print(f'----> "{filepath.name}" Uploaded 0%', end="\r", flush=True)
            response = None
            while response is None:
                status, response = file.next_chunk()
                if status:
                    print(f'----> "{filepath.name}" Uploaded {int(status.progress() * 100)}%', end="\r", flush=True)
            # If we made it this far then we should be all set
            i = 99
        except Exception:
            # Narrowed from a bare `except:` so Ctrl-C still aborts
            i += 1
            Log(f'Upload attempt #{i}..',"notice")
    # If we got a return from the Drive AND we completed the while loop
    if file and i==99:
        print(f' "{filepath.name}" Uploaded successfully!')
        # Try 5x to get the new ID from the file
        # (renamed from `id`, which shadowed the builtin)
        attempt = 0
        while attempt < 5:
            file_id = Drive.get_id(filepath.name, root=parent)
            if file_id:
                return file_id
            attempt += 1
        return False
    else:
        Log(f'Failed to upload "{filepath.name}"!',"warning")
        return False