def run(self, kwargs):
    """Fetch this dependency, downloading it into the local cache if absent.

    Looks the entry up in the local cache; on a miss, downloads it from the
    S3 repository, verifies each file's MD5 against the server-reported
    checksum (warning on mismatch), and stores it in the cache (optionally
    unpacking).

    Args:
        kwargs: unused here; accepted for interface compatibility with the
            task runner — TODO confirm against caller.

    Returns:
        tuple: ``(files, state)`` where ``files`` is a list of absolute paths
        of the entry's files (excluding the ``.umpire`` config file) and
        ``state`` is ``EntryState.DOWNLOADED`` or ``EntryState.CACHE``.

    Raises:
        EntryError: if the remote entry cannot be found, or the entry cannot
            be retrieved from the cache after being stored.
        Exception: any exception captured by the async downloader is
            re-raised here.
    """
    # Verify argument validity.
    self.__verify_arguments__()

    # Get cache name from remote url, then the on-disk cache path.
    cache_name = self.get_cache_name()
    cache_path = os.path.join(self.cache_root, cache_name)

    # Get cache object (will raise an exception if it doesn't exist).
    cache = LocalCache(cache_path, host_id=default_host_id)
    cache.DEBUG = self.DEBUG

    # Lock the specific entry path while we inspect/populate it.
    entry_path = os.path.join(cache_path, self.dependency_platform,
                              self.dependency_name, self.dependency_version)
    cache.lock(entry_path)

    # Try to get entry from cache.
    entry = cache.get(self.dependency_platform, self.dependency_name,
                      self.dependency_version)

    # Record whether this run had to download or found a cached copy.
    state = EntryState.DOWNLOADED if entry is None else EntryState.CACHE

    # We need to download the file, it wasn't in the cache.
    if entry is None:
        print(self.format_entry_name() + ": Not in cache")
        full_url = s3.join_s3_url(self.dependency_repo, self.dependency_platform,
                                  self.dependency_name, self.dependency_version)
        print(self.format_entry_name() + ": Downloading " + full_url)

        # Configure and start the asynchronous S3 downloader.
        downloader = s3.AsyncS3Downloader(None)
        downloader.source_url = full_url
        downloader.destination_path = os.path.join(self.cache_root, "downloading") + os.sep
        downloader.start()

        # Wait for downloader to finish.
        # TODO: Do something with the reported progress
        while downloader.status != module.DONE:
            time.sleep(0.5)

        # Check for an exception, if so bubble it up.
        if downloader.exception is not None:
            raise downloader.exception

        print(self.format_entry_name() + ": Download complete")
        if downloader.result is None or len(downloader.result) == 0:
            raise EntryError(self.format_entry_name() +
                             ": Unable to find remote entry '" + full_url + "'")

        # Iterate over the result (downloaded files), verifying each
        # file's MD5 against the server-reported checksum.
        for item, checksum in downloader.result:
            local_file_checksum = mfile.md5_checksum(item)
            if checksum != local_file_checksum:
                print(self.format_entry_name() + ": WARNING: Downloaded file does not match the checksum on the server")
                print(self.format_entry_name() + ": WARNING: local:\t" + str(local_file_checksum))
                print(self.format_entry_name() + ": WARNING: server:\t" + str(checksum))
            if self.dependency_unpack:
                print(self.format_entry_name() + ": Unpacking...")
            # Put will unlock.
            cache.put(item, self.dependency_platform, self.dependency_name,
                      self.dependency_version, unpack=self.dependency_unpack)

        # Re-read the entry we just stored; failure here is fatal.
        entry = cache.get(self.dependency_platform, self.dependency_name,
                          self.dependency_version)
        if entry is None:
            raise EntryError(self.format_entry_name() + ": Error retrieving entry from cache.")

    # NOTE(review): unlock appears unconditional in the original linear text;
    # on the download path `put` already unlocked — confirm unlock is idempotent.
    cache.unlock(entry_path)

    # Entry is not None; return all files listed in the entry that aren't
    # the configuration files.
    return [os.path.abspath(os.path.join(entry.path, f))
            for f in os.listdir(entry.path)
            if f != ".umpire"], state
def run(self, kwargs):
    """Fetch this dependency, downloading it into the local cache on a miss.

    Checks the local cache for the entry; if missing, downloads it from S3,
    compares each downloaded file's MD5 with the checksum reported by the
    server (printing a warning on mismatch), and puts it into the cache,
    unpacking when configured.

    Args:
        kwargs: unused in the body; kept for call-site compatibility —
            TODO confirm against caller.

    Returns:
        tuple: ``(files, state)``; ``files`` are absolute paths of the
        entry's contents (the ``.umpire`` configuration file is excluded)
        and ``state`` is ``EntryState.DOWNLOADED`` or ``EntryState.CACHE``.

    Raises:
        EntryError: when the remote entry is missing or the cache lookup
            after storing fails.
        Exception: re-raises any exception captured by the downloader.
    """
    # Verify argument validity.
    self.__verify_arguments__()

    # Get cache name from remote url.
    cache_name = self.get_cache_name()
    # Get cache path.
    cache_path = os.path.join(self.cache_root, cache_name)

    # Get cache object (will raise an exception if it doesn't exist).
    cache = LocalCache(cache_path, host_id=default_host_id)
    cache.DEBUG = self.DEBUG

    # Lock this entry's directory for the duration of the check/populate.
    entry_dir = os.path.join(cache_path, self.dependency_platform,
                             self.dependency_name, self.dependency_version)
    cache.lock(entry_dir)

    # Try to get entry from cache.
    entry = cache.get(self.dependency_platform, self.dependency_name,
                      self.dependency_version)

    # Set state: downloaded on a miss, cache on a hit.
    state = EntryState.DOWNLOADED if entry is None else EntryState.CACHE

    # We need to download the file, it wasn't in the cache.
    if entry is None:
        print(self.format_entry_name() + ": Not in cache")
        full_url = s3.join_s3_url(
            self.dependency_repo,
            self.dependency_platform,
            self.dependency_name,
            self.dependency_version,
        )
        print(self.format_entry_name() + ": Downloading " + full_url)

        # Get Downloader and set its arguments.
        downloader = s3.AsyncS3Downloader(None)
        downloader.source_url = full_url
        downloader.destination_path = os.path.join(self.cache_root, "downloading") + os.sep
        downloader.start()

        # Wait for downloader to finish.
        # TODO: Do something with the reported progress
        while downloader.status != module.DONE:
            time.sleep(0.5)

        # Check for an exception, if so bubble it up.
        if downloader.exception is not None:
            raise downloader.exception

        print(self.format_entry_name() + ": Download complete")
        if downloader.result is None or len(downloader.result) == 0:
            raise EntryError(self.format_entry_name() +
                             ": Unable to find remote entry '" + full_url + "'")

        # Iterate over the result (downloaded files): verify checksums,
        # then store each file in the cache.
        for item, checksum in downloader.result:
            local_file_checksum = mfile.md5_checksum(item)
            if checksum != local_file_checksum:
                print(self.format_entry_name() + ": WARNING: Downloaded file does not match the checksum on the server")
                print(self.format_entry_name() + ": WARNING: local:\t" + str(local_file_checksum))
                print(self.format_entry_name() + ": WARNING: server:\t" + str(checksum))
            if self.dependency_unpack:
                print(self.format_entry_name() + ": Unpacking...")
            # Put will unlock.
            cache.put(
                item,
                self.dependency_platform,
                self.dependency_name,
                self.dependency_version,
                unpack=self.dependency_unpack,
            )

        # The entry must now exist in the cache; anything else is fatal.
        entry = cache.get(self.dependency_platform, self.dependency_name,
                          self.dependency_version)
        if entry is None:
            raise EntryError(self.format_entry_name() + ": Error retrieving entry from cache.")

    # NOTE(review): unlock reads as unconditional in the collapsed original;
    # the download path was already unlocked by `put` — confirm double
    # unlock is harmless.
    cache.unlock(entry_dir)

    # Entry is not None; return all the files listed in the entry that
    # aren't the configuration files.
    return [os.path.abspath(os.path.join(entry.path, f))
            for f in os.listdir(entry.path)
            if f != ".umpire"], state