def update(self):
    '''Grab the remote file and store it locally.'''
    # Fetch the compressed channel index, then expand it beside the
    # downloaded copy (strip the trailing ".gz" for the output name).
    get_remote_file(self.full_path, self.channel_file, True)
    uncompressed = self.channel_file[0:-3]
    shell_command('gunzip -c %s > % s' % (self.channel_file, uncompressed))
    if not self.has_release:
        return
    # Derive the Release file locations from the channel paths:
    # swap the last remote path segment / local name segment for "Release".
    base_remote = '/'.join(self.full_path.split('/')[0:-1])
    base_local = '_'.join(self.channel_file.split('_')[0:-1])
    get_remote_file(base_remote + '/Release', base_local + '_Release', True)
def update(self):
    '''Grab the remote file and store it locally.'''
    get_remote_file(self.full_path, self.channel_file, True)
    # Expand the gzipped channel data next to the compressed copy.
    shell_command('gunzip -c %s > % s'
                  % (self.channel_file, self.channel_file[0:-3]))
    if self.has_release:
        # The Release file lives one path segment up remotely, and is
        # named by swapping the final underscore-segment locally.
        remote_release = '/'.join(self.full_path.split('/')[0:-1]) \
                         + '/Release'
        local_release = '_'.join(self.channel_file.split('_')[0:-1]) \
                        + '_Release'
        get_remote_file(remote_release, local_release, True)
def fetch_world_data(self):
    '''Update all remote source and channel data.'''
    # Apt repositories publish a per-distribution Release file; grab it
    # for each apt channel before refreshing the section indexes.
    for channel in self.sections.keys():
        first = self.sections[channel][0]
        if not isinstance(first, AptDebSection):
            continue
        remote_base = re.match('.*/dists/', first.full_path).group()
        remote_file = remote_base + first.dist + '/Release'
        local_base = re.match('.*%%.*dists_', first.channel_file).group()
        local_file = local_base + first.dist.replace('/', '_') + '_Release'
        get_remote_file(remote_file, local_file, True)
    for dummy, section in self.iter_sections():
        section.update()
    self.index_world_data()
def fetch_world_data(self):
    '''Update all remote source and channel data.'''
    # Patterns that locate the ".../dists/" portion of the remote path
    # and the "...%%...dists_" portion of the local cache file name.
    remote_pattern = re.compile('.*/dists/')
    local_pattern = re.compile('.*%%.*dists_')
    for channel_sections in self.sections.values():
        lead = channel_sections[0]
        if isinstance(lead, AptDebSection):
            # Mirror the distribution's Release file next to the index.
            remote_file = (remote_pattern.match(lead.full_path).group()
                           + lead.dist + '/Release')
            local_file = (local_pattern.match(lead.channel_file).group()
                          + lead.dist.replace('/', '_') + '_Release')
            get_remote_file(remote_file, local_file, True)
    for dummy, section in self.iter_sections():
        section.update()
    self.index_world_data()
def update(self):
    '''Grab the remote file and store it locally.'''
    get_remote_file(self.repomd_path, self.repomd_data, True)
    # repomd.xml describes where the primary metadata lives; fetch it too.
    remote_primary, local_primary = self.get_primary_data()
    get_remote_file(remote_primary, local_primary, True)
source_file = parts[2] try: progress.start() copy2(source_file, local_filename) progress.done() self.umask_permissions(local_filename) except IOError, e: if e.errno == 2 and os.path.exists(local_filename): raise CacheImportError('%s not found' % full_url) else: raise else: try: get_remote_file(full_url, local_filename, progress=progress) except pycurl.error, msg: raise CacheImportError('%s, %s' % (msg, full_url)) self.incorporate_file(local_filename, locator.blob_id) mass_progress.note_finished(locator.blob_id) mass_progress.write_progress() finally: if os.path.exists(local_filename): os.unlink(local_filename) def _add_links(self, source, blob_ids): '''Create visible links to the blob contained in source. Assume the blob_ids are correct. '''
if scheme in ('file', ''): source_file = parts[2] try: progress.start() copy2(source_file, local_filename) progress.done() self.umask_permissions(local_filename) except IOError, e: if e.errno == 2 and os.path.exists(local_filename): raise CacheImportError('%s not found' % full_url) else: raise else: try: get_remote_file(full_url, local_filename, progress = progress) except pycurl.error, msg: raise CacheImportError('%s, %s' % (msg, full_url)) self.incorporate_file(local_filename, locator.blob_id) mass_progress.note_finished(locator.blob_id) mass_progress.write_progress() finally: if os.path.exists(local_filename): os.unlink(local_filename) def _add_links(self, source, blob_ids): '''Create visible links to the blob contained in source. Assume the blob_ids are correct. ''' seed = self.make_download_filename()